[automerger skipped] Cache MMAP client silenced state. am: 5ee3824389 am: b407f062fd -s ours am: cb03c9df0d -s ours am: f2be76627c -s ours am: 4b21717891 -s ours am: a7791a8ce3 -s ours am: 09bde957fb -s ours

am skip reason: Merged-In I49b5a0f08d1747053f868db6e88c0f677256fc3c with SHA-1 a2f00f95e0 is already in history

Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/19382430

Change-Id: I367987654f375a49213f8f9e96ff5212d49557fc
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..a7614d2
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,13 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
diff --git a/Android.bp b/Android.bp
index 60f0ff1..ee609e1 100644
--- a/Android.bp
+++ b/Android.bp
@@ -57,7 +57,7 @@
             min_sdk_version: "29",
             apex_available: [
                 "//apex_available:platform",
-                "com.android.bluetooth.updatable",
+                "com.android.bluetooth",
                 "com.android.media",
                 "com.android.media.swcodec",
             ],
@@ -86,7 +86,7 @@
     min_sdk_version: "29",
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth.updatable",
+        "com.android.bluetooth",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/METADATA b/METADATA
index aabda36..146bfcb 100644
--- a/METADATA
+++ b/METADATA
@@ -2,22 +2,22 @@
 #     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
 #     DEPENDING ON IT IN YOUR PROJECT. ***
 third_party {
-  # would be NOTICE save for Widevine Master License Agreement in:
-  #   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
-  #   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
-  #   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
-  #   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
-  #   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
-  # and patent disclaimers in:
-  #   media/codec2/components/aac/patent_disclaimer.txt
-  #   media/codec2/components/amr_nb_wb/patent_disclaimer.txt
-  #   media/codec2/components/mp3/patent_disclaimer.txt
-  #   media/codec2/components/mpeg4_h263/patent_disclaimer.txt
-  #   media/codecs/amrnb/patent_disclaimer.txt
-  #   media/codecs/amrwb/dec/patent_disclaimer.txt
-  #   media/codecs/amrwb/enc/patent_disclaimer.txt
-  #   media/codecs/m4v_h263/patent_disclaimer.txt
-  #   media/codecs/mp3dec/patent_disclaimer.txt
-  #   media/libstagefright/codecs/aacenc/patent_disclaimer.txt
+  license_note: "would be NOTICE save for Widevine Master License Agreement in:\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h\n"
+  " and patent disclaimers in:\n"
+  "   media/codec2/components/aac/patent_disclaimer.txt\n"
+  "   media/codec2/components/amr_nb_wb/patent_disclaimer.txt\n"
+  "   media/codec2/components/mp3/patent_disclaimer.txt\n"
+  "   media/codec2/components/mpeg4_h263/patent_disclaimer.txt\n"
+  "   media/codecs/amrnb/patent_disclaimer.txt\n"
+  "   media/codecs/amrwb/dec/patent_disclaimer.txt\n"
+  "   media/codecs/amrwb/enc/patent_disclaimer.txt\n"
+  "   media/codecs/m4v_h263/patent_disclaimer.txt\n"
+  "   media/codecs/mp3dec/patent_disclaimer.txt\n"
+  "   media/libstagefright/codecs/aacenc/patent_disclaimer.txt"
   license_type: BY_EXCEPTION_ONLY
 }
diff --git a/OWNERS b/OWNERS
index 0be1196..40c65e7 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,7 +1,6 @@
 # Bug component: 1344
 elaurent@google.com
 etalvala@google.com
-hkuang@google.com
 lajos@google.com
 
 # go/android-fwk-media-solutions for info on areas of ownership.
diff --git a/apex/Android.bp b/apex/Android.bp
index b9abd12..570ca01 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -23,7 +23,6 @@
 
 apex_defaults {
     name: "com.android.media-defaults",
-    updatable: true,
     bootclasspath_fragments: ["com.android.media-bootclasspath-fragment"],
     systemserverclasspath_fragments: ["com.android.media-systemserverclasspath-fragment"],
     multilib: {
@@ -57,6 +56,7 @@
     prebuilts: [
         "code_coverage.policy",
         "com.android.media-mediatranscoding.rc",
+        "com.android.media-mediatranscoding.32rc",
         "crash_dump.policy",
         "mediaextractor.policy",
         "media-linker-config",
@@ -67,14 +67,13 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media-androidManifest",
 
-    // IMPORTANT: For the APEX to be installed on Android 10 (API 29),
-    // min_sdk_version should be 29. This enables the build system to make
+    // IMPORTANT: q-launched-apex-module enables the build system to make
     // sure the package compatible to Android 10 in two ways:
     // - build the APEX package compatible to Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    min_sdk_version: "29",
+    defaults: ["q-launched-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
@@ -126,6 +125,26 @@
     // modified by the Soong or platform compat team.
     hidden_api: {
         max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+
+        // The following packages contain classes from other modules on the
+        // bootclasspath. That means that the hidden API flags for this module
+        // have to explicitly list every single class this module provides in
+        // those packages to differentiate them from the classes provided by
+        // other modules. That can include private classes that are not part
+        // of the API.
+        split_packages: [
+            "android.media",
+        ],
+
+        // The following packages and all their subpackages currently only
+        // contain classes from this bootclasspath_fragment. Listing a package
+        // here won't prevent other bootclasspath modules from adding classes
+        // in any of those packages, but it will prevent them from adding those
+        // classes into an API surface, e.g. public, system, etc. Doing so will
+        // result in a build failure due to inconsistent flags.
+        package_prefixes: [
+            "android.media.internal",
+        ],
     },
 }
 
@@ -148,7 +167,6 @@
 
 apex_defaults {
     name: "com.android.media.swcodec-defaults",
-    updatable: true,
     binaries: [
         "mediaswcodec",
     ],
@@ -160,6 +178,7 @@
     ],
     prebuilts: [
         "com.android.media.swcodec-mediaswcodec.rc",
+        "com.android.media.swcodec-mediaswcodec.32rc",
         "com.android.media.swcodec-ld.config.txt",
         "mediaswcodec.policy",
         "code_coverage.policy",
@@ -172,30 +191,46 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media.swcodec-androidManifest",
 
-    // IMPORTANT: For the APEX to be installed on Android 10 (API 29),
-    // min_sdk_version should be 29. This enables the build system to make
+    // IMPORTANT: q-launched-apex-module enables the build system to make
     // sure the package compatible to Android 10 in two ways:
     // - build the APEX package compatible to Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    min_sdk_version: "29",
+    defaults: ["q-launched-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
 }
 
+// install as mediatranscoding.* and mediaswcodec.* instead of init.*
+// so we are ready for the day when we have more than one *rc file within the apex.
+
 prebuilt_etc {
     name: "com.android.media-mediatranscoding.rc",
     src: "mediatranscoding.rc",
-    filename: "init.rc",
+    filename: "mediatranscoding.rc",
+    installable: false,
+}
+
+prebuilt_etc {
+    name: "com.android.media-mediatranscoding.32rc",
+    src: "mediatranscoding.32rc",
+    filename: "mediatranscoding.32rc",
     installable: false,
 }
 
 prebuilt_etc {
     name: "com.android.media.swcodec-mediaswcodec.rc",
     src: "mediaswcodec.rc",
-    filename: "init.rc",
+    filename: "mediaswcodec.rc",
+    installable: false,
+}
+
+prebuilt_etc {
+    name: "com.android.media.swcodec-mediaswcodec.32rc",
+    src: "mediaswcodec.32rc",
+    filename: "mediaswcodec.32rc",
     installable: false,
 }
 
diff --git a/apex/OWNERS b/apex/OWNERS
index 5587f5f..54802d4 100644
--- a/apex/OWNERS
+++ b/apex/OWNERS
@@ -1,6 +1,7 @@
-chz@google.com
-dwkang@google.com
+essick@google.com
 jiyong@google.com
 lajos@google.com
-marcone@google.com
-wjia@google.com
+nchalko@google.com
+
+include platform/packages/modules/common:/MODULES_OWNERS
+
diff --git a/apex/manifest.json b/apex/manifest.json
index 5d72031..4b75b04 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,6 @@
 {
   "name": "com.android.media",
-  "version": 319999900,
+  "version": 339990000,
   "requireNativeLibs": [
     "libandroid.so",
     "libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index b0d962d..fbcbb69 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
 {
   "name": "com.android.media.swcodec",
-  "version": 319999900,
+  "version": 339990000,
   "requireNativeLibs": [
     ":sphal"
   ]
diff --git a/apex/mediaswcodec.32rc b/apex/mediaswcodec.32rc
new file mode 100644
index 0000000..f40d172
--- /dev/null
+++ b/apex/mediaswcodec.32rc
@@ -0,0 +1,8 @@
+##  for SDK releases >= 32
+##
+service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
+    class main
+    user mediacodec
+    group camera drmrpc mediadrm
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index 0c9b8c8..46799c7 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -1,3 +1,6 @@
+##  for SDK releases 29..31
+##  where writepid has not yet been replaced by task_profiles
+##
 service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
     class main
     user mediacodec
diff --git a/apex/mediatranscoding.32rc b/apex/mediatranscoding.32rc
new file mode 100644
index 0000000..edba9b9
--- /dev/null
+++ b/apex/mediatranscoding.32rc
@@ -0,0 +1,15 @@
+##  for SDK releases >= 32
+##
+#
+# media.transcoding service is defined on com.android.media apex which goes back
+# to API29, but we only want it started on API31+ devices. So we declare it as
+# "disabled" and start it explicitly on boot.
+service media.transcoding /apex/com.android.media/bin/mediatranscoding
+    class main
+    user media
+    group media
+    ioprio rt 4
+    # Restrict to little cores only with system-background cpuset.
+    task_profiles ServiceCapacityLow
+    interface aidl media.transcoding
+    disabled
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index ae9f8ba..6e453be 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -1,3 +1,7 @@
+##  for SDK releases 29..31
+##  where writepid has not yet been replaced by task_profiles
+##
+#
 # media.transcoding service is defined on com.android.media apex which goes back
 # to API29, but we only want it started on API31+ devices. So we declare it as
 # "disabled" and start it explicitly on boot.
diff --git a/camera/Android.bp b/camera/Android.bp
index 6878c20..e44202b 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -43,6 +43,10 @@
     ],
 }
 
+cc_library_headers {
+    name: "camera_headers",
+    export_include_dirs: ["include"],
+}
 cc_library_shared {
     name: "libcamera_client",
 
@@ -109,6 +113,30 @@
 
 }
 
+cc_library_host_static {
+    name: "libcamera_client_host",
+
+    srcs: [
+        "CameraMetadata.cpp",
+        "VendorTagDescriptor.cpp",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcamera_metadata",
+    ],
+
+    include_dirs: [
+        "system/media/private/camera/include",
+        "frameworks/native/include/media/openmax",
+    ],
+
+    export_include_dirs: [
+        "include",
+        "include/camera"
+    ],
+}
+
 // AIDL interface between camera clients and the camera service.
 filegroup {
     name: "libcamera_client_aidl",
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 03439fd..24c9108 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -68,6 +68,9 @@
         unavailablePhysicalIds16.push_back(String16(id8));
     }
     res = parcel->writeString16Vector(unavailablePhysicalIds16);
+    if (res != OK) return res;
+
+    res = parcel->writeString16(String16(clientPackage));
     return res;
 }
 
@@ -86,6 +89,12 @@
     for (auto& id16 : unavailablePhysicalIds16) {
         unavailablePhysicalIds.push_back(String8(id16));
     }
+
+    String16 tempClientPackage;
+    res = parcel->readString16(&tempClientPackage);
+    if (res != OK) return res;
+    clientPackage = String8(tempClientPackage);
+
     return res;
 }
 
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 28e037f..d1aa36a 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -52,6 +52,12 @@
         return err;
     }
 
+    float maxPreviewFps = 0;
+    if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
+        return err;
+    }
+
     int dataSpace = 0;
     if ((err = parcel->readInt32(&dataSpace)) != OK) {
         ALOGE("%s: Failed to read dataSpace from parcel", __FUNCTION__);
@@ -112,9 +118,22 @@
         return err;
     }
 
+    int64_t dynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    if ((err = parcel->readInt64(&dynamicRangeProfile)) != OK) {
+        ALOGE("%s: Failed to read dynamic range profile type from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    if ((err = parcel->readInt64(&streamUseCase)) != OK) {
+        ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+        return err;
+    }
+
     mWidth = width;
     mHeight = height;
     mFormat = format;
+    mMaxPreviewFps = maxPreviewFps;
     mDataSpace = dataSpace;
     mUsage = usage;
     mRequestCount = requestCount;
@@ -125,6 +144,8 @@
     mHistogramType = histogramType;
     mHistogramBins = std::move(histogramBins);
     mHistogramCounts = std::move(histogramCounts);
+    mDynamicRangeProfile = dynamicRangeProfile;
+    mStreamUseCase = streamUseCase;
 
     return OK;
 }
@@ -152,6 +173,11 @@
         return err;
     }
 
+    if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to write stream maxPreviewFps!", __FUNCTION__);
+        return err;
+    }
+
     if ((err = parcel->writeInt32(mDataSpace)) != OK) {
         ALOGE("%s: Failed to write stream dataSpace!", __FUNCTION__);
         return err;
@@ -202,6 +228,16 @@
         return err;
     }
 
+    if ((err = parcel->writeInt64(mDynamicRangeProfile)) != OK) {
+        ALOGE("%s: Failed to write dynamic range profile type", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt64(mStreamUseCase)) != OK) {
+        ALOGE("%s: Failed to write stream use case!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
@@ -223,11 +259,13 @@
         mApiLevel(0),
         mIsNdk(false),
         mLatencyMs(-1),
+        mMaxPreviewFps(0),
         mSessionType(0),
         mInternalReconfigure(0),
         mRequestCount(0),
         mResultErrorCount(0),
-        mDeviceError(false) {}
+        mDeviceError(false),
+        mVideoStabilizationMode(-1) {}
 
 CameraSessionStats::CameraSessionStats(const String16& cameraId,
         int facing, int newCameraState, const String16& clientName,
@@ -239,11 +277,13 @@
                 mApiLevel(apiLevel),
                 mIsNdk(isNdk),
                 mLatencyMs(latencyMs),
+                mMaxPreviewFps(0),
                 mSessionType(0),
                 mInternalReconfigure(0),
                 mRequestCount(0),
                 mResultErrorCount(0),
-                mDeviceError(0) {}
+                mDeviceError(0),
+                mVideoStabilizationMode(-1) {}
 
 status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
     if (parcel == NULL) {
@@ -295,6 +335,12 @@
         return err;
     }
 
+    float maxPreviewFps;
+    if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
+        return err;
+    }
+
     int32_t sessionType;
     if ((err = parcel->readInt32(&sessionType)) != OK) {
         ALOGE("%s: Failed to read session type from parcel", __FUNCTION__);
@@ -331,6 +377,18 @@
         return err;
     }
 
+    String16 userTag;
+    if ((err = parcel->readString16(&userTag)) != OK) {
+        ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    int32_t videoStabilizationMode;
+    if ((err = parcel->readInt32(&videoStabilizationMode)) != OK) {
+        ALOGE("%s: Failed to read video stabilization mode from parcel", __FUNCTION__);
+        return err;
+    }
+
     mCameraId = id;
     mFacing = facing;
     mNewCameraState = newCameraState;
@@ -338,12 +396,15 @@
     mApiLevel = apiLevel;
     mIsNdk = isNdk;
     mLatencyMs = latencyMs;
+    mMaxPreviewFps = maxPreviewFps;
     mSessionType = sessionType;
     mInternalReconfigure = internalReconfigure;
     mRequestCount = requestCount;
     mResultErrorCount = resultErrorCount;
     mDeviceError = deviceError;
     mStreamStats = std::move(streamStats);
+    mUserTag = userTag;
+    mVideoStabilizationMode = videoStabilizationMode;
 
     return OK;
 }
@@ -391,6 +452,11 @@
         return err;
     }
 
+    if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
+        ALOGE("%s: Failed to write maxPreviewFps!", __FUNCTION__);
+        return err;
+    }
+
     if ((err = parcel->writeInt32(mSessionType)) != OK) {
         ALOGE("%s: Failed to write session type!", __FUNCTION__);
         return err;
@@ -421,6 +487,15 @@
         return err;
     }
 
+    if ((err = parcel->writeString16(mUserTag)) != OK) {
+        ALOGE("%s: Failed to write user tag!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mVideoStabilizationMode)) != OK) {
+        ALOGE("%s: Failed to write video stabilization mode!", __FUNCTION__);
+        return err;
+    }
     return OK;
 }
 
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index af3c492..3473780 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include <camera/CameraUtils.h>
+#include <camera/camera2/OutputConfiguration.h>
 #include <media/hardware/HardwareAPI.h>
 
 #include <android-base/properties.h>
@@ -31,7 +32,7 @@
 const char *kCameraServiceDisabledProperty = "config.disable_cameraservice";
 
 status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
-                /*out*/int32_t* transform) {
+        int mirrorMode, /*out*/int32_t* transform) {
     ALOGV("%s", __FUNCTION__);
 
     if (transform == NULL) {
@@ -55,9 +56,18 @@
 
     int32_t& flags = *transform;
 
-    bool mirror = (entryFacing.data.u8[0] == ANDROID_LENS_FACING_FRONT);
+    int32_t mirror = 0;
+    if (mirrorMode == OutputConfiguration::MIRROR_MODE_AUTO &&
+            entryFacing.data.u8[0] == ANDROID_LENS_FACING_FRONT) {
+        mirror = NATIVE_WINDOW_TRANSFORM_FLIP_H;
+    } else if (mirrorMode == OutputConfiguration::MIRROR_MODE_H) {
+        mirror = NATIVE_WINDOW_TRANSFORM_FLIP_H;
+    } else if (mirrorMode == OutputConfiguration::MIRROR_MODE_V) {
+        mirror = NATIVE_WINDOW_TRANSFORM_FLIP_V;
+    }
+
     int orientation = entry.data.i32[0];
-    if (!mirror) {
+    if (mirror == 0) {
         switch (orientation) {
             case 0:
                 flags = 0;
@@ -77,25 +87,25 @@
                 return INVALID_OPERATION;
         }
     } else {
-        // Front camera needs to be horizontally flipped for mirror-like behavior.
+        // - Front camera needs to be horizontally flipped for mirror-like behavior.
+        // - Application-specified mirroring needs to be applied.
         // Note: Flips are applied before rotates; using XOR here as some of these flags are
         // composed in terms of other flip/rotation flags, and are not bitwise-ORable.
         switch (orientation) {
             case 0:
-                flags = NATIVE_WINDOW_TRANSFORM_FLIP_H;
+                flags = mirror;
                 break;
             case 90:
-                flags = NATIVE_WINDOW_TRANSFORM_FLIP_H ^
+                flags = mirror ^
                         NATIVE_WINDOW_TRANSFORM_ROT_270;
                 break;
             case 180:
-                flags = NATIVE_WINDOW_TRANSFORM_FLIP_H ^
+                flags = mirror ^
                         NATIVE_WINDOW_TRANSFORM_ROT_180;
                 break;
             case 270:
-                flags = NATIVE_WINDOW_TRANSFORM_FLIP_H ^
+                flags = mirror ^
                         NATIVE_WINDOW_TRANSFORM_ROT_90;
-
                 break;
             default:
                 ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
diff --git a/camera/OWNERS b/camera/OWNERS
index 1b548e4..385c163 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -5,4 +5,3 @@
 jchowdhary@google.com
 shuzhenwang@google.com
 ruchamk@google.com
-
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 24fa912..b37803a 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -760,7 +760,7 @@
     Mutex::Autolock al(sLock);
     if (sGlobalVendorTagDescriptorCache == NULL) {
         ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__);
-        return VENDOR_TAG_NAME_ERR;
+        return VENDOR_TAG_TYPE_ERR;
     }
     return sGlobalVendorTagDescriptorCache->getTagType(tag, id);
 }
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 78a77d4..1e748c7 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -173,6 +173,13 @@
 
     void setTorchMode(String cameraId, boolean enabled, IBinder clientBinder);
 
+    // Change the brightness level of the flash unit associated with cameraId to strengthLevel.
+    // If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
+    void turnOnTorchWithStrengthLevel(String cameraId, int strengthLevel, IBinder clientBinder);
+
+    // Get the brightness level of the flash unit associated with cameraId.
+    int getTorchStrengthLevel(String cameraId);
+
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
      *
@@ -180,6 +187,8 @@
      */
     const int EVENT_NONE = 0;
     const int EVENT_USER_SWITCHED = 1; // The argument is the set of new foreground user IDs.
+    const int EVENT_USB_DEVICE_ATTACHED = 2; // The arguments are the deviceId and vendorId
+    const int EVENT_USB_DEVICE_DETACHED = 3; // The arguments are the deviceId and vendorId
     oneway void notifySystemEvent(int eventId, in int[] args);
 
     /**
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index c54813c..5f17f5b 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -83,6 +83,8 @@
 
     oneway void onTorchStatusChanged(int status, String cameraId);
 
+    oneway void onTorchStrengthLevelChanged(String cameraId, int newTorchStrength);
+
     /**
      * Notify registered clients about camera access priority changes.
      * Clients which were previously unable to open a certain camera device
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index f5d0120..88783fb 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -44,4 +44,9 @@
      * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_270}).
      */
     int getRotateAndCropOverride(String packageName, int lensFacing, int userId);
+
+    /**
+     * Checks if the camera has been disabled via device policy.
+     */
+    boolean isCameraDisabled();
 }
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index ebc09d7..7a8a4ba 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -146,6 +146,20 @@
         mSurfaceIdxList.push_back(surfaceIdx);
     }
 
+    int32_t hasUserTag;
+    if ((err = parcel->readInt32(&hasUserTag)) != OK) {
+        ALOGE("%s: Failed to read user tag availability flag", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    if (hasUserTag) {
+        String16 userTag;
+        if ((err = parcel->readString16(&userTag)) != OK) {
+            ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        mUserTag = String8(userTag).c_str();
+    }
+
     return OK;
 }
 
@@ -213,6 +227,14 @@
             return err;
         }
     }
+
+    if (mUserTag.empty()) {
+        parcel->writeInt32(0);
+    } else {
+        parcel->writeInt32(1);
+        parcel->writeString16(String16(mUserTag.c_str()));
+    }
+
     return OK;
 }
 
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 2bccd87..11d4960 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -23,6 +23,7 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <binder/Parcel.h>
 #include <gui/view/Surface.h>
+#include <system/camera_metadata.h>
 #include <utils/String8.h>
 
 namespace android {
@@ -76,6 +77,22 @@
     return mSensorPixelModesUsed;
 }
 
+int64_t OutputConfiguration::getDynamicRangeProfile() const {
+    return mDynamicRangeProfile;
+}
+
+int64_t OutputConfiguration::getStreamUseCase() const {
+    return mStreamUseCase;
+}
+
+int OutputConfiguration::getTimestampBase() const {
+    return mTimestampBase;
+}
+
+int OutputConfiguration::getMirrorMode() const {
+    return mMirrorMode;
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -84,7 +101,11 @@
         mHeight(0),
         mIsDeferred(false),
         mIsShared(false),
-        mIsMultiResolution(false) {
+        mIsMultiResolution(false),
+        mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+        mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+        mTimestampBase(TIMESTAMP_BASE_DEFAULT),
+        mMirrorMode(MIRROR_MODE_AUTO) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -165,6 +186,30 @@
         ALOGE("%s: Failed to read sensor pixel mode(s) from parcel", __FUNCTION__);
         return err;
     }
+    int64_t dynamicProfile;
+    if ((err = parcel->readInt64(&dynamicProfile)) != OK) {
+        ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    if ((err = parcel->readInt64(&streamUseCase)) != OK) {
+        ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int timestampBase = TIMESTAMP_BASE_DEFAULT;
+    if ((err = parcel->readInt32(&timestampBase)) != OK) {
+        ALOGE("%s: Failed to read timestamp base from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int mirrorMode = MIRROR_MODE_AUTO;
+    if ((err = parcel->readInt32(&mirrorMode)) != OK) {
+        ALOGE("%s: Failed to read mirroring mode from parcel", __FUNCTION__);
+        return err;
+    }
+
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -173,6 +218,9 @@
     mIsDeferred = isDeferred != 0;
     mIsShared = isShared != 0;
     mIsMultiResolution = isMultiResolution != 0;
+    mStreamUseCase = streamUseCase;
+    mTimestampBase = timestampBase;
+    mMirrorMode = mirrorMode;
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
@@ -181,10 +229,14 @@
     }
 
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
+    mDynamicRangeProfile = dynamicProfile;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
-          " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
-          mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
+          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
+          ", timestampBase = %d, mirrorMode = %d",
+          __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
+          String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
+          mMirrorMode);
 
     return err;
 }
@@ -199,6 +251,10 @@
     mIsShared = isShared;
     mPhysicalCameraId = physicalId;
     mIsMultiResolution = false;
+    mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+    mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+    mTimestampBase = TIMESTAMP_BASE_DEFAULT;
+    mMirrorMode = MIRROR_MODE_AUTO;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -207,7 +263,11 @@
     int width, int height, bool isShared)
   : mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
-    mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false) { }
+    mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
+    mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+    mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+    mTimestampBase(TIMESTAMP_BASE_DEFAULT),
+    mMirrorMode(MIRROR_MODE_AUTO) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -254,6 +314,18 @@
     err = parcel->writeParcelableVector(mSensorPixelModesUsed);
     if (err != OK) return err;
 
+    err = parcel->writeInt64(mDynamicRangeProfile);
+    if (err != OK) return err;
+
+    err = parcel->writeInt64(mStreamUseCase);
+    if (err != OK) return err;
+
+    err = parcel->writeInt32(mTimestampBase);
+    if (err != OK) return err;
+
+    err = parcel->writeInt32(mMirrorMode);
+    if (err != OK) return err;
+
     return OK;
 }
 
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8ca8920..094a3c1 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -43,6 +43,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V1-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index e156994..8e53968 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -85,11 +85,17 @@
      */
     std::vector<String8> unavailablePhysicalIds;
 
+    /**
+     * Client package name if camera is open, otherwise not applicable
+     */
+    String8 clientPackage;
+
     virtual status_t writeToParcel(android::Parcel* parcel) const;
     virtual status_t readFromParcel(const android::Parcel* parcel);
 
-    CameraStatus(String8 id, int32_t s, const std::vector<String8>& unavailSubIds) :
-            cameraId(id), status(s), unavailablePhysicalIds(unavailSubIds) {}
+    CameraStatus(String8 id, int32_t s, const std::vector<String8>& unavailSubIds,
+            const String8& clientPkg) : cameraId(id), status(s),
+            unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg) {}
     CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
 };
 
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index c398aca..aaa88b2 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -19,6 +19,8 @@
 
 #include <binder/Parcelable.h>
 
+#include <camera/CameraMetadata.h>
+
 namespace android {
 namespace hardware {
 
@@ -35,6 +37,7 @@
     int mWidth;
     int mHeight;
     int mFormat;
+    float mMaxPreviewFps;
     int mDataSpace;
     int64_t mUsage;
 
@@ -60,16 +63,26 @@
     // size(mHistogramBins) + 1 = size(mHistogramCounts)
     std::vector<int64_t> mHistogramCounts;
 
+    // Dynamic range profile
+    int64_t mDynamicRangeProfile;
+    // Stream use case
+    int64_t mStreamUseCase;
+
     CameraStreamStats() :
-            mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
+            mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
             mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
-            mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN) {}
-    CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
-            int maxHalBuffers, int maxAppBuffers)
-            : mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
-              mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
-              mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
-              mHistogramType(HISTOGRAM_TYPE_UNKNOWN) {}
+            mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
+            mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+            mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+    CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
+            int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
+            int streamUseCase)
+            : mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
+              mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
+              mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
+              mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
+              mDynamicRangeProfile(dynamicRangeProfile),
+              mStreamUseCase(streamUseCase) {}
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
     virtual status_t writeToParcel(android::Parcel* parcel) const override;
@@ -111,6 +124,7 @@
     bool mIsNdk;
     // latency in ms for camera open, close, or session creation.
     int mLatencyMs;
+    float mMaxPreviewFps;
 
     // Session info and statistics
     int mSessionType;
@@ -122,6 +136,8 @@
     // Whether the device runs into an error state
     bool mDeviceError;
     std::vector<CameraStreamStats> mStreamStats;
+    String16 mUserTag;
+    int mVideoStabilizationMode;
 
     // Constructors
     CameraSessionStats();
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index a397ccd..31d25e7 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -37,10 +37,13 @@
          * metadata.  This is based on the sensor orientation and lens facing
          * attributes of the camera device.
          *
+         * If mirrorMode is not AUTO, it will be used to override the
+         * lens-facing-based mirroring behavior.
+         *
          * Returns OK on success, or a negative error code.
          */
         static status_t getRotationTransform(const CameraMetadata& staticInfo,
-                /*out*/int32_t* transform);
+                int mirrorMode, /*out*/int32_t* transform);
 
         /**
          * Check if the image data is VideoNativeHandleMetadata, that contains a native handle.
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 506abab..28dbc7c 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -63,6 +63,8 @@
 
     void*                   mContext; // arbitrary user context from NDK apps, null for java apps
 
+    std::string             mUserTag; // The string representation of object passed into setTag.
+
     /**
      * Keep impl up-to-date with CaptureRequest.java in frameworks/base
      */
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index f80ed3a..b842885 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -38,16 +38,34 @@
         SURFACE_TYPE_SURFACE_VIEW = 0,
         SURFACE_TYPE_SURFACE_TEXTURE = 1
     };
+    enum TimestampBaseType {
+        TIMESTAMP_BASE_DEFAULT = 0,
+        TIMESTAMP_BASE_SENSOR = 1,
+        TIMESTAMP_BASE_MONOTONIC = 2,
+        TIMESTAMP_BASE_REALTIME = 3,
+        TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4
+    };
+    enum MirrorModeType {
+        MIRROR_MODE_AUTO = 0,
+        MIRROR_MODE_NONE = 1,
+        MIRROR_MODE_H = 2,
+        MIRROR_MODE_V = 3,
+    };
+
     const std::vector<sp<IGraphicBufferProducer>>& getGraphicBufferProducers() const;
     int                        getRotation() const;
     int                        getSurfaceSetID() const;
     int                        getSurfaceType() const;
     int                        getWidth() const;
     int                        getHeight() const;
+    int64_t                    getDynamicRangeProfile() const;
     bool                       isDeferred() const;
     bool                       isShared() const;
     String16                   getPhysicalCameraId() const;
     bool                       isMultiResolution() const;
+    int64_t                    getStreamUseCase() const;
+    int                        getTimestampBase() const;
+    int                        getMirrorMode() const;
 
     // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
     const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -89,7 +107,11 @@
                 gbpsEqual(other) &&
                 mPhysicalCameraId == other.mPhysicalCameraId &&
                 mIsMultiResolution == other.mIsMultiResolution &&
-                sensorPixelModesUsedEqual(other));
+                sensorPixelModesUsedEqual(other) &&
+                mDynamicRangeProfile == other.mDynamicRangeProfile &&
+                mStreamUseCase == other.mStreamUseCase &&
+                mTimestampBase == other.mTimestampBase &&
+                mMirrorMode == other.mMirrorMode);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -126,6 +148,18 @@
         if (!sensorPixelModesUsedEqual(other)) {
             return sensorPixelModesUsedLessThan(other);
         }
+        if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
+            return mDynamicRangeProfile < other.mDynamicRangeProfile;
+        }
+        if (mStreamUseCase != other.mStreamUseCase) {
+            return mStreamUseCase < other.mStreamUseCase;
+        }
+        if (mTimestampBase != other.mTimestampBase) {
+            return mTimestampBase < other.mTimestampBase;
+        }
+        if (mMirrorMode != other.mMirrorMode) {
+            return mMirrorMode < other.mMirrorMode;
+        }
         return gbpsLessThan(other);
     }
 
@@ -150,6 +184,10 @@
     String16                   mPhysicalCameraId;
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
+    int64_t                    mDynamicRangeProfile;
+    int64_t                    mStreamUseCase;
+    int                        mTimestampBase;
+    int                        mMirrorMode;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 1ac8482..9c98778 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -29,6 +29,7 @@
 #include "impl/ACameraCaptureSession.h"
 
 #include "impl/ACameraCaptureSession.inc"
+#include "NdkCameraCaptureSession.inc"
 
 using namespace android;
 
@@ -72,22 +73,16 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     ATRACE_CALL();
-    if (session == nullptr || requests == nullptr || numRequests < 1) {
-        ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
-                __FUNCTION__, session, numRequests, requests);
-        return ACAMERA_ERROR_INVALID_PARAMETER;
-    }
+    return captureTemplate(session, cbs, numRequests, requests, captureSequenceId);
+}
 
-    if (session->isClosed()) {
-        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
-        if (captureSequenceId != nullptr) {
-            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
-        }
-        return ACAMERA_ERROR_SESSION_CLOSED;
-    }
-
-    return session->capture(
-            cbs, numRequests, requests, captureSequenceId);
+EXPORT
+camera_status_t ACameraCaptureSession_captureV2(
+        ACameraCaptureSession* session, /*optional*/ACameraCaptureSession_captureCallbacksV2* cbs,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    return captureTemplate(session, cbs, numRequests, requests, captureSequenceId);
 }
 
 EXPORT
@@ -97,22 +92,26 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     ATRACE_CALL();
-    if (session == nullptr || requests == nullptr || numRequests < 1) {
-        ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
-                __FUNCTION__, session, numRequests, requests);
-        return ACAMERA_ERROR_INVALID_PARAMETER;
-    }
+    return captureTemplate(session, lcbs, numRequests, requests, captureSequenceId);
+}
 
-    if (session->isClosed()) {
-        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
-        if (captureSequenceId) {
-            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
-        }
-        return ACAMERA_ERROR_SESSION_CLOSED;
-    }
+EXPORT
+camera_status_t ACameraCaptureSession_logicalCamera_captureV2(
+        ACameraCaptureSession* session,
+        /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    return captureTemplate(session, lcbs, numRequests, requests, captureSequenceId);
+}
 
-    return session->capture(
-            lcbs, numRequests, requests, captureSequenceId);
+EXPORT
+camera_status_t ACameraCaptureSession_setRepeatingRequestV2(
+        ACameraCaptureSession* session, /*optional*/ACameraCaptureSession_captureCallbacksV2* cbs,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    return setRepeatingRequestTemplate(session, cbs, numRequests, requests, captureSequenceId);
 }
 
 EXPORT
@@ -121,23 +120,10 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     ATRACE_CALL();
-    if (session == nullptr || requests == nullptr || numRequests < 1) {
-        ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
-                __FUNCTION__, session, numRequests, requests);
-        return ACAMERA_ERROR_INVALID_PARAMETER;
-    }
-
-    if (session->isClosed()) {
-        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
-        if (captureSequenceId) {
-            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
-        }
-        return ACAMERA_ERROR_SESSION_CLOSED;
-    }
-
-    return session->setRepeatingRequest(cbs, numRequests, requests, captureSequenceId);
+    return setRepeatingRequestTemplate(session, cbs, numRequests, requests, captureSequenceId);
 }
 
+
 EXPORT
 camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequest(
         ACameraCaptureSession* session,
@@ -145,21 +131,18 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) {
     ATRACE_CALL();
-    if (session == nullptr || requests == nullptr || numRequests < 1) {
-        ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
-                __FUNCTION__, session, numRequests, requests);
-        return ACAMERA_ERROR_INVALID_PARAMETER;
-    }
+    return setRepeatingRequestTemplate(session, lcbs, numRequests, requests, captureSequenceId);
+}
 
-    if (session->isClosed()) {
-        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
-        if (captureSequenceId) {
-            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
-        }
-        return ACAMERA_ERROR_SESSION_CLOSED;
-    }
 
-    return session->setRepeatingRequest(lcbs, numRequests, requests, captureSequenceId);
+EXPORT
+camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequestV2(
+        ACameraCaptureSession* session,
+        /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    return setRepeatingRequestTemplate(session, lcbs, numRequests, requests, captureSequenceId);
 }
 
 EXPORT
diff --git a/camera/ndk/NdkCameraCaptureSession.inc b/camera/ndk/NdkCameraCaptureSession.inc
new file mode 100644
index 0000000..258e20d
--- /dev/null
+++ b/camera/ndk/NdkCameraCaptureSession.inc
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "impl/ACameraCaptureSession.h"
+
+#include <camera/NdkCameraCaptureSession.h>
+
+using namespace android;
+
+template <class CallbackType>
+camera_status_t captureTemplate(
+        ACameraCaptureSession* session,
+        /*optional*/CallbackType* cbs,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    if (session == nullptr || requests == nullptr || numRequests < 1) {
+        ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
+                __FUNCTION__, session, numRequests, requests);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        if (captureSequenceId) {
+            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+        }
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    return session->capture(
+            cbs, numRequests, requests, captureSequenceId);
+}
+
+template <class CallbackType>
+camera_status_t setRepeatingRequestTemplate(
+        ACameraCaptureSession* session,
+        /*optional*/CallbackType* cbs,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) {
+    ATRACE_CALL();
+    if (session == nullptr || requests == nullptr || numRequests < 1) {
+        ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
+                __FUNCTION__, session, numRequests, requests);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+
+    if (session->isClosed()) {
+        ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+        if (captureSequenceId) {
+            *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+        }
+        return ACAMERA_ERROR_SESSION_CLOSED;
+    }
+
+    return session->setRepeatingRequest(cbs, numRequests, requests, captureSequenceId);
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index dd652c7..7997768 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -26,8 +26,6 @@
 #include "ACaptureRequest.h"
 #include "ACameraCaptureSession.h"
 
-#include "ACameraCaptureSession.inc"
-
 ACameraDevice::~ACameraDevice() {
     mDevice->stopLooperAndDisconnect();
 }
@@ -913,6 +911,7 @@
         case kWhatOnError:
         case kWhatSessionStateCb:
         case kWhatCaptureStart:
+        case kWhatCaptureStart2:
         case kWhatCaptureResult:
         case kWhatLogicalCaptureResult:
         case kWhatCaptureFail:
@@ -985,6 +984,7 @@
         }
         case kWhatSessionStateCb:
         case kWhatCaptureStart:
+        case kWhatCaptureStart2:
         case kWhatCaptureResult:
         case kWhatLogicalCaptureResult:
         case kWhatCaptureFail:
@@ -1004,6 +1004,7 @@
             sp<CaptureRequest> requestSp = nullptr;
             switch (msg->what()) {
                 case kWhatCaptureStart:
+                case kWhatCaptureStart2:
                 case kWhatCaptureResult:
                 case kWhatLogicalCaptureResult:
                 case kWhatCaptureFail:
@@ -1055,6 +1056,35 @@
                     freeACaptureRequest(request);
                     break;
                 }
+                case kWhatCaptureStart2:
+                {
+                    ACameraCaptureSession_captureCallback_startV2 onStart2;
+                    found = msg->findPointer(kCallbackFpKey, (void**) &onStart2);
+                    if (!found) {
+                        ALOGE("%s: Cannot find capture startV2 callback!", __FUNCTION__);
+                        return;
+                    }
+                    if (onStart2 == nullptr) {
+                        return;
+                    }
+                    int64_t timestamp;
+                    found = msg->findInt64(kTimeStampKey, &timestamp);
+                    if (!found) {
+                        ALOGE("%s: Cannot find timestamp!", __FUNCTION__);
+                        return;
+                    }
+                    int64_t frameNumber;
+                    found = msg->findInt64(kFrameNumberKey, &frameNumber);
+                    if (!found) {
+                        ALOGE("%s: Cannot find frame number!", __FUNCTION__);
+                        return;
+                    }
+
+                    ACaptureRequest* request = allocateACaptureRequest(requestSp, mId);
+                    (*onStart2)(context, session.get(), request, timestamp, frameNumber);
+                    freeACaptureRequest(request);
+                    break;
+                }
                 case kWhatCaptureResult:
                 {
                     ACameraCaptureSession_captureCallback_result onResult;
@@ -1285,7 +1315,8 @@
         ACameraCaptureSession_captureCallbacks* cbs) :
         mSession(session), mRequests(requests),
         mIsRepeating(isRepeating),
-        mIsLogicalCameraCallback(false) {
+        mIsLogicalCameraCallback(false),
+        mIs2Callback(false) {
     initCaptureCallbacks(cbs);
 
     if (cbs != nullptr) {
@@ -1301,7 +1332,8 @@
         ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
         mSession(session), mRequests(requests),
         mIsRepeating(isRepeating),
-        mIsLogicalCameraCallback(true) {
+        mIsLogicalCameraCallback(true),
+        mIs2Callback(false) {
     initCaptureCallbacks(lcbs);
 
     if (lcbs != nullptr) {
@@ -1310,6 +1342,40 @@
     }
 }
 
+CameraDevice::CallbackHolder::CallbackHolder(
+        sp<ACameraCaptureSession>          session,
+        const Vector<sp<CaptureRequest> >& requests,
+        bool                               isRepeating,
+        ACameraCaptureSession_captureCallbacksV2* cbs) :
+        mSession(session), mRequests(requests),
+        mIsRepeating(isRepeating),
+        mIsLogicalCameraCallback(false),
+        mIs2Callback(true) {
+    initCaptureCallbacksV2(cbs);
+
+    if (cbs != nullptr) {
+        mOnCaptureCompleted = cbs->onCaptureCompleted;
+        mOnCaptureFailed = cbs->onCaptureFailed;
+    }
+}
+
+CameraDevice::CallbackHolder::CallbackHolder(
+        sp<ACameraCaptureSession>          session,
+        const Vector<sp<CaptureRequest> >& requests,
+        bool                               isRepeating,
+        ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs) :
+        mSession(session), mRequests(requests),
+        mIsRepeating(isRepeating),
+        mIsLogicalCameraCallback(true),
+        mIs2Callback(true) {
+    initCaptureCallbacksV2(lcbs);
+
+    if (lcbs != nullptr) {
+        mOnLogicalCameraCaptureCompleted = lcbs->onLogicalCameraCaptureCompleted;
+        mOnLogicalCameraCaptureFailed = lcbs->onLogicalCameraCaptureFailed;
+    }
+}
+
 void
 CameraDevice::checkRepeatingSequenceCompleteLocked(
     const int sequenceId, const int64_t lastFrameNumber) {
@@ -1536,7 +1602,6 @@
         const CaptureResultExtras& resultExtras,
         int64_t timestamp) {
     binder::Status ret = binder::Status::ok();
-
     sp<CameraDevice> dev = mDevice.promote();
     if (dev == nullptr) {
         return ret; // device has been closed
@@ -1551,11 +1616,14 @@
 
     int sequenceId = resultExtras.requestId;
     int32_t burstId = resultExtras.burstId;
+    int64_t frameNumber = resultExtras.frameNumber;
 
     auto it = dev->mSequenceCallbackMap.find(sequenceId);
     if (it != dev->mSequenceCallbackMap.end()) {
         CallbackHolder cbh = (*it).second;
+        bool v2Callback = cbh.mIs2Callback;
         ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
+        ACameraCaptureSession_captureCallback_startV2 onStart2 = cbh.mOnCaptureStarted2;
         sp<ACameraCaptureSession> session = cbh.mSession;
         if ((size_t) burstId >= cbh.mRequests.size()) {
             ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1563,12 +1631,19 @@
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
         }
         sp<CaptureRequest> request = cbh.mRequests[burstId];
-        sp<AMessage> msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+        sp<AMessage> msg = nullptr;
+        if (v2Callback) {
+            msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
+            msg->setPointer(kCallbackFpKey, (void*) onStart2);
+        } else {
+            msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+            msg->setPointer(kCallbackFpKey, (void *)onStart);
+        }
         msg->setPointer(kContextKey, cbh.mContext);
         msg->setObject(kSessionSpKey, session);
-        msg->setPointer(kCallbackFpKey, (void*) onStart);
         msg->setObject(kCaptureRequestKey, request);
         msg->setInt64(kTimeStampKey, timestamp);
+        msg->setInt64(kFrameNumberKey, frameNumber);
         dev->postSessionMsgAndCleanup(msg);
     }
     return ret;
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 344d964..17988fe 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -215,6 +215,7 @@
         kWhatSessionStateCb,   // onReady, onActive
         // Capture callbacks
         kWhatCaptureStart,     // onCaptureStarted
+        kWhatCaptureStart2,    // onCaptureStarted with frame number
         kWhatCaptureResult,    // onCaptureProgressed, onCaptureCompleted
         kWhatLogicalCaptureResult, // onLogicalCameraCaptureCompleted
         kWhatCaptureFail,      // onCaptureFailed
@@ -294,11 +295,18 @@
                        const Vector<sp<CaptureRequest> >& requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
-
-        template <class T>
-        void initCaptureCallbacks(T* cbs) {
+        CallbackHolder(sp<ACameraCaptureSession>          session,
+                       const Vector<sp<CaptureRequest> >& requests,
+                       bool                               isRepeating,
+                       ACameraCaptureSession_captureCallbacksV2* cbs);
+        CallbackHolder(sp<ACameraCaptureSession>          session,
+                       const Vector<sp<CaptureRequest> >& requests,
+                       bool                               isRepeating,
+                       ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs);
+        void clearCallbacks() {
             mContext = nullptr;
             mOnCaptureStarted = nullptr;
+            mOnCaptureStarted2 = nullptr;
             mOnCaptureProgressed = nullptr;
             mOnCaptureCompleted = nullptr;
             mOnLogicalCameraCaptureCompleted = nullptr;
@@ -307,6 +315,24 @@
             mOnCaptureSequenceCompleted = nullptr;
             mOnCaptureSequenceAborted = nullptr;
             mOnCaptureBufferLost = nullptr;
+        }
+
+        template <class T>
+        void initCaptureCallbacksV2(T* cbs) {
+            clearCallbacks();
+            if (cbs != nullptr) {
+                mContext = cbs->context;
+                mOnCaptureStarted2 = cbs->onCaptureStarted;
+                mOnCaptureProgressed = cbs->onCaptureProgressed;
+                mOnCaptureSequenceCompleted = cbs->onCaptureSequenceCompleted;
+                mOnCaptureSequenceAborted = cbs->onCaptureSequenceAborted;
+                mOnCaptureBufferLost = cbs->onCaptureBufferLost;
+            }
+        }
+
+        template <class T>
+        void initCaptureCallbacks(T* cbs) {
+            clearCallbacks();
             if (cbs != nullptr) {
                 mContext = cbs->context;
                 mOnCaptureStarted = cbs->onCaptureStarted;
@@ -320,9 +346,11 @@
         Vector<sp<CaptureRequest> > mRequests;
         const bool                  mIsRepeating;
         const bool                  mIsLogicalCameraCallback;
+        const bool                  mIs2Callback;
 
         void*                       mContext;
         ACameraCaptureSession_captureCallback_start mOnCaptureStarted;
+        ACameraCaptureSession_captureCallback_startV2 mOnCaptureStarted2;
         ACameraCaptureSession_captureCallback_result mOnCaptureProgressed;
         ACameraCaptureSession_captureCallback_result mOnCaptureCompleted;
         ACameraCaptureSession_logicalCamera_captureCallback_result mOnLogicalCameraCaptureCompleted;
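
The split into clearCallbacks() plus two templates relies on duck typing: initCaptureCallbacksV2<T> compiles for any struct that exposes the shared field names, which is why one template serves both ACameraCaptureSession_captureCallbacksV2 and the logical-camera variant, while the fields unique to each struct are copied in the dedicated constructors. A minimal standalone sketch of that idiom, with hypothetical mock structs:

    #include <cstdio>

    // Mock stand-ins for the two V2 callback structs; only the shared fields matter here.
    struct MockCallbacksV2 {
        void* context;
        void (*onCaptureStarted)(void*);
    };
    struct MockLogicalCallbacksV2 {
        void* context;
        void (*onCaptureStarted)(void*);
        void (*onLogicalCameraCaptureCompleted)(void*); // struct-specific, ignored by the template
    };

    struct Holder {
        void* mContext = nullptr;
        void (*mOnCaptureStarted2)(void*) = nullptr;

        template <class T>
        void initCommon(T* cbs) {          // instantiates for any T with these member names
            if (cbs != nullptr) {
                mContext = cbs->context;
                mOnCaptureStarted2 = cbs->onCaptureStarted;
            }
        }
    };

    int main() {
        MockCallbacksV2 regular{nullptr, [](void*) { std::puts("regular start"); }};
        MockLogicalCallbacksV2 logical{nullptr, [](void*) { std::puts("logical start"); }, nullptr};

        Holder a, b;
        a.initCommon(&regular);   // same template body...
        b.initCommon(&logical);   // ...works for both struct types
        a.mOnCaptureStarted2(a.mContext);
        b.mOnCaptureStarted2(b.mContext);
        return 0;
    }
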
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 95ef2b2..5892f1a 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -189,8 +189,12 @@
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         AutoMutex lock(cm->mLock);
+        std::vector<String8> cameraIdList;
         for (auto& pair : cm->mDeviceStatusMap) {
-            const String8 &cameraId = pair.first;
+            cameraIdList.push_back(pair.first);
+        }
+
+        for (String8 cameraId : cameraIdList) {
             cm->onStatusChangedLocked(
                     CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
         }
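
The camera IDs are copied into a local vector before notifying, presumably because onStatusChangedLocked() can erase entries from mDeviceStatusMap, which would invalidate an iterator that is still walking the map. A standalone illustration of the copy-then-iterate pattern with generic standard-library types (not the real ACameraManager classes):

    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    std::map<std::string, int> deviceStatusMap = {{"0", 1}, {"1", 1}};

    // Stand-in for onStatusChangedLocked(): the notification may remove the entry.
    void notifyNotPresent(const std::string& cameraId) {
        deviceStatusMap.erase(cameraId);
    }

    int main() {
        // Snapshot the keys first; erasing inside the second loop no longer
        // touches an iterator into the container being modified.
        std::vector<std::string> cameraIdList;
        for (auto& pair : deviceStatusMap) {
            cameraIdList.push_back(pair.first);
        }
        for (const std::string& cameraId : cameraIdList) {
            notifyNotPresent(cameraId);
        }
        std::printf("entries left: %zu\n", deviceStatusMap.size());
        return 0;
    }
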
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index ccbfaa9..d53d809 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -95,6 +95,9 @@
         virtual binder::Status onTorchStatusChanged(int32_t, const String16&) {
             return binder::Status::ok();
         }
+        virtual binder::Status onTorchStrengthLevelChanged(const String16&, int32_t) {
+            return binder::Status::ok();
+        }
 
         virtual binder::Status onCameraAccessPrioritiesChanged();
         virtual binder::Status onCameraOpened(const String16&, const String16&) {
@@ -139,6 +142,8 @@
             return !(*this == other);
         }
         bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
@@ -152,6 +157,7 @@
             }
             if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
             return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
         }
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index dab2fef..05124c0 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -182,7 +182,7 @@
         int64_t format = entry.data.i64[i + STREAM_FORMAT_OFFSET];
         int64_t width = entry.data.i64[i + STREAM_WIDTH_OFFSET];
         int64_t height = entry.data.i64[i + STREAM_HEIGHT_OFFSET];
-        int64_t duration = entry.data.i32[i + STREAM_DURATION_OFFSET];
+        int64_t duration = entry.data.i64[i + STREAM_DURATION_OFFSET];
 
         // Leave the unfiltered format in so apps depending on previous wrong
         // filter behavior continue to work
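
The one-character fix above matters because a metadata entry exposes its payload through typed pointers, and indexing an int64 payload through the int32 view walks the buffer with a 4-byte stride instead of an 8-byte one. A simplified, self-contained illustration (hypothetical buffer, not the real camera_metadata accessors):

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Pretend this is the duration payload of an i64 metadata entry.
        const int64_t durations[] = {33333333, 16666666, 41666666};

        const int64_t* asI64 = durations;
        const int32_t* asI32 = reinterpret_cast<const int32_t*>(durations); // illustration only

        const size_t i = 1;  // second element of the payload
        // Indexing with the wrong stride lands on the wrong 4 bytes of the buffer:
        std::printf("entry.data.i32[%zu] = %d (wrong)\n", i, asI32[i]);
        std::printf("entry.data.i64[%zu] = %lld (right)\n", i, (long long) asI64[i]);
        return 0;
    }
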
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 2b7f040..b0fd00c 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -811,6 +811,184 @@
         int numRequests, ACaptureRequest** requests,
         /*optional*/int* captureSequenceId) __INTRODUCED_IN(29);
 
+/**
+ * The definition of camera capture start callback. The same as
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}, except that
+ * it has the frame number of the capture as well.
+ *
+ * @param context The optional application context provided by user in
+ *                {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request that is starting. Note that this pointer points to a copy of
+ *                capture request sent by application, so the address is different to what
+ *                application sent but the content will match. This request will be freed by
+ *                framework immediately after this callback returns.
+ * @param timestamp The timestamp when the capture is started. This timestamp will match
+ *                  {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
+ *                  {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
+ * @param frameNumber the frame number of the capture started
+ */
+typedef void (*ACameraCaptureSession_captureCallback_startV2)(
+        void* context, ACameraCaptureSession* session,
+        const ACaptureRequest* request, int64_t timestamp, int64_t frameNumber);
+/**
+ * This has the same functionality as ACameraCaptureSession_captureCallbacks,
+ * with the exception that the captureCallback_startV2 callback is
+ * used instead of captureCallback_start, to support retrieving the frame number.
+ */
+typedef struct ACameraCaptureSession_captureCallbacksV2 {
+    /**
+     * Same as ACameraCaptureSession_captureCallbacks
+     */
+    void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted},
+     * except that it has the frame number of the capture added in the parameter
+     * list.
+     */
+    ACameraCaptureSession_captureCallback_startV2         onCaptureStarted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
+    ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted}.
+     */
+    ACameraCaptureSession_captureCallback_result        onCaptureCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureFailed}.
+     */
+    ACameraCaptureSession_captureCallback_failed        onCaptureFailed;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
+     */
+    ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
+    ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
+    ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
+
+
+} ACameraCaptureSession_captureCallbacksV2;
+
+/**
+ * This has the same functionality as ACameraCaptureSession_logicalCamera_captureCallbacks,
+ * with the exception that the captureCallback_startV2 callback is
+ * used instead of captureCallback_start, to support retrieving the frame number.
+ */
+typedef struct ACameraCaptureSession_logicalCamera_captureCallbacksV2 {
+    /**
+     * Same as ACameraCaptureSession_captureCallbacks
+     */
+    void*                                               context;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted},
+     * except that it has the frame number of the capture added in the parameter
+     * list.
+     */
+    ACameraCaptureSession_captureCallback_startV2         onCaptureStarted;
+
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+     */
+    ACameraCaptureSession_captureCallback_result        onCaptureProgressed;
+
+    /**
+     * Same as
+     * {@link ACameraCaptureSession_logicalCamera_captureCallbacks#onLogicalCameraCaptureCompleted}.
+     */
+    ACameraCaptureSession_logicalCamera_captureCallback_result onLogicalCameraCaptureCompleted;
+
+    /**
+     * This callback is called instead of {@link onLogicalCameraCaptureCompleted} when the
+     * camera device failed to produce a capture result for the
+     * request.
+     *
+     * <p>Other requests are unaffected, and some or all image buffers from
+     * the capture may have been pushed to their respective output
+     * streams.</p>
+     *
+     * <p>Note that the ACaptureRequest pointer in the callback will not match what the application
+     * has submitted, but the contents of the ACaptureRequest will match what the application
+     * submitted.</p>
+     *
+     * @see ALogicalCameraCaptureFailure
+     */
+    ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
+     */
+    ACameraCaptureSession_captureCallback_sequenceEnd   onCaptureSequenceCompleted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+     */
+    ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+    /**
+     * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+     */
+    ACameraCaptureSession_captureCallback_bufferLost    onCaptureBufferLost;
+
+} ACameraCaptureSession_logicalCamera_captureCallbacksV2;
+
+/**
+ * This has the same functionality as ACameraCaptureSession_capture, with added
+ * support for v2 of camera callbacks, where the onCaptureStarted callback
+ * adds the frame number to its parameter list.
+ */
+camera_status_t ACameraCaptureSession_captureV2(
+        ACameraCaptureSession* session,
+        /*optional*/ACameraCaptureSession_captureCallbacksV2* callbacks,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_setRepeatingRequest, with added
+ * support for v2 of camera callbacks, where the onCaptureStarted
+ * callback adds the frame number to its parameter list.
+ */
+camera_status_t ACameraCaptureSession_setRepeatingRequestV2(
+        ACameraCaptureSession* session,
+        /*optional*/ACameraCaptureSession_captureCallbacksV2* callbacks,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_logicalCamera_capture, with added
+ * support for v2 of logical multi-camera callbacks, where the onCaptureStarted callback
+ * adds the frame number to its parameter list.
+ */
+camera_status_t ACameraCaptureSession_logicalCamera_captureV2(
+        ACameraCaptureSession* session,
+        /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* callbacks,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_logicalCamera_setRepeatingRequest, with
+ * added support for v2 of logical multi-camera callbacks, where the onCaptureStarted
+ * callback adds the frame number to its parameter list.
+ */
+camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequestV2(
+        ACameraCaptureSession* session,
+        /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* callbacks,
+        int numRequests, ACaptureRequest** requests,
+        /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
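
A hedged usage sketch for the V2 entry points declared above: only the names introduced in this header plus pre-existing NDK types are used, device/session/request setup and error handling are left out, and the helper name is made up for the example.

    #include <android/log.h>
    #include <camera/NdkCameraCaptureSession.h>

    static void onCaptureStartedV2(void* /*context*/, ACameraCaptureSession* /*session*/,
                                   const ACaptureRequest* /*request*/,
                                   int64_t timestamp, int64_t frameNumber) {
        __android_log_print(ANDROID_LOG_INFO, "NdkCamV2",
                            "capture started: frame %lld at %lld ns",
                            (long long) frameNumber, (long long) timestamp);
    }

    // `session` and `request` are assumed to have been created through the usual
    // ACameraDevice_createCaptureSession / ACameraDevice_createCaptureRequest flow.
    camera_status_t startRepeatingWithFrameNumbers(ACameraCaptureSession* session,
                                                   ACaptureRequest* request) {
        ACameraCaptureSession_captureCallbacksV2 cbs = {};    // unused callbacks stay null
        cbs.context = nullptr;
        cbs.onCaptureStarted = onCaptureStartedV2;            // the only field new to V2

        int sequenceId = 0;
        return ACameraCaptureSession_setRepeatingRequestV2(
                session, &cbs, /*numRequests=*/1, &request, &sequenceId);
    }
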
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 816303c..b6f8552 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -72,6 +72,8 @@
     ACAMERA_DISTORTION_CORRECTION,
     ACAMERA_HEIC,
     ACAMERA_HEIC_INFO,
+    ACAMERA_AUTOMOTIVE,
+    ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -115,6 +117,8 @@
                                                                 << 16,
     ACAMERA_HEIC_START             = ACAMERA_HEIC              << 16,
     ACAMERA_HEIC_INFO_START        = ACAMERA_HEIC_INFO         << 16,
+    ACAMERA_AUTOMOTIVE_START       = ACAMERA_AUTOMOTIVE        << 16,
+    ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -517,6 +521,14 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata.  If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>When setting the AE metering regions, the application must consider the additional
+     * crop resulting from the aspect ratio differences between the preview stream and
+     * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+     * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+     * the boundary of AE regions will be [0, y_crop] and
+     * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+     * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+     * mismatch.</p>
      * <p>Starting from API level 30, the coordinate system of activeArraySize or
      * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
      * pre-zoom field of view. This means that the same aeRegions values at different
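
A short worked example of the aspect-ratio crop described above, with hypothetical numbers (a 4000x3000, 4:3 active array behind a 16:9 preview stream):

    #include <cstdio>

    int main() {
        const int active_width = 4000, active_height = 3000;       // 4:3 active array
        // Rows actually covered by a 16:9 preview cropped from the 4:3 frame:
        const int visible_height = active_width * 9 / 16;          // 2250
        const int y_crop = (active_height - visible_height) / 2;   // 375
        std::printf("metering regions are effectively limited to y in [%d, %d]\n",
                    y_crop, active_height - y_crop);                // [375, 2625]
        return 0;
    }
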
@@ -718,6 +730,14 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata. If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>When setting the AF metering regions, the application must consider the additional
+     * crop resulting from the aspect ratio differences between the preview stream and
+     * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+     * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+     * the boundary of AF regions will be [0, y_crop] and
+     * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+     * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+     * mismatch.</p>
      * <p>Starting from API level 30, the coordinate system of activeArraySize or
      * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
      * pre-zoom field of view. This means that the same afRegions values at different
@@ -838,7 +858,7 @@
      * routine is enabled, overriding the application's selected
      * ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS and
      * ACAMERA_COLOR_CORRECTION_MODE. Note that when ACAMERA_CONTROL_AE_MODE
-     * is OFF, the behavior of AWB is device dependent. It is recommened to
+     * is OFF, the behavior of AWB is device dependent. It is recommended to
      * also set AWB mode to OFF or lock AWB by using ACAMERA_CONTROL_AWB_LOCK before
      * setting AE mode to OFF.</p>
      * <p>When set to the OFF mode, the camera device's auto-white balance
@@ -913,6 +933,14 @@
      * region and output only the intersection rectangle as the metering region in the result
      * metadata.  If the region is entirely outside the crop region, it will be ignored and
      * not reported in the result metadata.</p>
+     * <p>When setting the AWB metering regions, the application must consider the additional
+     * crop resulting from the aspect ratio differences between the preview stream and
+     * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+     * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+     * the boundary of AWB regions will be [0, y_crop] and
+     * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+     * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+     * mismatch.</p>
      * <p>Starting from API level 30, the coordinate system of activeArraySize or
      * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
      * pre-zoom field of view. This means that the same awbRegions values at different
@@ -961,13 +989,15 @@
      *
      * <p>This control (except for MANUAL) is only effective if
      * <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
-     * <p>All intents are supported by all devices, except that:
-     *   * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
-     * PRIVATE_REPROCESSING or YUV_REPROCESSING.
-     *   * MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
-     * MANUAL_SENSOR.
-     *   * MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
-     * MOTION_TRACKING.</p>
+     * <p>All intents are supported by all devices, except that:</p>
+     * <ul>
+     * <li>ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+     * PRIVATE_REPROCESSING or YUV_REPROCESSING.</li>
+     * <li>MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+     * MANUAL_SENSOR.</li>
+     * <li>MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+     * MOTION_TRACKING.</li>
+     * </ul>
      *
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -1090,6 +1120,15 @@
      * (ACAMERA_LENS_OPTICAL_STABILIZATION_MODE), turning both modes on may
      * produce undesirable interaction, so it is recommended not to enable
      * both at the same time.</p>
+     * <p>If video stabilization is set to "PREVIEW_STABILIZATION",
+     * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE is overridden. The camera sub-system may choose
+     * to turn on hardware based image stabilization in addition to software based stabilization
+     * if it deems that appropriate.
+     * This key may be a part of the available session keys, which camera clients may
+     * query via
+     * {@link ACameraManager_getCameraCharacteristics }.
+     * If this is the case, changing this key over the life-time of a capture session may
+     * cause delays / glitches.</p>
      *
      * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
      * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
@@ -1449,7 +1488,7 @@
      * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
      * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
      * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
-     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | CONVERGED      | Converged after a precapture sequenceis canceled, transient states are skipped by camera device.
+     * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
      * CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
      * FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.</p>
      *
@@ -1685,7 +1724,7 @@
      * </ul></p>
      *
      * <p>Devices support post RAW sensitivity boost  will advertise
-     * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controling
+     * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
      * post RAW sensitivity boost.</p>
      * <p>This key will be <code>null</code> for devices that do not support any RAW format
      * outputs. For devices that do support RAW format outputs, this key will always
@@ -2144,6 +2183,51 @@
      */
     ACAMERA_FLASH_INFO_AVAILABLE =                              // byte (acamera_metadata_enum_android_flash_info_available_t)
             ACAMERA_FLASH_INFO_START,
+    /**
+     * <p>Maximum flashlight brightness level.</p>
+     *
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>If this value is greater than 1, then the device supports controlling the
+     * flashlight brightness level via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#turnOnTorchWithStrengthLevel">CameraManager#turnOnTorchWithStrengthLevel</a>.
+     * If this value is equal to 1, flashlight brightness control is not supported.
+     * The value for this key will be null for devices with no flash unit.</p>
+     */
+    ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL =                 // int32
+            ACAMERA_FLASH_INFO_START + 2,
+    /**
+     * <p>Default flashlight brightness level to be set via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#turnOnTorchWithStrengthLevel">CameraManager#turnOnTorchWithStrengthLevel</a>.</p>
+     *
+     * <p>Type: int32</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>If a flash unit is available, this will be greater than or equal to 1 and less
+     * than or equal to <code>ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL</code>.</p>
+     * <p>Setting flashlight brightness above the default level
+     * (i.e. <code>ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL</code>) may make the device more
+     * likely to reach thermal throttling conditions and slow down, or drain the
+     * battery quicker than normal. To minimize such issues, it is recommended to
+     * start the flashlight at this default brightness until a user explicitly requests
+     * a brighter level.
+     * Note that the value for this key will be null for devices with no flash unit.
+     * The default level should always be &gt; 0.</p>
+     *
+     * @see ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL
+     */
+    ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL =                 // int32
+            ACAMERA_FLASH_INFO_START + 3,
     ACAMERA_FLASH_INFO_END,
 
     /**
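
A hedged sketch of how an NDK client might read the two new flash-strength tags from a camera's static metadata; manager setup and most error handling are omitted, and the helper name is made up:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <cstdio>

    // Hypothetical helper: prints the flash strength range for one camera, if reported.
    void printFlashStrengthRange(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
            return;
        }
        ACameraMetadata_const_entry maxLevel, defaultLevel;
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
                                          &maxLevel) == ACAMERA_OK &&
            ACameraMetadata_getConstEntry(chars, ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
                                          &defaultLevel) == ACAMERA_OK) {
            // On devices without a flash unit these entries are expected to be absent,
            // so both lookups fail and nothing is printed.
            std::printf("camera %s: default strength %d of %d\n", cameraId,
                        defaultLevel.data.i32[0], maxLevel.data.i32[0]);
        }
        ACameraMetadata_free(chars);
    }
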
@@ -2341,7 +2425,7 @@
      *   and keep jpeg and thumbnail image data unrotated.</li>
      * <li>Rotate the jpeg and thumbnail image data and not set
      *   <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>. In this
-     *   case, LIMITED or FULL hardware level devices will report rotated thumnail size in
+     *   case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
      *   capture result, so the width and height will be interchanged if 90 or 270 degree
      *   orientation is requested. LEGACY device will always report unrotated thumbnail
      *   size.</li>
@@ -2370,7 +2454,7 @@
      *
      * <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> for indicating no
      * thumbnail should be generated.</p>
-     * <p>Below condiditions will be satisfied for this size list:</p>
+     * <p>Below conditions will be satisfied for this size list:</p>
      * <ul>
      * <li>The sizes will be sorted by increasing pixel area (width x height).
      * If several resolutions have the same area, they will be sorted by increasing width.</li>
@@ -2526,12 +2610,18 @@
      * <p>If a camera device supports both OIS and digital image stabilization
      * (ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE), turning both modes on may produce undesirable
      * interaction, so it is recommended not to enable both at the same time.</p>
+     * <p>If ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE is set to "PREVIEW_STABILIZATION",
+     * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE is overridden. The camera sub-system may choose
+     * to turn on hardware based image stabilization in addition to software based stabilization
+     * if it deems that appropriate. This key's value in the capture result will reflect which
+     * OIS mode was chosen.</p>
      * <p>Not all devices will support OIS; see
      * ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION for
      * available controls.</p>
      *
      * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
      * @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+     * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
      */
     ACAMERA_LENS_OPTICAL_STABILIZATION_MODE =                   // byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)
             ACAMERA_LENS_START + 4,
@@ -2634,6 +2724,9 @@
      * with PRIMARY_CAMERA.</p>
      * <p>When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
      * represented by the camera device, and will be represented as <code>(0, 0, 0)</code>.</p>
+     * <p>When ACAMERA_LENS_POSE_REFERENCE is AUTOMOTIVE, then this position is relative to the
+     * origin of the automotive sensor coordinate system, which is at the center of the rear
+     * axle.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -2675,7 +2768,7 @@
      * <p>When the state is STATIONARY, the lens parameters are not changing. This could be
      * either because the parameters are all fixed, or because the lens has had enough
      * time to reach the most recently-requested values.
-     * If all these lens parameters are not changable for a camera device, as listed below:</p>
+     * If all these lens parameters are not changeable for a camera device, as listed below:</p>
      * <ul>
      * <li>Fixed focus (<code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE == 0</code>), which means
      * ACAMERA_LENS_FOCUS_DISTANCE parameter will always be 0.</li>
@@ -3127,7 +3220,7 @@
      * the camera device. Using more streams simultaneously may require more hardware and
      * CPU resources that will consume more power. The image format for an output stream can
      * be any supported format provided by ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
-     * The formats defined in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS can be catergorized
+     * The formats defined in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS can be categorized
      * into the 3 stream types as below:</p>
      * <ul>
      * <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
@@ -3354,7 +3447,7 @@
      * but clients should be aware and expect delays during their application.
      * An example usage scenario could look like this:</p>
      * <ul>
-     * <li>The camera client starts by quering the session parameter key list via
+     * <li>The camera client starts by querying the session parameter key list via
      *   {@link ACameraManager_getCameraCharacteristics }.</li>
      * <li>Before triggering the capture session create sequence, a capture request
      *   must be built via
@@ -3393,16 +3486,36 @@
      * </ul></p>
      *
      * <p>This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which contains a list
-     * of keys that can be overridden using <a href="https://developer.android.com/reference/CaptureRequest/Builder.html#setPhysicalCameraKey">Builder#setPhysicalCameraKey</a>.
+     * of keys that can be overridden using
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.Builder.html#setPhysicalCameraKey">Builder#setPhysicalCameraKey</a>.
      * The respective value of such request key can be obtained by calling
-     * <a href="https://developer.android.com/reference/CaptureRequest/Builder.html#getPhysicalCameraKey">Builder#getPhysicalCameraKey</a>. Capture requests that contain
-     * individual physical device requests must be built via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.Builder.html#getPhysicalCameraKey">Builder#getPhysicalCameraKey</a>.
+     * Capture requests that contain individual physical device requests must be built via
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureRequest(int,">Set)</a>.</p>
      *
      * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
      */
     ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS =    // int32[n]
             ACAMERA_REQUEST_START + 17,
+    /**
+     * <p>A map of all available 10-bit dynamic range profiles along with their
+     * capture request constraints.</p>
+     *
+     * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>Devices supporting the 10-bit output capability
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+     * must list their supported dynamic range profiles. If the camera is not able to
+     * support every possible profile combination within a single capture request, the
+     * constraints must be listed here as well.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP =      // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
+            ACAMERA_REQUEST_START + 19,
     ACAMERA_REQUEST_END,
 
     /**
@@ -3601,7 +3714,7 @@
      * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
      * <p>For applications targeting SDK version 31 or newer, if the mobile device declares to be
      * media performance class 12 or higher by setting
-     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CDOES/MEDIA_PERFORMANCE_CLASS.html">MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
+     * <a href="https://developer.android.com/reference/android/os/Build.VERSION.html#MEDIA_PERFORMANCE_CLASS">VERSION#MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
      * the primary camera devices (first rear/front camera in the camera ID list) will not
      * support JPEG sizes smaller than 1080p. If the application configures a JPEG stream
      * smaller than 1080p, the camera device will round up the JPEG image size to at least
@@ -3620,7 +3733,7 @@
      * IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |</p>
      * <p>For applications targeting SDK version 31 or newer, if the mobile device doesn't declare
      * to be media performance class 12 or better by setting
-     * <a href="https://developer.android.com/reference/android/os/Build/VERSION_CDOES/MEDIA_PERFORMANCE_CLASS.html">MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
+     * <a href="https://developer.android.com/reference/android/os/Build.VERSION.html#MEDIA_PERFORMANCE_CLASS">VERSION#MEDIA_PERFORMANCE_CLASS</a> to be 31 or larger,
      * or if the camera device isn't a primary rear/front camera, the minimum required output
      * stream configurations are the same as for applications targeting SDK version older than
      * 31.</p>
@@ -4114,19 +4227,70 @@
      * to output different resolution images depending on the current active physical camera or
      * pixel mode. With multi-resolution input streams, the camera device can reprocess images
      * of different resolutions from different physical cameras or sensor pixel modes.</p>
-     * <p>When set to TRUE:
-     * * For a logical multi-camera, the camera framework derives
+     * <p>When set to TRUE:</p>
+     * <ul>
+     * <li>For a logical multi-camera, the camera framework derives
      * android.scaler.multiResolutionStreamConfigurationMap by combining the
      * ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS from its physical
-     * cameras.
-     * * For an ultra-high resolution sensor camera, the camera framework directly copies
+     * cameras.</li>
+     * <li>For an ultra-high resolution sensor camera, the camera framework directly copies
      * the value of ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS to
-     * android.scaler.multiResolutionStreamConfigurationMap.</p>
+     * android.scaler.multiResolutionStreamConfigurationMap.</li>
+     * </ul>
      *
      * @see ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
      */
     ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED =          // byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)
             ACAMERA_SCALER_START + 24,
+    /**
+     * <p>The stream use cases supported by this camera device.</p>
+     *
+     * <p>Type: int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>The stream use case indicates the purpose of a particular camera stream from
+     * the end-user perspective. Some examples of camera use cases are: preview stream for
+     * live viewfinder shown to the user, still capture for generating high quality photo
+     * capture, video record for encoding the camera output for the purpose of future playback,
+     * and video call for live realtime video conferencing.</p>
+     * <p>With this flag, the camera device can optimize the image processing pipeline
+     * parameters, such as tuning, sensor mode, and ISP settings, independent of
+     * the properties of the immediate camera output surface. For example, if the output
+     * surface is a SurfaceTexture, the stream use case flag can be used to indicate whether
+     * the camera frames eventually go to display, video encoder,
+     * still image capture, or all of them combined.</p>
+     * <p>The application sets the use case of a camera stream by calling
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setStreamUseCase">OutputConfiguration#setStreamUseCase</a>.</p>
+     * <p>A camera device with
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
+     * capability must support the following stream use cases:</p>
+     * <ul>
+     * <li>DEFAULT</li>
+     * <li>PREVIEW</li>
+     * <li>STILL_CAPTURE</li>
+     * <li>VIDEO_RECORD</li>
+     * <li>PREVIEW_VIDEO_STILL</li>
+     * <li>VIDEO_CALL</li>
+     * </ul>
+     * <p>The guaranteed stream combinations related to stream use case for a camera device with
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
+     * capability is documented in the camera device
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>. The
+     * application is strongly recommended to use one of the guaranteed stream combinations.
+     * If the application creates a session with a stream combination not in the guaranteed
+     * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
+     * the camera device may ignore some stream use cases due to hardware constraints
+     * and implementation details.</p>
+     * <p>For stream combinations not covered by the stream use case mandatory lists, such as
+     * reprocessable session, constrained high speed session, or RAW stream combinations, the
+     * application should leave stream use cases within the session as DEFAULT.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES =                 // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+            ACAMERA_SCALER_START + 25,
     ACAMERA_SCALER_END,
 
     /**
@@ -4680,7 +4844,7 @@
      * noise model used here is:</p>
      * <p>N(x) = sqrt(Sx + O)</p>
      * <p>Where x represents the recorded signal of a CFA channel normalized to
-     * the range [0, 1], and S and O are the noise model coeffiecients for
+     * the range [0, 1], and S and O are the noise model coefficients for
      * that channel.</p>
      * <p>A more detailed description of the noise model can be found in the
      * Adobe DNG specification for the NoiseProfile tag.</p>
@@ -4729,7 +4893,7 @@
      * <li>1.20 &lt;= R &gt;= 1.03 will require some software
      * correction to avoid demosaic errors (3-20% divergence).</li>
      * <li>R &gt; 1.20 will require strong software correction to produce
-     * a usuable image (&gt;20% divergence).</li>
+     * a usable image (&gt;20% divergence).</li>
      * </ul>
      * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
      * the camera device has RAW capability.</p>
@@ -4986,7 +5150,7 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul></p>
      *
-     * <p>This key will only be present in devices advertisting the
+     * <p>This key will only be present in devices advertising the
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
      * capability which also advertise <code>REMOSAIC_REPROCESSING</code> capability. On all other devices
      * RAW targets will have a regular bayer pattern.</p>
@@ -6171,9 +6335,11 @@
      *   <li>ACaptureRequest</li>
      * </ul></p>
      *
-     * <p>The tonemap curve will be defined the following formula:
-     * * OUT = pow(IN, 1.0 / gamma)
-     * where IN and OUT is the input pixel value scaled to range [0.0, 1.0],
+     * <p>The tonemap curve will be defined by the following formula:</p>
+     * <ul>
+     * <li>OUT = pow(IN, 1.0 / gamma)</li>
+     * </ul>
+     * <p>where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0],
      * pow is the power function and gamma is the gamma value specified by this
      * key.</p>
      * <p>The same curve will be applied to all color channels. The camera device
@@ -7071,6 +7237,87 @@
             ACAMERA_HEIC_START + 5,
     ACAMERA_HEIC_END,
 
+    /**
+     * <p>Location of the cameras on the automotive devices.</p>
+     *
+     * <p>Type: byte (acamera_metadata_enum_android_automotive_location_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This enum defines the locations of the cameras relative to the vehicle body frame on
+     * <a href="https://source.android.com/devices/sensors/sensor-types#auto_axes">the automotive sensor coordinate system</a>.
+     * If the system has FEATURE_AUTOMOTIVE, the camera will have this entry in its static
+     * metadata.</p>
+     * <ul>
+     * <li>INTERIOR is the inside of the vehicle body frame (or the passenger cabin).</li>
+     * <li>EXTERIOR is the outside of the vehicle body frame.</li>
+     * <li>EXTRA is an extra vehicle, such as a trailer.</li>
+     * </ul>
+     * <p>Each side of the vehicle body frame on this coordinate system is defined as below:</p>
+     * <ul>
+     * <li>FRONT is where the Y-axis increases toward.</li>
+     * <li>REAR is where the Y-axis decreases toward.</li>
+     * <li>LEFT is where the X-axis decreases toward.</li>
+     * <li>RIGHT is where the X-axis increases toward.</li>
+     * </ul>
+     * <p>If the camera has either EXTERIOR_OTHER or EXTRA_OTHER, its static metadata will list
+     * the following entries, so that applications can determine the camera's exact location:</p>
+     * <ul>
+     * <li>ACAMERA_LENS_POSE_REFERENCE</li>
+     * <li>ACAMERA_LENS_POSE_ROTATION</li>
+     * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+     * </ul>
+     *
+     * @see ACAMERA_LENS_POSE_REFERENCE
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION =                               // byte (acamera_metadata_enum_android_automotive_location_t)
+            ACAMERA_AUTOMOTIVE_START,
+    ACAMERA_AUTOMOTIVE_END,
+
+    /**
+     * <p>The direction of the camera faces relative to the vehicle body frame and the
+     * passenger seats.</p>
+     *
+     * <p>Type: byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This enum defines the lens facing characteristic of the cameras on the automotive
+     * devices at the locations that ACAMERA_AUTOMOTIVE_LOCATION defines. If the system has
+     * FEATURE_AUTOMOTIVE, the camera will have this entry in its static metadata.</p>
+     * <p>When ACAMERA_AUTOMOTIVE_LOCATION is INTERIOR, this has one or more INTERIOR_*
+     * values or a single EXTERIOR_* value.  When this has more than one INTERIOR_*,
+     * the first value must be the one for the seat closest to the optical axis. If this
+     * contains INTERIOR_OTHER, all other values will be ineffective.</p>
+     * <p>When ACAMERA_AUTOMOTIVE_LOCATION is EXTERIOR_* or EXTRA, this has a single
+     * EXTERIOR_* value.</p>
+     * <p>If a camera has INTERIOR_OTHER or EXTERIOR_OTHER, or more than one camera is at the
+     * same location and facing the same direction, their static metadata will list the
+     * following entries, so that applications can determine their lenses' exact facing
+     * directions:</p>
+     * <ul>
+     * <li>ACAMERA_LENS_POSE_REFERENCE</li>
+     * <li>ACAMERA_LENS_POSE_ROTATION</li>
+     * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+     * </ul>
+     *
+     * @see ACAMERA_AUTOMOTIVE_LOCATION
+     * @see ACAMERA_LENS_POSE_REFERENCE
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING =                            // byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)
+            ACAMERA_AUTOMOTIVE_LENS_START,
+    ACAMERA_AUTOMOTIVE_LENS_END,
+
 } acamera_metadata_tag_t;
 
 /**
@@ -7972,6 +8219,17 @@
      */
     ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON                      = 1,
 
+    /**
+     * <p>Preview stabilization, where the preview stream and all other non-RAW streams are
+     * stabilized with the same quality of stabilization, is enabled. This mode aims to give
+     * clients a 'what you see is what you get' effect. In this mode, the FoV reduction will
+     * be a maximum of 20% both horizontally and vertically
+     * (10% from the left, right, top, and bottom) for the given zoom ratio / crop region.
+     * The resultant FoV will also be the same across all processed streams
+     * (that have the same aspect ratio).</p>
+     */
+    ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION   = 2,
+
 } acamera_metadata_enum_android_control_video_stabilization_mode_t;
 
 // ACAMERA_CONTROL_AE_STATE
@@ -8452,6 +8710,14 @@
      */
     ACAMERA_LENS_POSE_REFERENCE_UNDEFINED                            = 2,
 
+    /**
+     * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the origin of the
+     * automotive sensor coordinate system, which is at the center of the rear axle.</p>
+     *
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_LENS_POSE_REFERENCE_AUTOMOTIVE                           = 3,
+
 } acamera_metadata_enum_android_lens_pose_reference_t;
 
 
@@ -8765,7 +9031,7 @@
      * for the largest YUV_420_888 size.</p>
      * <p>If the device supports the {@link AIMAGE_FORMAT_RAW10 }, {@link AIMAGE_FORMAT_RAW12 }, {@link AIMAGE_FORMAT_Y8 }, then those can also be
      * captured at the same rate as the maximum-size YUV_420_888 resolution is.</p>
-     * <p>In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranted to have a value between 0
+     * <p>In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranteed to have a value between 0
      * and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
      * are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
      * consistent image output.</p>
@@ -8931,7 +9197,7 @@
      * non-active physical cameras. For example, if the logical camera has a wide-ultrawide
      * configuration where the wide lens is the default, when the crop region is set to the
      * logical camera's active array size, (and the zoom ratio set to 1.0 starting from
-     * Android 11), a physical stream for the ultrawide camera may prefer outputing images
+     * Android 11), a physical stream for the ultrawide camera may prefer outputting images
      * with larger field-of-view than that of the wide camera for better stereo matching
      * margin or more robust motion tracking. At the same time, the physical non-RAW streams'
      * field of view must not be smaller than the requested crop region and zoom ratio, as
@@ -9052,8 +9318,132 @@
     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
                                                                       = 16,
 
+    /**
+     * <p>The camera device supports selecting a per-stream use case via
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setStreamUseCase">OutputConfiguration#setStreamUseCase</a>
+     * so that the device can optimize camera pipeline parameters such as tuning, sensor
+     * mode, or ISP settings for a specific user scenario.
+     * Some sample usages of this capability are:</p>
+     * <ul>
+     * <li>Distinguish high quality YUV captures from a regular YUV stream where
+     *   the image quality may not be as good as the JPEG stream, or</li>
+     * <li>Use one stream to serve multiple purposes: viewfinder, video recording and
+     *   still capture. This is common with applications that wish to apply edits equally
+     *   to preview, saved images, and saved videos.</li>
+     * </ul>
+     * <p>This capability requires the camera device to support the following
+     * stream use cases:</p>
+     * <ul>
+     * <li>DEFAULT for backward compatibility where the application doesn't set
+     *   a stream use case</li>
+     * <li>PREVIEW for live viewfinder and in-app image analysis</li>
+     * <li>STILL_CAPTURE for still photo capture</li>
+     * <li>VIDEO_RECORD for recording video clips</li>
+     * <li>PREVIEW_VIDEO_STILL for one single stream used for viewfinder, video
+     *   recording, and still capture.</li>
+     * <li>VIDEO_CALL for long running video calls</li>
+     * </ul>
+     * <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
+     * lists all of the supported stream use cases.</p>
+     * <p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
+     * mandatory stream combinations involving stream use cases, which can also be queried
+     * via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE           = 19,
+
 } acamera_metadata_enum_android_request_available_capabilities_t;
 
+// ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_dynamic_range_profiles_map {
+    /**
+     * <p>8-bit SDR profile which is the default for all non 10-bit output capable devices.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD    = 0x1,
+
+    /**
+     * <p>10-bit pixel samples encoded using the Hybrid log-gamma transfer function.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10       = 0x2,
+
+    /**
+     * <p>10-bit pixel samples encoded using the SMPTE ST 2084 transfer function.
+     * This profile utilizes internal static metadata to increase the quality
+     * of the capture.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10       = 0x4,
+
+    /**
+     * <p>10-bit pixel samples encoded using the SMPTE ST 2084 transfer function.
+     * In contrast to HDR10, this profile uses internal per-frame metadata
+     * to further enhance the quality of the capture.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS  = 0x8,
+
+    /**
+     * <p>This is a camera mode for Dolby Vision capture optimized for a more scene
+     * accurate capture. This would typically differ from what a specific device
+     * might want to tune for a consumer optimized Dolby Vision general capture.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF
+                                                                      = 0x10,
+
+    /**
+     * <p>This is the power optimized mode for 10-bit Dolby Vision HDR Reference Mode.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO
+                                                                      = 0x20,
+
+    /**
+     * <p>This is the camera mode for the default Dolby Vision capture mode for the
+     * specific device. This would be tuned by each specific device for consumer
+     * pleasing results that resonate with their particular audience. We expect
+     * that each specific device would have a different look for their default
+     * Dolby Vision capture.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM
+                                                                      = 0x40,
+
+    /**
+     * <p>This is the power optimized mode for 10-bit Dolby Vision HDR device specific
+     * capture mode.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO
+                                                                      = 0x80,
+
+    /**
+     * <p>This is the 8-bit version of the Dolby Vision reference capture mode optimized
+     * for scene accuracy.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF
+                                                                      = 0x100,
+
+    /**
+     * <p>This is the power optimized mode for 8-bit Dolby Vision HDR Reference Mode.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO
+                                                                      = 0x200,
+
+    /**
+     * <p>This is the 8-bit version of device specific tuned and optimized Dolby Vision
+     * capture mode.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM
+                                                                      = 0x400,
+
+    /**
+     * <p>This is the power optimized mode for 8-bit Dolby Vision HDR device specific
+     * capture mode.</p>
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
+                                                                      = 0x800,
+
+    /**
+     *
+     */
+    ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX         = 0x1000,
+
+} acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
+
 
 // ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
 typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
@@ -9145,6 +9535,20 @@
                                                                       = 0x7,
 
     /**
+     * <p>If supported, the recommended 10-bit output stream configurations must include
+     * a subset of the advertised <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YCBCR_P010">ImageFormat#YCBCR_P010</a> and
+     * <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> outputs that are optimized for power
+     * and performance when registered along with a supported 10-bit dynamic range profile.
+     * See android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile for
+     * details.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_10BIT_OUTPUT
+                                                                      = 0x8,
+
+    ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8
+                                                                      = 0x9,
+
+    /**
      * <p>Vendor defined use cases. These depend on the vendor implementation.</p>
      */
     ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START
@@ -9225,6 +9629,76 @@
 
 } acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
 
+// ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_use_cases {
+    /**
+     * <p>Default stream use case.</p>
+     * <p>This use case is the same as when the application doesn't set any use case for
+     * the stream. The camera device uses the properties of the output target, such as
+     * format, dataSpace, or surface class type, to optimize the image processing pipeline.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT                = 0x0,
+
+    /**
+     * <p>Live stream shown to the user.</p>
+     * <p>Optimized for performance and usability as a viewfinder, but not necessarily for
+     * image quality. The output is not meant to be persisted as saved images or video.</p>
+     * <p>There is no stall if ACAMERA_CONTROL_* are set to FAST. There may be a stall
+     * if they are set to HIGH_QUALITY. This use case has the same behavior as the
+     * default SurfaceView and SurfaceTexture targets. Additionally, this use case can be
+     * used for in-app image analysis.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW                = 0x1,
+
+    /**
+     * <p>Still photo capture.</p>
+     * <p>Optimized for high-quality high-resolution capture, and not expected to maintain
+     * preview-like frame rates.</p>
+     * <p>The stream may have stalls regardless of whether ACAMERA_CONTROL_* is HIGH_QUALITY.
+     * This use case has the same behavior as the default JPEG and RAW related formats.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE          = 0x2,
+
+    /**
+     * <p>Recording video clips.</p>
+     * <p>Optimized for high-quality video capture, including high-quality image stabilization
+     * if supported by the device and enabled by the application. As a result, this use case
+     * may produce output frames with a substantial lag from real time, to allow for the
+     * highest-quality stabilization or other processing. Such output is not suitable for
+     * drawing to screen directly, and is expected to be persisted to disk or similar for later
+     * playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed
+     * to have video stabilization applied when the video stabilization control is set
+     * to ON, as opposed to PREVIEW_STABILIZATION.</p>
+     * <p>This use case has the same behavior as the default MediaRecorder and MediaCodec
+     * targets.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD           = 0x3,
+
+    /**
+     * <p>One single stream used for combined purposes of preview, video, and still capture.</p>
+     * <p>For such multi-purpose streams, the camera device aims to make the best tradeoff
+     * between the individual use cases. For example, the STILL_CAPTURE use case by itself
+     * may have stalls to achieve the best image quality. But if combined with PREVIEW and
+     * VIDEO_RECORD, the camera device needs to trade off the additional image processing
+     * for speed so that preview and video recording aren't slowed down.</p>
+     * <p>Similarly, VIDEO_RECORD may produce frames with a substantial lag, but
+     * PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video
+     * stabilization with this use case, the device must support and the app must select the
+     * PREVIEW_STABILIZATION mode for video stabilization.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL    = 0x4,
+
+    /**
+     * <p>Long-running video call optimized for both power efficiency and video quality.</p>
+     * <p>The camera sensor may run in a lower-resolution mode to reduce power consumption
+     * at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL
+     * outputs are expected to work in dark conditions, so they are usually accompanied by
+     * variable frame rate settings to allow sufficient exposure time in low light.</p>
+     */
+    ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL             = 0x5,
+
+} acamera_metadata_enum_android_scaler_available_stream_use_cases_t;
+
 
 // ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
@@ -9642,7 +10116,7 @@
     ACAMERA_TONEMAP_MODE_HIGH_QUALITY                                = 2,
 
     /**
-     * <p>Use the gamma value specified in ACAMERA_TONEMAP_GAMMA to peform
+     * <p>Use the gamma value specified in ACAMERA_TONEMAP_GAMMA to perform
      * tonemapping.</p>
      * <p>All color enhancement and tonemapping must be disabled, except
      * for applying the tonemapping curve specified by ACAMERA_TONEMAP_GAMMA.</p>
@@ -9654,7 +10128,7 @@
 
     /**
      * <p>Use the preset tonemapping curve specified in
-     * ACAMERA_TONEMAP_PRESET_CURVE to peform tonemapping.</p>
+     * ACAMERA_TONEMAP_PRESET_CURVE to perform tonemapping.</p>
      * <p>All color enhancement and tonemapping must be disabled, except
      * for applying the tonemapping curve specified by
      * ACAMERA_TONEMAP_PRESET_CURVE.</p>
@@ -9752,7 +10226,7 @@
      * fire the flash for flash power metering during precapture, and then fire the flash
      * for the final capture, if a flash is available on the device and the AE mode is set to
      * enable the flash.</p>
-     * <p>Devices that initially shipped with Android version <a href="https://developer.android.com/reference/android/os/Build/VERSION_CODES.html#Q">Q</a> or newer will not include any LEGACY-level devices.</p>
+     * <p>Devices that initially shipped with Android version <a href="https://developer.android.com/reference/android/os/Build.VERSION_CODES.html#Q">Q</a> or newer will not include any LEGACY-level devices.</p>
      *
      * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
      * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -9986,6 +10460,167 @@
 
 
 
+// ACAMERA_AUTOMOTIVE_LOCATION
+typedef enum acamera_metadata_enum_acamera_automotive_location {
+    /**
+     * <p>The camera device exists inside of the vehicle cabin.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_INTERIOR                             = 0,
+
+    /**
+     * <p>The camera device exists outside of the vehicle body frame but not exactly on one
+     * of the exterior locations this enum defines.  Applications should determine the exact
+     * location from ACAMERA_LENS_POSE_TRANSLATION.</p>
+     *
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_OTHER                       = 1,
+
+    /**
+     * <p>The camera device exists outside of the vehicle body frame and on its front side.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT                       = 2,
+
+    /**
+     * <p>The camera device exists outside of the vehicle body frame and on its rear side.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_REAR                        = 3,
+
+    /**
+     * <p>The camera device exists outside and on the left side of the vehicle body frame.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT                        = 4,
+
+    /**
+     * <p>The camera device exists outside and on the right side of the vehicle body frame.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT                       = 5,
+
+    /**
+     * <p>The camera device exists on an extra vehicle, such as a trailer, but not exactly
+     * on one of the front, rear, left, or right sides.  Applications should determine the exact
+     * location from ACAMERA_LENS_POSE_TRANSLATION.</p>
+     *
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_OTHER                          = 6,
+
+    /**
+     * <p>The camera device exists outside of the extra vehicle's body frame and on its front
+     * side.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_FRONT                          = 7,
+
+    /**
+     * <p>The camera device exists outside of the extra vehicle's body frame and on its rear
+     * side.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_REAR                           = 8,
+
+    /**
+     * <p>The camera device exists outside and on the left side of the extra vehicle body.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_LEFT                           = 9,
+
+    /**
+     * <p>The camera device exists outside and on the right side of the extra vehicle body.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_RIGHT                          = 10,
+
+} acamera_metadata_enum_android_automotive_location_t;
+
+
+// ACAMERA_AUTOMOTIVE_LENS_FACING
+typedef enum acamera_metadata_enum_acamera_automotive_lens_facing {
+    /**
+     * <p>The camera device faces the outside of the vehicle body frame but not exactly
+     * toward one of the exterior sides defined by this enum.  Applications should determine
+     * the exact facing direction from ACAMERA_LENS_POSE_ROTATION and
+     * ACAMERA_LENS_POSE_TRANSLATION.</p>
+     *
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_OTHER                    = 0,
+
+    /**
+     * <p>The camera device faces the front of the vehicle body frame.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_FRONT                    = 1,
+
+    /**
+     * <p>The camera device faces the rear of the vehicle body frame.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_REAR                     = 2,
+
+    /**
+     * <p>The camera device faces the left side of the vehicle body frame.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_LEFT                     = 3,
+
+    /**
+     * <p>The camera device faces the right side of the vehicle body frame.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_RIGHT                    = 4,
+
+    /**
+     * <p>The camera device faces the inside of the vehicle body frame but not exactly
+     * toward one of the seats described by this enum.  Applications should determine the exact
+     * facing direction from ACAMERA_LENS_POSE_ROTATION and ACAMERA_LENS_POSE_TRANSLATION.</p>
+     *
+     * @see ACAMERA_LENS_POSE_ROTATION
+     * @see ACAMERA_LENS_POSE_TRANSLATION
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_OTHER                    = 5,
+
+    /**
+     * <p>The camera device faces the left side seat of the first row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_LEFT          = 6,
+
+    /**
+     * <p>The camera device faces the center seat of the first row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_CENTER        = 7,
+
+    /**
+     * <p>The camera device faces the right seat of the first row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_RIGHT         = 8,
+
+    /**
+     * <p>The camera device faces the left side seat of the second row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_LEFT          = 9,
+
+    /**
+     * <p>The camera device faces the center seat of the second row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_CENTER        = 10,
+
+    /**
+     * <p>The camera device faces the right side seat of the second row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_RIGHT         = 11,
+
+    /**
+     * <p>The camera device faces the left side seat of the third row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_LEFT          = 12,
+
+    /**
+     * <p>The camera device faces the center seat of the third row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_CENTER        = 13,
+
+    /**
+     * <p>The camera device faces the right seat of the third row.</p>
+     */
+    ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT         = 14,
+
+} acamera_metadata_enum_android_automotive_lens_facing_t;
+
+
 
 __END_DECLS
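
As a rough illustration of how the new tags above are consumed from the NDK (not part of this
change), the sketch below checks whether a camera advertises the VIDEO_CALL stream use case. It
assumes ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES is reported as an int64 list, mirroring the
framework-side characteristics key; the same ACameraMetadata_getConstEntry pattern applies to the
dynamic range profile and automotive tags added above.

    // Hypothetical usage sketch -- not part of this change.
    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    static bool supportsVideoCallUseCase(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
            return false;
        }
        bool found = false;
        ACameraMetadata_const_entry entry = {};
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES,
                                          &entry) == ACAMERA_OK) {
            // Assumed to be a list of int64 stream use case values.
            for (uint32_t i = 0; i < entry.count; i++) {
                if (entry.data.i64[i] == ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL) {
                    found = true;
                    break;
                }
            }
        }
        ACameraMetadata_free(chars);
        return found;
    }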
 
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 2b630db..b3977ff 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -2,11 +2,15 @@
   global:
     ACameraCaptureSession_abortCaptures;
     ACameraCaptureSession_capture;
+    ACameraCaptureSession_captureV2; # introduced=33
     ACameraCaptureSession_logicalCamera_capture; # introduced=29
+    ACameraCaptureSession_logicalCamera_captureV2; # introduced=33
     ACameraCaptureSession_close;
     ACameraCaptureSession_getDevice;
     ACameraCaptureSession_setRepeatingRequest;
+    ACameraCaptureSession_setRepeatingRequestV2; # introduced=33
     ACameraCaptureSession_logicalCamera_setRepeatingRequest; # introduced=29
+    ACameraCaptureSession_logicalCamera_setRepeatingRequestV2; # introduced=33
     ACameraCaptureSession_stopRepeating;
     ACameraCaptureSession_updateSharedOutput; # introduced=28
     ACameraDevice_close;
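
For reference (not part of this change), the V2 entry points added above differ from the originals
only in the capture-started callback, which now also reports the frame number. A minimal hedged
sketch, following the callback layout exercised in the vendor test further down:

    // Hypothetical usage sketch -- not part of this change.
    #define LOG_TAG "CaptureV2Sketch"
    #include <inttypes.h>
    #include <camera/NdkCameraCaptureSession.h>
    #include <utils/Log.h>

    static void onCaptureStartedV2(void* /*context*/, ACameraCaptureSession* /*session*/,
                                   const ACaptureRequest* /*request*/, int64_t timestamp,
                                   int64_t frameNumber) {
        ALOGI("Capture started: timestamp %" PRId64 ", frame %" PRId64, timestamp, frameNumber);
    }

    static camera_status_t captureOnce(ACameraCaptureSession* session, ACaptureRequest* request) {
        ACameraCaptureSession_captureCallbacksV2 cbs = {
            nullptr,             // context
            onCaptureStartedV2,  // onCaptureStarted, now receives the frame number
            nullptr,             // onCaptureProgressed
            nullptr,             // onCaptureCompleted
            nullptr,             // onCaptureFailed
            nullptr,             // onCaptureSequenceCompleted
            nullptr,             // onCaptureSequenceAborted
            nullptr,             // onCaptureBufferLost
        };
        int seqId = 0;
        return ACameraCaptureSession_captureV2(session, &cbs, 1, &request, &seqId);
    }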
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 9f63099..0a57590 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -29,8 +29,6 @@
 #include "ACaptureRequest.h"
 #include "utils.h"
 
-#include "ACameraCaptureSession.inc"
-
 #define CHECK_TRANSACTION_AND_RET(remoteRet, status, callName) \
     if (!remoteRet.isOk()) { \
         ALOGE("%s: Transaction error during %s call %s", __FUNCTION__, callName, \
@@ -332,7 +330,8 @@
                 return ACAMERA_ERROR_UNKNOWN;
     }
 
-    mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfigW);
+    mConfiguredOutputs[streamId] =
+            std::move(std::make_pair(std::move(output->mWindow), std::move(outConfigW)));
 
     return ACAMERA_OK;
 }
@@ -492,6 +491,7 @@
     }
 
     if (mRemote != nullptr) {
+        ALOGD("%s: binder disconnect reached", __FUNCTION__);
         auto ret = mRemote->disconnect();
         if (!ret.isOk()) {
             ALOGE("%s: Transaction error while disconnecting device %s", __FUNCTION__,
@@ -625,7 +625,8 @@
         outConfigInsert.windowHandles[0] = anw;
         outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
         native_handle_ptr_wrapper wrap(anw);
-        outputSet.insert(std::make_pair(anw, outConfigInsertW));
+
+        outputSet.emplace(std::make_pair(std::move(anw), std::move(outConfigInsertW)));
     }
     std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> addSet = outputSet;
     std::vector<int32_t> deleteList;
@@ -682,7 +683,7 @@
     }
 
     // add new streams
-    for (auto outputPair : addSet) {
+    for (const auto &outputPair : addSet) {
         int streamId;
         Status status = Status::UNKNOWN_ERROR;
         auto ret = mRemote->createStream(outputPair.second,
@@ -847,12 +848,32 @@
             return;
         }
 
-        const auto& windowHandles = outputPairIt->second.second.mOutputConfiguration.windowHandles;
-        for (const auto& outHandle : windowHandles) {
-            for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
-                int32_t windowId = streamAndWindowId.windowId;
-                if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
-                    const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
+        // Get the surfaces corresponding to the error stream id, then go through
+        // them and try to match them against the surfaces in the corresponding
+        // CaptureRequest.
+        const auto& errorWindowHandles =
+                outputPairIt->second.second.mOutputConfiguration.windowHandles;
+        for (const auto& errorWindowHandle : errorWindowHandles) {
+            for (const auto &requestStreamAndWindowId :
+                        request->mCaptureRequest.streamAndWindowIds) {
+                // Go through the surfaces in the capture request and see which
+                // ones match the surfaces in the error stream.
+                int32_t requestWindowId = requestStreamAndWindowId.windowId;
+                auto requestSurfacePairIt =
+                        mConfiguredOutputs.find(requestStreamAndWindowId.streamId);
+                if (requestSurfacePairIt == mConfiguredOutputs.end()) {
+                    ALOGE("%s: Error: request stream id %d does not exist", __FUNCTION__,
+                              requestStreamAndWindowId.streamId);
+                    setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
+                    return;
+                }
+
+                const auto &requestWindowHandles =
+                        requestSurfacePairIt->second.second.mOutputConfiguration.windowHandles;
+                if (utils::isWindowNativeHandleEqual(
+                        requestWindowHandles[requestWindowId], errorWindowHandle)) {
+                    const native_handle_t* anw =
+                            requestWindowHandles[requestWindowId].getNativeHandle();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -910,6 +931,7 @@
         case kWhatOnError:
         case kWhatSessionStateCb:
         case kWhatCaptureStart:
+        case kWhatCaptureStart2:
         case kWhatCaptureResult:
         case kWhatLogicalCaptureResult:
         case kWhatCaptureFail:
@@ -982,6 +1004,7 @@
         }
         case kWhatSessionStateCb:
         case kWhatCaptureStart:
+        case kWhatCaptureStart2:
         case kWhatCaptureResult:
         case kWhatLogicalCaptureResult:
         case kWhatCaptureFail:
@@ -1002,6 +1025,7 @@
             const char *id_cstr = mId.c_str();
             switch (msg->what()) {
                 case kWhatCaptureStart:
+                case kWhatCaptureStart2:
                 case kWhatCaptureResult:
                 case kWhatLogicalCaptureResult:
                 case kWhatCaptureFail:
@@ -1053,6 +1077,35 @@
                     freeACaptureRequest(request);
                     break;
                 }
+                case kWhatCaptureStart2:
+                {
+                    ACameraCaptureSession_captureCallback_startV2 onStart2;
+                    found = msg->findPointer(kCallbackFpKey, (void**) &onStart2);
+                    if (!found) {
+                        ALOGE("%s: Cannot find capture startV2 callback!", __FUNCTION__);
+                        return;
+                    }
+                    if (onStart2 == nullptr) {
+                        return;
+                    }
+                    int64_t timestamp;
+                    found = msg->findInt64(kTimeStampKey, &timestamp);
+                    if (!found) {
+                        ALOGE("%s: Cannot find timestamp!", __FUNCTION__);
+                        return;
+                    }
+                    int64_t frameNumber;
+                    found = msg->findInt64(kFrameNumberKey, &frameNumber);
+                    if (!found) {
+                        ALOGE("%s: Cannot find frame number!", __FUNCTION__);
+                        return;
+                    }
+
+                    ACaptureRequest* request = allocateACaptureRequest(requestSp, id_cstr);
+                    (*onStart2)(context, session.get(), request, timestamp, frameNumber);
+                    freeACaptureRequest(request);
+                    break;
+                }
                 case kWhatCaptureResult:
                 {
                     ACameraCaptureSession_captureCallback_result onResult;
@@ -1281,6 +1334,7 @@
         ACameraCaptureSession_captureCallbacks* cbs) :
         mSession(session), mRequests(requests),
         mIsRepeating(isRepeating),
+        mIs2Callback(false),
         mIsLogicalCameraCallback(false) {
     initCaptureCallbacks(cbs);
 
@@ -1297,6 +1351,7 @@
         ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
         mSession(session), mRequests(requests),
         mIsRepeating(isRepeating),
+        mIs2Callback(false),
         mIsLogicalCameraCallback(true) {
     initCaptureCallbacks(lcbs);
 
@@ -1306,6 +1361,40 @@
     }
 }
 
+CameraDevice::CallbackHolder::CallbackHolder(
+        sp<ACameraCaptureSession>          session,
+        const Vector<sp<CaptureRequest> >& requests,
+        bool                               isRepeating,
+        ACameraCaptureSession_captureCallbacksV2* cbs) :
+        mSession(session), mRequests(requests),
+        mIsRepeating(isRepeating),
+        mIs2Callback(true),
+        mIsLogicalCameraCallback(false) {
+    initCaptureCallbacksV2(cbs);
+
+    if (cbs != nullptr) {
+        mOnCaptureCompleted = cbs->onCaptureCompleted;
+        mOnCaptureFailed = cbs->onCaptureFailed;
+    }
+}
+
+CameraDevice::CallbackHolder::CallbackHolder(
+        sp<ACameraCaptureSession>          session,
+        const Vector<sp<CaptureRequest> >& requests,
+        bool                               isRepeating,
+        ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs) :
+        mSession(session), mRequests(requests),
+        mIsRepeating(isRepeating),
+        mIs2Callback(true),
+        mIsLogicalCameraCallback(true) {
+    initCaptureCallbacksV2(lcbs);
+
+    if (lcbs != nullptr) {
+        mOnLogicalCameraCaptureCompleted = lcbs->onLogicalCameraCaptureCompleted;
+        mOnLogicalCameraCaptureFailed = lcbs->onLogicalCameraCaptureFailed;
+    }
+}
+
 void
 CameraDevice::checkRepeatingSequenceCompleteLocked(
     const int sequenceId, const int64_t lastFrameNumber) {
@@ -1542,11 +1631,14 @@
 
     int32_t sequenceId = resultExtras.requestId;
     int32_t burstId = resultExtras.burstId;
+    int64_t frameNumber = resultExtras.frameNumber;
 
     auto it = dev->mSequenceCallbackMap.find(sequenceId);
     if (it != dev->mSequenceCallbackMap.end()) {
         CallbackHolder cbh = (*it).second;
         ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
+        ACameraCaptureSession_captureCallback_startV2 onStart2 = cbh.mOnCaptureStarted2;
+        bool v2Callback = cbh.mIs2Callback;
         sp<ACameraCaptureSession> session = cbh.mSession;
         if ((size_t) burstId >= cbh.mRequests.size()) {
             ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1554,12 +1646,19 @@
             dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
         }
         sp<CaptureRequest> request = cbh.mRequests[burstId];
-        sp<AMessage> msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+        sp<AMessage> msg = nullptr;
+        if (v2Callback) {
+            msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
+            msg->setPointer(kCallbackFpKey, (void*) onStart2);
+        } else {
+            msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+            msg->setPointer(kCallbackFpKey, (void*) onStart);
+        }
         msg->setPointer(kContextKey, cbh.mContext);
         msg->setObject(kSessionSpKey, session);
-        msg->setPointer(kCallbackFpKey, (void*) onStart);
         msg->setObject(kCaptureRequestKey, request);
         msg->setInt64(kTimeStampKey, timestamp);
+        msg->setInt64(kFrameNumberKey, frameNumber);
         dev->postSessionMsgAndCleanup(msg);
     }
     return ret;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 0b6c7c8..c306206 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -245,6 +245,7 @@
         kWhatSessionStateCb,   // onReady, onActive
         // Capture callbacks
         kWhatCaptureStart,     // onCaptureStarted
+        kWhatCaptureStart2,    // onCaptureStarted2
         kWhatCaptureResult,    // onCaptureProgressed, onCaptureCompleted
         kWhatLogicalCaptureResult, // onLogicalCameraCaptureCompleted
         kWhatCaptureFail,      // onCaptureFailed
@@ -309,11 +310,18 @@
                        const Vector<sp<CaptureRequest>>&  requests,
                        bool                               isRepeating,
                        ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
-
-        template <class T>
-        void initCaptureCallbacks(T* cbs) {
+        CallbackHolder(sp<ACameraCaptureSession>          session,
+                       const Vector<sp<CaptureRequest> >& requests,
+                       bool                               isRepeating,
+                       ACameraCaptureSession_captureCallbacksV2* cbs);
+        CallbackHolder(sp<ACameraCaptureSession>          session,
+                       const Vector<sp<CaptureRequest> >& requests,
+                       bool                               isRepeating,
+                       ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs);
+        void clearCallbacks() {
             mContext = nullptr;
             mOnCaptureStarted = nullptr;
+            mOnCaptureStarted2 = nullptr;
             mOnCaptureProgressed = nullptr;
             mOnCaptureCompleted = nullptr;
             mOnLogicalCameraCaptureCompleted = nullptr;
@@ -322,6 +330,24 @@
             mOnCaptureSequenceCompleted = nullptr;
             mOnCaptureSequenceAborted = nullptr;
             mOnCaptureBufferLost = nullptr;
+        }
+
+        template <class T>
+        void initCaptureCallbacksV2(T* cbs) {
+            clearCallbacks();
+            if (cbs != nullptr) {
+                mContext = cbs->context;
+                mOnCaptureStarted2 = cbs->onCaptureStarted;
+                mOnCaptureProgressed = cbs->onCaptureProgressed;
+                mOnCaptureSequenceCompleted = cbs->onCaptureSequenceCompleted;
+                mOnCaptureSequenceAborted = cbs->onCaptureSequenceAborted;
+                mOnCaptureBufferLost = cbs->onCaptureBufferLost;
+            }
+        }
+
+        template <class T>
+        void initCaptureCallbacks(T* cbs) {
+            clearCallbacks();
             if (cbs != nullptr) {
                 mContext = cbs->context;
                 mOnCaptureStarted = cbs->onCaptureStarted;
@@ -335,10 +361,12 @@
         sp<ACameraCaptureSession>   mSession;
         Vector<sp<CaptureRequest>>  mRequests;
         const bool                  mIsRepeating;
+        const bool                  mIs2Callback;
         const bool                  mIsLogicalCameraCallback;
 
         void*                       mContext;
         ACameraCaptureSession_captureCallback_start mOnCaptureStarted;
+        ACameraCaptureSession_captureCallback_startV2 mOnCaptureStarted2;
         ACameraCaptureSession_captureCallback_result mOnCaptureProgressed;
         ACameraCaptureSession_captureCallback_result mOnCaptureCompleted;
         ACameraCaptureSession_logicalCamera_captureCallback_result mOnLogicalCameraCaptureCompleted;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 8359bb1..4663529 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -136,6 +136,8 @@
             return !(*this == other);
         }
         bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
@@ -146,6 +148,7 @@
             if (mPhysicalCamUnavailable != other.mPhysicalCamUnavailable)
                     return mPhysicalCamUnavailable < other.mPhysicalCamUnavailable;
             return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
         }
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 6f5820e..62779a4 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -109,8 +109,30 @@
         mOutputConfiguration.windowGroupId = -1;
     };
 
-    OutputConfigurationWrapper(OutputConfiguration &outputConfiguration)
-            : mOutputConfiguration((outputConfiguration)) { }
+    OutputConfigurationWrapper(const OutputConfigurationWrapper &other) {
+        *this = other;
+    }
+
+    // Needed to make sure that the OutputConfiguration inside
+    // OutputConfigurationWrapper, when copied, doesn't call hidl_handle's
+    // assignment operator / copy constructor, which would clone the native
+    // handle. That is not what we want for app callbacks, which take the
+    // native handle as a parameter.
+    OutputConfigurationWrapper &operator=(const OutputConfigurationWrapper &other) {
+        const OutputConfiguration &outputConfiguration = other.mOutputConfiguration;
+        mOutputConfiguration.rotation = outputConfiguration.rotation;
+        mOutputConfiguration.isDeferred = outputConfiguration.isDeferred;
+        mOutputConfiguration.width = outputConfiguration.width;
+        mOutputConfiguration.height = outputConfiguration.height;
+        mOutputConfiguration.windowGroupId = outputConfiguration.windowGroupId;
+        mOutputConfiguration.windowHandles.resize(outputConfiguration.windowHandles.size());
+        mOutputConfiguration.physicalCameraId = outputConfiguration.physicalCameraId;
+        size_t i = 0;
+        for (const auto &handle : outputConfiguration.windowHandles) {
+            mOutputConfiguration.windowHandles[i++] = handle.getNativeHandle();
+        }
+        return *this;
+    }
 
     bool operator ==(const OutputConfiguration &other) const {
         const OutputConfiguration &self = mOutputConfiguration;
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index ba14c5c..63cdb76 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -236,6 +236,11 @@
         return ACameraCaptureSession_capture(mSession, &mCaptureCallbacks, 1, &mStillRequest,
                                              &seqId);
     }
+    int takePicture2() {
+        int seqId;
+        return ACameraCaptureSession_captureV2(mSession, &mCaptureCallbacksV2, 1,
+                &mStillRequest, &seqId);
+    }
 
     int takeLogicalCameraPicture() {
         int seqId;
@@ -243,15 +248,31 @@
                 1, &mStillRequest, &seqId);
     }
 
+    int takeLogicalCameraPicture2() {
+        int seqId;
+        return ACameraCaptureSession_logicalCamera_captureV2(mSession,
+                &mLogicalCaptureCallbacksV2, 1, &mStillRequest, &seqId);
+    }
+
     bool checkCallbacks(int pictureCount) {
         std::lock_guard<std::mutex> lock(mMutex);
         if (mCompletedCaptureCallbackCount != pictureCount) {
-            ALOGE("Completed capture callaback count not as expected. expected %d actual %d",
+            ALOGE("Completed capture callback count not as expected. expected %d actual %d",
                   pictureCount, mCompletedCaptureCallbackCount);
             return false;
         }
         return true;
     }
+    bool checkCallbacksV2(int pictureCount) {
+        std::lock_guard<std::mutex> lock(mMutex);
+        if (mCaptureStartedCallbackCount != pictureCount) {
+            ALOGE("Capture started callback count not as expected. expected %d actual %d",
+                  pictureCount, mCaptureStartedCallbackCount);
+            return false;
+        }
+        return true;
+    }
+
 
    private:
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
@@ -276,6 +297,7 @@
     const char* mCameraId;
     ACameraManager* mCameraManager;
     int mCompletedCaptureCallbackCount = 0;
+    int mCaptureStartedCallbackCount = 0;
     std::mutex mMutex;
     ACameraCaptureSession_captureCallbacks mCaptureCallbacks = {
         // TODO: Add tests for other callbacks
@@ -293,8 +315,25 @@
         nullptr, // onCaptureSequenceAborted
         nullptr, // onCaptureBufferLost
     };
+    ACameraCaptureSession_captureCallbacksV2 mCaptureCallbacksV2 = {
+        this, // context
+        [](void* ctx, ACameraCaptureSession*, const ACaptureRequest*, int64_t,
+              int64_t frameNumber) {
+            CameraHelper *ch = static_cast<CameraHelper *>(ctx);
+            ASSERT_TRUE(frameNumber >= 0);
+            std::lock_guard<std::mutex> lock(ch->mMutex);
+            ch->mCaptureStartedCallbackCount++;
+        },
+        nullptr, // onCaptureProgressed
+        nullptr, // onCaptureCompleted
+        nullptr, // onCaptureFailed
+        nullptr, // onCaptureSequenceCompleted
+        nullptr, // onCaptureSequenceAborted
+        nullptr, // onCaptureBufferLost
+    };
 
     std::vector<std::string> mPhysicalCameraIds;
+
     ACameraCaptureSession_logicalCamera_captureCallbacks mLogicalCaptureCallbacks = {
         // TODO: Add tests for other callbacks
         this, // context
@@ -336,6 +375,23 @@
         nullptr, // onCaptureSequenceAborted
         nullptr, // onCaptureBufferLost
     };
+    ACameraCaptureSession_logicalCamera_captureCallbacksV2 mLogicalCaptureCallbacksV2 = {
+        this, // context
+        [](void* ctx, ACameraCaptureSession*, const ACaptureRequest*, int64_t,
+                int64_t frameNumber) {
+            CameraHelper *ch = static_cast<CameraHelper *>(ctx);
+            ASSERT_TRUE(frameNumber >= 0);
+            std::lock_guard<std::mutex> lock(ch->mMutex);
+            ch->mCaptureStartedCallbackCount++;
+        },
+        nullptr, // onCaptureProgressed
+        nullptr, // onLogicalCaptureCompleted
+        nullptr, // onLogicalCaptureFailed
+        nullptr, // onCaptureSequenceCompleted
+        nullptr, // onCaptureSequenceAborted
+        nullptr, // onCaptureBufferLost
+    };
+
 };
 
 class ImageReaderTestCase {
@@ -570,7 +626,7 @@
     }
 
     bool takePictures(const char* id, uint64_t readerUsage, int readerMaxImages,
-            bool readerAsync, int pictureCount) {
+            bool readerAsync, int pictureCount, bool v2 = false) {
         int ret = 0;
 
         ImageReaderTestCase testCase(
@@ -600,7 +656,11 @@
         }
 
         for (int i = 0; i < pictureCount; i++) {
-            ret = cameraHelper.takePicture();
+            if (v2) {
+                ret = cameraHelper.takePicture2();
+            } else {
+                ret = cameraHelper.takePicture();
+            }
             if (ret < 0) {
                 ALOGE("Unable to take picture");
                 return false;
@@ -617,7 +677,8 @@
             }
         }
         return testCase.getAcquiredImageCount() == pictureCount &&
-                cameraHelper.checkCallbacks(pictureCount);
+               (v2 ? cameraHelper.checkCallbacksV2(pictureCount) :
+                     cameraHelper.checkCallbacks(pictureCount));
     }
 
     bool testTakePicturesNative(const char* id) {
@@ -626,12 +687,14 @@
             for (auto& readerMaxImages : {1, 4, 8}) {
                 for (auto& readerAsync : {true, false}) {
                     for (auto& pictureCount : {1, 4, 8}) {
-                        if (!takePictures(id, readerUsage, readerMaxImages,
-                                readerAsync, pictureCount)) {
-                            ALOGE("Test takePictures failed for test case usage=%" PRIu64
-                                  ", maxImages=%d, async=%d, pictureCount=%d",
-                                  readerUsage, readerMaxImages, readerAsync, pictureCount);
-                            return false;
+                        for (auto& v2 : {true, false}) {
+                            if (!takePictures(id, readerUsage, readerMaxImages,
+                                    readerAsync, pictureCount, v2)) {
+                                ALOGE("Test takePictures failed for test case usage=%" PRIu64
+                                      ", maxImages=%d, async=%d, pictureCount=%d",
+                                      readerUsage, readerMaxImages, readerAsync, pictureCount);
+                                return false;
+                            }
                         }
                     }
                 }
@@ -725,7 +788,7 @@
         return;
     }
 
-    void testLogicalCameraPhysicalStream(bool usePhysicalSettings) {
+    void testLogicalCameraPhysicalStream(bool usePhysicalSettings, bool v2) {
         const char* cameraId = nullptr;
         ACameraMetadata* staticMetadata = nullptr;
         std::vector<const char*> physicalCameraIds;
@@ -772,7 +835,12 @@
         }
 
         for (int i = 0; i < pictureCount; i++) {
-            ret = cameraHelper.takeLogicalCameraPicture();
+            if (v2) {
+                ret = cameraHelper.takeLogicalCameraPicture2();
+            } else {
+                ret = cameraHelper.takeLogicalCameraPicture();
+            }
             ASSERT_EQ(ret, 0);
         }
 
@@ -793,8 +861,11 @@
             ALOGI("Testing window %p", testCase->getNativeWindow());
             ASSERT_EQ(testCase->getAcquiredImageCount(), pictureCount);
         }
-
-        ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));
+        if (v2) {
+            ASSERT_TRUE(cameraHelper.checkCallbacksV2(pictureCount));
+        } else {
+            ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));
+        }
 
         ACameraMetadata_free(staticMetadata);
     }
@@ -834,8 +905,10 @@
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
-    testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/);
-    testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/);
+    for (auto& v2 : {true, false}) {
+        testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
+        testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
+    }
 }
 
 }  // namespace
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 9f2f430..17ea512 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -96,6 +96,12 @@
         return binder::Status::ok();
     };
 
+    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+            int32_t /*torchStrength*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // No op
         return binder::Status::ok();
diff --git a/cmds/OWNERS b/cmds/OWNERS
index 0d32aac..a48c37a 100644
--- a/cmds/OWNERS
+++ b/cmds/OWNERS
@@ -1,3 +1,3 @@
 elaurent@google.com
+essick@google.com
 lajos@google.com
-marcone@google.com
diff --git a/cmds/screenrecord/Android.bp b/cmds/screenrecord/Android.bp
index 359a835..d0b3ce0 100644
--- a/cmds/screenrecord/Android.bp
+++ b/cmds/screenrecord/Android.bp
@@ -55,12 +55,6 @@
         "libGLESv2",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/av/media/libstagefright/include",
-        "frameworks/native/include/media/openmax",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index e6e3473..2e0b678 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -701,7 +701,7 @@
         printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                 layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
                 displayMode.refreshRate, toCString(displayState.orientation),
-                displayState.layerStack);
+                displayState.layerStack.id);
         fflush(stdout);
     }
 
@@ -1067,7 +1067,7 @@
 
     std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
     if (!displayId) {
-        fprintf(stderr, "Failed to get token for internal display\n");
+        fprintf(stderr, "Failed to get ID for internal display\n");
         return 1;
     }
 
@@ -1168,17 +1168,14 @@
             }
             break;
         case 'd':
-            gPhysicalDisplayId = PhysicalDisplayId(atoll(optarg));
-            if (gPhysicalDisplayId.value == 0) {
-                fprintf(stderr, "Please specify a valid physical display id\n");
-                return 2;
-            } else if (SurfaceComposerClient::
-                    getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
-                fprintf(stderr, "Invalid physical display id: %s\n",
-                        to_string(gPhysicalDisplayId).c_str());
-                return 2;
+            if (const auto id = android::DisplayId::fromValue<PhysicalDisplayId>(atoll(optarg));
+                id && SurfaceComposerClient::getPhysicalDisplayToken(*id)) {
+                gPhysicalDisplayId = *id;
+                break;
             }
-            break;
+
+            fprintf(stderr, "Invalid physical display ID\n");
+            return 2;
         default:
             if (ic != '?') {
                 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/stagefright/Android.bp b/cmds/stagefright/Android.bp
new file mode 100644
index 0000000..e1fe07e
--- /dev/null
+++ b/cmds/stagefright/Android.bp
@@ -0,0 +1,276 @@
+package {
+    default_applicable_licenses: ["frameworks_av_cmds_stagefright_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_cmds_stagefright_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_binary {
+    name: "stagefright",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "stagefright.cpp",
+        "jpeg.cpp",
+        "SineSource.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "libmedia_codeclist",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libjpeg",
+        "libui",
+        "libgui",
+        "libcutils",
+        "liblog",
+        "libhidlbase",
+        "libdatasource",
+        "libaudioclient",
+        "android.hardware.media.omx@1.0",
+        "framework-permission-aidl-cpp",
+    ],
+
+    static_libs: ["framework-permission-aidl-cpp"],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+
+    system_ext_specific: true,
+}
+
+cc_binary {
+    name: "record",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "SineSource.cpp",
+        "record.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+        "camera_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libdatasource",
+        "libaudioclient",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "recordvideo",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "recordvideo.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libaudioclient",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "audioloop",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "SineSource.cpp",
+        "audioloop.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libaudioclient",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "stream",
+
+    srcs: ["stream.cpp"],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libui",
+        "libgui",
+        "libstagefright_foundation",
+        "libmedia",
+        "libcutils",
+        "libdatasource",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "codec",
+
+    srcs: [
+        "codec.cpp",
+        "SimplePlayer.cpp",
+    ],
+
+    header_libs: [
+        "libmediadrm_headers",
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libmedia",
+        "libmedia_omx",
+        "libaudioclient",
+        "libui",
+        "libgui",
+        "libcutils",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "mediafilter",
+
+    srcs: [
+        "filters/argbtorgba.rscript",
+        "filters/nightvision.rscript",
+        "filters/saturation.rscript",
+        "mediafilter.cpp",
+    ],
+
+    header_libs: [
+        "libmediadrm_headers",
+        "libmediametrics_headers",
+        "libstagefright_headers",
+        "rs-headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libmedia_omx",
+        "libui",
+        "libgui",
+        "libRScpp",
+    ],
+
+    static_libs: ["libstagefright_mediafilter"],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+
+    sanitize: {
+        cfi: true,
+    },
+}
+
+cc_binary {
+    name: "muxer",
+
+    srcs: ["muxer.cpp"],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libcutils",
+        "libc",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
deleted file mode 100644
index 803c4a4..0000000
--- a/cmds/stagefright/Android.mk
+++ /dev/null
@@ -1,276 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=       \
-        AudioPlayer.cpp \
-        stagefright.cpp \
-        jpeg.cpp        \
-        SineSource.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia libmedia_codeclist libutils libbinder \
-        libstagefright_foundation libjpeg libui libgui libcutils liblog \
-        libhidlbase libdatasource libaudioclient \
-        android.hardware.media.omx@1.0 \
-        framework-permission-aidl-cpp
-
-LOCAL_STATIC_LIBRARIES := framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/av/media/libstagefright/include \
-        frameworks/native/include/media/openmax \
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_SYSTEM_EXT_MODULE:= true
-LOCAL_MODULE:= stagefright
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        AudioPlayer.cpp \
-        SineSource.cpp    \
-        record.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libdatasource libaudioclient \
-        framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/camera/include \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax \
-        frameworks/native/include/media/hardware
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= record
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        AudioPlayer.cpp \
-        recordvideo.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libaudioclient
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax \
-        frameworks/native/include/media/hardware \
-        framework-permission-aidl-cpp
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= recordvideo
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        AudioPlayer.cpp \
-        SineSource.cpp    \
-        audioloop.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libaudioclient \
-        framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= audioloop
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        stream.cpp    \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libui libgui \
-        libstagefright_foundation libmedia libcutils libdatasource
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= stream
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=               \
-        codec.cpp               \
-        SimplePlayer.cpp        \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediadrm_headers \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libstagefright_foundation \
-        libmedia libmedia_omx libaudioclient libui libgui libcutils
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= codec
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        filters/argbtorgba.rscript \
-        filters/nightvision.rscript \
-        filters/saturation.rscript \
-        mediafilter.cpp \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediadrm_headers \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright \
-        liblog \
-        libutils \
-        libbinder \
-        libstagefright_foundation \
-        libmedia_omx \
-        libui \
-        libgui \
-        libRScpp \
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax \
-        frameworks/rs/cpp \
-        frameworks/rs \
-
-intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
-LOCAL_C_INCLUDES += $(intermediates)
-
-LOCAL_STATIC_LIBRARIES:= \
-        libstagefright_mediafilter
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= mediafilter
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-LOCAL_SANITIZE := cfi
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=               \
-        muxer.cpp            \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libstagefright_foundation \
-        libcutils libc
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= muxer
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index 55427ca..6cddf47 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -249,7 +249,8 @@
 
         mAudioTrack = new AudioTrack(
                 AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask,
-                0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this,
+                0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE,
+                wp<IAudioTrackCallback>::fromExisting(this),
                 0 /*notificationFrames*/);
 
         if ((err = mAudioTrack->initCheck()) != OK) {
@@ -397,10 +398,6 @@
     mStartPosUs = 0;
 }
 
-// static
-void AudioPlayer::AudioCallback(int event, void *user, void *info) {
-    static_cast<AudioPlayer *>(user)->AudioCallback(event, info);
-}
 
 bool AudioPlayer::reachedEOS(status_t *finalStatus) {
     *finalStatus = OK;
@@ -455,20 +452,12 @@
     return 0;
 }
 
-void AudioPlayer::AudioCallback(int event, void *info) {
-    switch (event) {
-    case AudioTrack::EVENT_MORE_DATA:
-        {
-        AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
-        size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size);
-        buffer->size = numBytesWritten;
-        }
-        break;
+size_t AudioPlayer::onMoreData(const AudioTrack::Buffer& buffer) {
+    return fillBuffer(buffer.data(), buffer.size());
+}
 
-    case AudioTrack::EVENT_STREAM_END:
-        mReachedEOS = true;
-        break;
-    }
+void AudioPlayer::onStreamEnd() {
+    mReachedEOS = true;
 }
 
 size_t AudioPlayer::fillBuffer(void *data, size_t size) {
diff --git a/cmds/stagefright/AudioPlayer.h b/cmds/stagefright/AudioPlayer.h
index 43550ea..608f54b 100644
--- a/cmds/stagefright/AudioPlayer.h
+++ b/cmds/stagefright/AudioPlayer.h
@@ -19,6 +19,7 @@
 #define AUDIO_PLAYER_H_
 
 #include <media/AudioResamplerPublic.h>
+#include <media/AudioTrack.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -26,10 +27,9 @@
 
 namespace android {
 
-class AudioTrack;
 struct AwesomePlayer;
 
-class AudioPlayer {
+class AudioPlayer : AudioTrack::IAudioTrackCallback {
 public:
     enum {
         REACHED_EOS,
@@ -66,6 +66,9 @@
     status_t getPlaybackRate(AudioPlaybackRate *rate /* nonnull */);
 
 private:
+    friend sp<AudioPlayer>;
+    size_t onMoreData(const AudioTrack::Buffer& buffer) override;
+    void onStreamEnd() override;
     sp<MediaSource> mSource;
     sp<AudioTrack> mAudioTrack;
 
@@ -99,9 +102,6 @@
     int64_t mStartPosUs;
     const uint32_t mCreateFlags;
 
-    static void AudioCallback(int event, void *user, void *info);
-    void AudioCallback(int event, void *info);
-
     static size_t AudioSinkCallback(
             MediaPlayerBase::AudioSink *audioSink,
             void *data, size_t size, void *me,
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index 4b41ff8..83f8fe9 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -166,9 +166,9 @@
         sp<MediaSource> decoder = SimpleDecodingSource::Create(encoder);
 
         if (playToSpeaker) {
-            AudioPlayer player(NULL);
-            player.setSource(decoder);
-            player.start();
+            sp<AudioPlayer> player = sp<AudioPlayer>::make(nullptr);
+            player->setSource(decoder);
+            player->start();
             sleep(duration);
 
 ALOGI("Line: %d", __LINE__);
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 098c278..5743ad6 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -32,7 +32,6 @@
 #include <media/stagefright/SimpleDecodingSource.h>
 #include <media/MediaPlayerInterface.h>
 
-#include "AudioPlayer.h"
 
 using namespace android;
 
@@ -274,17 +273,6 @@
     const int32_t kNumChannels = 2;
     sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels);
 
-#if 0
-    sp<MediaPlayerBase::AudioSink> audioSink;
-    AudioPlayer *player = new AudioPlayer(audioSink);
-    player->setSource(audioSource);
-    player->start();
-
-    sleep(10);
-
-    player->stop();
-#endif
-
     sp<AMessage> encMeta = new AMessage;
     encMeta->setString("mime",
             0 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index c430f05..9783855 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -70,6 +70,10 @@
 
 using namespace android;
 
+namespace {
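+    // Negative sentinel, not a real HAL pixel format: it requests RGBA_1010102
+    // from the retriever and converts the frame to RGBA_8888 before writing it
+    // out (see the -P handling below).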
+    constexpr static int PIXEL_FORMAT_RGBA_1010102_AS_8888 = -HAL_PIXEL_FORMAT_RGBA_1010102;
+}
+
 static long gNumRepetitions;
 static long gMaxNumFrames;  // 0 means decode all available.
 static long gReproduceBug;  // if not -1.
@@ -220,7 +224,7 @@
     }
 
     if (gPlaybackAudio) {
-        AudioPlayer *player = new AudioPlayer(NULL);
+        sp<AudioPlayer> player = sp<AudioPlayer>::make(nullptr);
         player->setSource(rawSource);
         rawSource.clear();
 
@@ -235,9 +239,6 @@
             fprintf(stderr, "unable to start playback err=%d (0x%08x)\n", err, err);
         }
 
-        delete player;
-        player = NULL;
-
         return;
     } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
         int64_t durationUs;
@@ -629,7 +630,14 @@
     fprintf(stderr, "       -m max-number-of-frames-to-decode in each pass\n");
     fprintf(stderr, "       -b bug to reproduce\n");
     fprintf(stderr, "       -i(nfo) dump codec info (profiles and color formats supported, details)\n");
-    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art\n");
+    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art (/sdcard/out.jpg)\n");
+    fprintf(stderr, "       -P(ixelFormat) pixel format to use for raw thumbnail "
+                    "(/sdcard/out.raw)\n");
+    fprintf(stderr, "          %d: RGBA_565\n", HAL_PIXEL_FORMAT_RGB_565);
+    fprintf(stderr, "          %d: RGBA_8888\n", HAL_PIXEL_FORMAT_RGBA_8888);
+    fprintf(stderr, "          %d: BGRA_8888\n", HAL_PIXEL_FORMAT_BGRA_8888);
+    fprintf(stderr, "          %d: RGBA_1010102\n", HAL_PIXEL_FORMAT_RGBA_1010102);
+    fprintf(stderr, "          %d: RGBA_1010102 as RGBA_8888\n", PIXEL_FORMAT_RGBA_1010102_AS_8888);
     fprintf(stderr, "       -s(oftware) prefer software codec\n");
     fprintf(stderr, "       -r(hardware) force to use hardware codec\n");
     fprintf(stderr, "       -o playback audio\n");
@@ -787,6 +795,7 @@
     bool useSurfaceTexAlloc = false;
     bool dumpStream = false;
     bool dumpPCMStream = false;
+    int32_t pixelFormat = 0;        // thumbnail pixel format
     String8 dumpStreamFilename;
     gNumRepetitions = 1;
     gMaxNumFrames = 0;
@@ -800,7 +809,7 @@
     sp<android::ALooper> looper;
 
     int res;
-    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
+    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:P:")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -841,6 +850,7 @@
                 break;
             }
 
+            case 'P':
             case 'm':
             case 'n':
             case 'b':
@@ -856,6 +866,8 @@
                     gNumRepetitions = x;
                 } else if (res == 'm') {
                     gMaxNumFrames = x;
+                } else if (res == 'P') {
+                    pixelFormat = x;
                 } else {
                     CHECK_EQ(res, 'b');
                     gReproduceBug = x;
@@ -978,24 +990,71 @@
             close(fd);
             fd = -1;
 
+            uint32_t retrieverPixelFormat = HAL_PIXEL_FORMAT_RGB_565;
+            if (pixelFormat == PIXEL_FORMAT_RGBA_1010102_AS_8888) {
+                retrieverPixelFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
+            } else if (pixelFormat) {
+                retrieverPixelFormat = pixelFormat;
+            }
             sp<IMemory> mem =
                     retriever->getFrameAtTime(-1,
                             MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
-                            HAL_PIXEL_FORMAT_RGB_565,
-                            false /*metaOnly*/);
+                            retrieverPixelFormat, false /*metaOnly*/);
 
             if (mem != NULL) {
                 failed = false;
-                printf("getFrameAtTime(%s) => OK\n", filename);
+                printf("getFrameAtTime(%s) format=%d => OK\n", filename, retrieverPixelFormat);
 
                 VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
 
-                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
-                            frame->getFlattenedData(),
-                            frame->mWidth, frame->mHeight), 0);
+                if (pixelFormat) {
+                    int bpp = 0;
+                    switch (pixelFormat) {
+                    case HAL_PIXEL_FORMAT_RGB_565:
+                        bpp = 2;
+                        break;
+                    case PIXEL_FORMAT_RGBA_1010102_AS_8888:
+                        // convert RGBA_1010102 to RGBA_8888
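+                        // Source word layout (low to high bits): R 0-9,
+                        // G 10-19, B 20-29, A 30-31. The top 8 bits of each
+                        // color channel are kept and the 2-bit alpha is
+                        // replicated to 8 bits; no dithering is applied.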
+                        {
+                            uint32_t *data = (uint32_t *)frame->getFlattenedData();
+                            uint32_t *end = data + frame->mWidth * frame->mHeight;
+                            for (; data < end; ++data) {
+                                *data =
+                                    // pick out 8-bit R, G, B values and move them to the
+                                    // correct position
+                                    ( (*data &      0x3fc) >> 2) | // R
+                                    ( (*data &    0xff000) >> 4) | // G
+                                    ( (*data & 0x3fc00000) >> 6) | // B
+                                    // pick out 2-bit A and expand to 8-bits
+                                    (((*data & 0xc0000000) >> 6) * 0x55);
+                            }
+                        }
+
+                        FALLTHROUGH_INTENDED;
+
+                    case HAL_PIXEL_FORMAT_RGBA_1010102:
+                    case HAL_PIXEL_FORMAT_RGBA_8888:
+                    case HAL_PIXEL_FORMAT_BGRA_8888:
+                        bpp = 4;
+                        break;
+                    }
+                    if (bpp) {
+                        FILE *out = fopen("/sdcard/out.raw", "wb");
+                        fwrite(frame->getFlattenedData(), bpp * frame->mWidth, frame->mHeight, out);
+                        fclose(out);
+
+                        printf("write out %d x %d x %dbpp\n", frame->mWidth, frame->mHeight, bpp);
+                    } else {
+                        printf("unknown pixel format.\n");
+                    }
+                } else {
+                    CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
+                                frame->getFlattenedData(),
+                                frame->mWidth, frame->mHeight), 0);
+                }
             }
 
-            {
+            if (!pixelFormat) {
                 mem = retriever->extractAlbumArt();
 
                 if (mem != NULL) {
diff --git a/drm/OWNERS b/drm/OWNERS
index e788754..090c021 100644
--- a/drm/OWNERS
+++ b/drm/OWNERS
@@ -1 +1,3 @@
 jtinker@google.com
+kelzhan@google.com
+robertshih@google.com
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 74e3223..f7989bd 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -208,7 +208,11 @@
 }
 
 status_t DrmManager::loadPlugIns() {
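+    // A 64-bit process can only load 64-bit plugin libraries, so pick the
+    // directory that matches the bitness of this process.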
+#if __LP64__
+    String8 pluginDirPath("/system/lib64/drm");
+#else
     String8 pluginDirPath("/system/lib/drm");
+#endif
     loadPlugIns(pluginDirPath);
     return DRM_NO_ERROR;
 }
diff --git a/drm/drmserver/drmserver.rc b/drm/drmserver/drmserver.rc
index eb176c1..0319ff9 100644
--- a/drm/drmserver/drmserver.rc
+++ b/drm/drmserver/drmserver.rc
@@ -3,7 +3,7 @@
     class main
     user drm
     group drm system inet drmrpc readproc
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
 
 on property:drm.service.enabled=true
     start drm
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 0ffe626..408d216 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -28,8 +28,13 @@
         "DrmSessionManager.cpp",
         "SharedLibrary.cpp",
         "DrmHal.cpp",
+        "DrmHalHidl.cpp",
+        "DrmHalAidl.cpp",
         "CryptoHal.cpp",
+        "CryptoHalHidl.cpp",
+        "CryptoHalAidl.cpp",
         "DrmUtils.cpp",
+        "DrmHalListener.cpp",
     ],
 
     local_include_dirs: [
@@ -63,10 +68,12 @@
         "android.hardware.drm@1.4",
         "libhidlallocatorutils",
         "libhidlbase",
+        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
+        "libaidlcommonsupport",
     ],
 
     export_shared_lib_headers: [
@@ -162,10 +169,6 @@
         "DrmMetricsConsumer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libmedia/include"
-    ],
-
     shared_libs: [
         "android.hardware.drm@1.0",
         "android.hardware.drm@1.1",
@@ -181,5 +184,6 @@
     header_libs: [
         "libmediametrics_headers",
         "libstagefright_foundation_headers",
+        "libmedia_headers",
     ],
 }
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index e0db1c4..f95d527 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -16,389 +16,100 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "CryptoHal"
-#include <utils/Log.h>
-
-#include <android/hardware/drm/1.0/types.h>
-#include <android/hidl/manager/1.2/IServiceManager.h>
-#include <hidl/ServiceManagement.h>
-#include <hidlmemory/FrameworkUtils.h>
-#include <media/hardware/CryptoAPI.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
 #include <mediadrm/CryptoHal.h>
+#include <mediadrm/CryptoHalHidl.h>
+#include <mediadrm/CryptoHalAidl.h>
 #include <mediadrm/DrmUtils.h>
 
-using drm::V1_0::BufferType;
-using drm::V1_0::DestinationBuffer;
-using drm::V1_0::ICryptoFactory;
-using drm::V1_0::ICryptoPlugin;
-using drm::V1_0::Mode;
-using drm::V1_0::Pattern;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-
-using ::android::DrmUtils::toStatusT;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_handle;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::HidlMemory;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-
-typedef drm::V1_2::Status Status_V1_2;
-
 namespace android {
 
-static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t> &vector) {
-    hidl_vec<uint8_t> vec;
-    vec.setToExternal(const_cast<uint8_t *>(vector.array()), vector.size());
-    return vec;
+CryptoHal::CryptoHal() {
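+    // CryptoHal is a thin wrapper that owns both backends; each call below is
+    // routed to the AIDL plugin when it is usable and to the HIDL plugin
+    // otherwise.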
+    mCryptoHalAidl = sp<CryptoHalAidl>::make();
+    mCryptoHalHidl = sp<CryptoHalHidl>::make();
 }
 
-static hidl_vec<uint8_t> toHidlVec(const void *ptr, size_t size) {
-    hidl_vec<uint8_t> vec;
-    vec.resize(size);
-    memcpy(vec.data(), ptr, size);
-    return vec;
-}
-
-static hidl_array<uint8_t, 16> toHidlArray16(const uint8_t *ptr) {
-    if (!ptr) {
-        return hidl_array<uint8_t, 16>();
-    }
-    return hidl_array<uint8_t, 16>(ptr);
-}
-
-
-static String8 toString8(hidl_string hString) {
-    return String8(hString.c_str());
-}
-
-
-CryptoHal::CryptoHal()
-    : mFactories(makeCryptoFactories()),
-      mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT),
-      mHeapSeqNum(0) {
-}
-
-CryptoHal::~CryptoHal() {
-}
-
-Vector<sp<ICryptoFactory>> CryptoHal::makeCryptoFactories() {
-    Vector<sp<ICryptoFactory>> factories;
-
-    auto manager = hardware::defaultServiceManager1_2();
-    if (manager != NULL) {
-        manager->listManifestByInterface(drm::V1_0::ICryptoFactory::descriptor,
-                [&factories](const hidl_vec<hidl_string> &registered) {
-                    for (const auto &instance : registered) {
-                        auto factory = drm::V1_0::ICryptoFactory::getService(instance);
-                        if (factory != NULL) {
-                            ALOGD("found drm@1.0 ICryptoFactory %s", instance.c_str());
-                            factories.push_back(factory);
-                        }
-                    }
-                }
-            );
-        manager->listManifestByInterface(drm::V1_1::ICryptoFactory::descriptor,
-                [&factories](const hidl_vec<hidl_string> &registered) {
-                    for (const auto &instance : registered) {
-                        auto factory = drm::V1_1::ICryptoFactory::getService(instance);
-                        if (factory != NULL) {
-                            ALOGD("found drm@1.1 ICryptoFactory %s", instance.c_str());
-                            factories.push_back(factory);
-                        }
-                    }
-                }
-            );
-    }
-
-    if (factories.size() == 0) {
-        // must be in passthrough mode, load the default passthrough service
-        auto passthrough = ICryptoFactory::getService();
-        if (passthrough != NULL) {
-            ALOGI("makeCryptoFactories: using default passthrough crypto instance");
-            factories.push_back(passthrough);
-        } else {
-            ALOGE("Failed to find any crypto factories");
-        }
-    }
-    return factories;
-}
-
-sp<ICryptoPlugin> CryptoHal::makeCryptoPlugin(const sp<ICryptoFactory>& factory,
-        const uint8_t uuid[16], const void *initData, size_t initDataSize) {
-
-    sp<ICryptoPlugin> plugin;
-    Return<void> hResult = factory->createPlugin(toHidlArray16(uuid),
-            toHidlVec(initData, initDataSize),
-            [&](Status status, const sp<ICryptoPlugin>& hPlugin) {
-                if (status != Status::OK) {
-                    ALOGE("Failed to make crypto plugin");
-                    return;
-                }
-                plugin = hPlugin;
-            }
-        );
-    if (!hResult.isOk()) {
-        mInitCheck = DEAD_OBJECT;
-    }
-    return plugin;
-}
-
+CryptoHal::~CryptoHal() {}
 
 status_t CryptoHal::initCheck() const {
-    return mInitCheck;
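+    // OK if either backend initialized, NO_INIT if either is still waiting
+    // for createPlugin(), otherwise surface the HIDL backend's error.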
+    if (mCryptoHalAidl->initCheck() == OK || mCryptoHalHidl->initCheck() == OK) return OK;
+    if (mCryptoHalAidl->initCheck() == NO_INIT || mCryptoHalHidl->initCheck() == NO_INIT)
+        return NO_INIT;
+    return mCryptoHalHidl->initCheck();
 }
 
-
 bool CryptoHal::isCryptoSchemeSupported(const uint8_t uuid[16]) {
-    Mutex::Autolock autoLock(mLock);
-
-    for (size_t i = 0; i < mFactories.size(); i++) {
-        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
-            return true;
-        }
-    }
-    return false;
+    return mCryptoHalAidl->isCryptoSchemeSupported(uuid) ||
+           mCryptoHalHidl->isCryptoSchemeSupported(uuid);
 }
 
-status_t CryptoHal::createPlugin(const uint8_t uuid[16], const void *data,
-        size_t size) {
-    Mutex::Autolock autoLock(mLock);
-
-    for (size_t i = 0; i < mFactories.size(); i++) {
-        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
-            mPlugin = makeCryptoPlugin(mFactories[i], uuid, data, size);
-            if (mPlugin != NULL) {
-                mPluginV1_2 = drm::V1_2::ICryptoPlugin::castFrom(mPlugin);
-            }
-        }
-    }
-
-    if (mInitCheck == NO_INIT) {
-        mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
-    }
-
-    return mInitCheck;
+status_t CryptoHal::createPlugin(const uint8_t uuid[16], const void* data, size_t size) {
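+    // Try the AIDL backend first and fall back to HIDL if it cannot create a
+    // plugin for this scheme.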
+    if (mCryptoHalAidl->createPlugin(uuid, data, size) != OK)
+        return mCryptoHalHidl->createPlugin(uuid, data, size);
+    return OK;
 }
 
 status_t CryptoHal::destroyPlugin() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    mPlugin.clear();
-    mPluginV1_2.clear();
-    return OK;
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->destroyPlugin();
+    return mCryptoHalHidl->destroyPlugin();
 }
 
-bool CryptoHal::requiresSecureDecoderComponent(const char *mime) const {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return false;
-    }
-
-    Return<bool> hResult = mPlugin->requiresSecureDecoderComponent(hidl_string(mime));
-    if (!hResult.isOk()) {
-        return false;
-    }
-    return hResult;
-}
-
-
-/**
- * If the heap base isn't set, get the heap base from the HidlMemory
- * and send it to the HAL so it can map a remote heap of the same
- * size.  Once the heap base is established, shared memory buffers
- * are sent by providing an offset into the heap and a buffer size.
- */
-int32_t CryptoHal::setHeapBase(const sp<HidlMemory>& heap) {
-    if (heap == NULL || mHeapSeqNum < 0) {
-        ALOGE("setHeapBase(): heap %p mHeapSeqNum %d", heap.get(), mHeapSeqNum);
-        return -1;
-    }
-
-    Mutex::Autolock autoLock(mLock);
-
-    int32_t seqNum = mHeapSeqNum++;
-    uint32_t bufferId = static_cast<uint32_t>(seqNum);
-    mHeapSizes.add(seqNum, heap->size());
-    Return<void> hResult = mPlugin->setSharedBufferBase(*heap, bufferId);
-    ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
-    return seqNum;
-}
-
-void CryptoHal::clearHeapBase(int32_t seqNum) {
-    Mutex::Autolock autoLock(mLock);
-
-    /*
-     * Clear the remote shared memory mapping by setting the shared
-     * buffer base to a null hidl_memory.
-     *
-     * TODO: Add a releaseSharedBuffer method in a future DRM HAL
-     * API version to make this explicit.
-     */
-    ssize_t index = mHeapSizes.indexOfKey(seqNum);
-    if (index >= 0) {
-        if (mPlugin != NULL) {
-            uint32_t bufferId = static_cast<uint32_t>(seqNum);
-            Return<void> hResult = mPlugin->setSharedBufferBase(hidl_memory(), bufferId);
-            ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
-        }
-        mHeapSizes.removeItem(seqNum);
-    }
-}
-
-status_t CryptoHal::checkSharedBuffer(const ::SharedBuffer &buffer) {
-    int32_t seqNum = static_cast<int32_t>(buffer.bufferId);
-    // memory must be in one of the heaps that have been set
-    if (mHeapSizes.indexOfKey(seqNum) < 0) {
-        return UNKNOWN_ERROR;
-    }
-
-    // memory must be within the address space of the heap
-    size_t heapSize = mHeapSizes.valueFor(seqNum);
-    if (heapSize < buffer.offset + buffer.size ||
-            SIZE_MAX - buffer.offset < buffer.size) {
-        android_errorWriteLog(0x534e4554, "76221123");
-        return UNKNOWN_ERROR;
-    }
-
-    return OK;
-}
-
-ssize_t CryptoHal::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
-        CryptoPlugin::Mode mode, const CryptoPlugin::Pattern &pattern,
-        const ::SharedBuffer &hSource, size_t offset,
-        const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
-        const ::DestinationBuffer &hDestination, AString *errorDetailMsg) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    Mode hMode;
-    switch(mode) {
-    case CryptoPlugin::kMode_Unencrypted:
-        hMode = Mode::UNENCRYPTED ;
-        break;
-    case CryptoPlugin::kMode_AES_CTR:
-        hMode = Mode::AES_CTR;
-        break;
-    case CryptoPlugin::kMode_AES_WV:
-        hMode = Mode::AES_CBC_CTS;
-        break;
-    case CryptoPlugin::kMode_AES_CBC:
-        hMode = Mode::AES_CBC;
-        break;
-    default:
-        return UNKNOWN_ERROR;
-    }
-
-    Pattern hPattern;
-    hPattern.encryptBlocks = pattern.mEncryptBlocks;
-    hPattern.skipBlocks = pattern.mSkipBlocks;
-
-    std::vector<SubSample> stdSubSamples;
-    for (size_t i = 0; i < numSubSamples; i++) {
-        SubSample subSample;
-        subSample.numBytesOfClearData = subSamples[i].mNumBytesOfClearData;
-        subSample.numBytesOfEncryptedData = subSamples[i].mNumBytesOfEncryptedData;
-        stdSubSamples.push_back(subSample);
-    }
-    auto hSubSamples = hidl_vec<SubSample>(stdSubSamples);
-
-    bool secure;
-    if (hDestination.type == BufferType::SHARED_MEMORY) {
-        status_t status = checkSharedBuffer(hDestination.nonsecureMemory);
-        if (status != OK) {
-            return status;
-        }
-        secure = false;
-    } else if (hDestination.type == BufferType::NATIVE_HANDLE) {
-        secure = true;
-    } else {
-        android_errorWriteLog(0x534e4554, "70526702");
-        return UNKNOWN_ERROR;
-    }
-
-    status_t status = checkSharedBuffer(hSource);
-    if (status != OK) {
-        return status;
-    }
-
-    status_t err = UNKNOWN_ERROR;
-    uint32_t bytesWritten = 0;
-
-    Return<void> hResult;
-
-    mLock.unlock();
-    if (mPluginV1_2 != NULL) {
-        hResult = mPluginV1_2->decrypt_1_2(secure, toHidlArray16(keyId), toHidlArray16(iv),
-                hMode, hPattern, hSubSamples, hSource, offset, hDestination,
-                [&](Status_V1_2 status, uint32_t hBytesWritten, hidl_string hDetailedError) {
-                    if (status == Status_V1_2::OK) {
-                        bytesWritten = hBytesWritten;
-                        *errorDetailMsg = toString8(hDetailedError);
-                    }
-                    err = toStatusT(status);
-                }
-            );
-    } else {
-        hResult = mPlugin->decrypt(secure, toHidlArray16(keyId), toHidlArray16(iv),
-                hMode, hPattern, hSubSamples, hSource, offset, hDestination,
-                [&](Status status, uint32_t hBytesWritten, hidl_string hDetailedError) {
-                    if (status == Status::OK) {
-                        bytesWritten = hBytesWritten;
-                        *errorDetailMsg = toString8(hDetailedError);
-                    }
-                    err = toStatusT(status);
-                }
-            );
-    }
-
-    err = hResult.isOk() ? err : DEAD_OBJECT;
-    if (err == OK) {
-        return bytesWritten;
-    }
-    return err;
+bool CryptoHal::requiresSecureDecoderComponent(const char* mime) const {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK)
+        return mCryptoHalAidl->requiresSecureDecoderComponent(mime);
+    return mCryptoHalHidl->requiresSecureDecoderComponent(mime);
 }
 
 void CryptoHal::notifyResolution(uint32_t width, uint32_t height) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK) {
+        mCryptoHalAidl->notifyResolution(width, height);
         return;
     }
 
-    auto hResult = mPlugin->notifyResolution(width, height);
-    ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
+    mCryptoHalHidl->notifyResolution(width, height);
 }
 
-status_t CryptoHal::setMediaDrmSession(const Vector<uint8_t> &sessionId) {
-    Mutex::Autolock autoLock(mLock);
+status_t CryptoHal::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->setMediaDrmSession(sessionId);
+    return mCryptoHalHidl->setMediaDrmSession(sessionId);
+}
 
-    if (mInitCheck != OK) {
-        return mInitCheck;
+ssize_t CryptoHal::decrypt(const uint8_t key[16], const uint8_t iv[16], CryptoPlugin::Mode mode,
+                           const CryptoPlugin::Pattern& pattern, const ::SharedBuffer& source,
+                           size_t offset, const CryptoPlugin::SubSample* subSamples,
+                           size_t numSubSamples, const ::DestinationBuffer& destination,
+                           AString* errorDetailMsg) {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK)
+        return mCryptoHalAidl->decrypt(key, iv, mode, pattern, source, offset, subSamples,
+                                       numSubSamples, destination, errorDetailMsg);
+    return mCryptoHalHidl->decrypt(key, iv, mode, pattern, source, offset, subSamples,
+                                   numSubSamples, destination, errorDetailMsg);
+}
+
+int32_t CryptoHal::setHeap(const sp<HidlMemory>& heap) {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->setHeap(heap);
+    return mCryptoHalHidl->setHeap(heap);
+}
+
+void CryptoHal::unsetHeap(int32_t seqNum) {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK) {
+        mCryptoHalAidl->unsetHeap(seqNum);
+        return;
     }
 
-    auto err = mPlugin->setMediaDrmSession(toHidlVec(sessionId));
-    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+    mCryptoHalHidl->unsetHeap(seqNum);
 }
 
-status_t CryptoHal::getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const {
-    Mutex::Autolock autoLock(mLock);
-    return DrmUtils::GetLogMessages<drm::V1_4::ICryptoPlugin>(mPlugin, logs);
+status_t CryptoHal::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    // This requires a plugin to have been created.
+    if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->getLogMessages(logs);
+    return mCryptoHalHidl->getLogMessages(logs);
 }
-}  // namespace android
+
+}  // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
new file mode 100644
index 0000000..8b9d1de
--- /dev/null
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -0,0 +1,425 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CryptoHalAidl"
+
+#include <aidlcommonsupport/NativeHandle.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <media/hardware/CryptoAPI.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <mediadrm/CryptoHalAidl.h>
+#include <mediadrm/DrmUtils.h>
+
+using ::aidl::android::hardware::drm::CryptoSchemes;
+using DestinationBufferAidl = ::aidl::android::hardware::drm::DestinationBuffer;
+using ::aidl::android::hardware::drm::Mode;
+using ::aidl::android::hardware::drm::Pattern;
+using SharedBufferAidl = ::aidl::android::hardware::drm::SharedBuffer;
+using ::aidl::android::hardware::drm::Status;
+using ::aidl::android::hardware::drm::SubSample;
+using ::aidl::android::hardware::drm::Uuid;
+using ::aidl::android::hardware::drm::SecurityLevel;
+using NativeHandleAidlCommon = ::aidl::android::hardware::common::NativeHandle;
+using ::aidl::android::hardware::drm::DecryptArgs;
+
+using ::android::sp;
+using ::android::DrmUtils::statusAidlToStatusT;
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::HidlMemory;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+// -------Hidl interface related-----------------
+// TODO: replace before removing hidl interface
+
+using BufferTypeHidl = ::android::hardware::drm::V1_0::BufferType;
+using SharedBufferHidl = ::android::hardware::drm::V1_0::SharedBuffer;
+using DestinationBufferHidl = ::android::hardware::drm::V1_0::DestinationBuffer;
+
+// -------Hidl interface related end-------------
+
+namespace android {
+
+template <typename Byte = uint8_t>
+static std::vector<Byte> toStdVec(const Vector<uint8_t>& vector) {
+    auto v = reinterpret_cast<const Byte*>(vector.array());
+    std::vector<Byte> vec(v, v + vector.size());
+    return vec;
+}
+
+// -------Hidl interface related-----------------
+// TODO: replace before removing hidl interface
+status_t CryptoHalAidl::checkSharedBuffer(const SharedBufferHidl& buffer) {
+    int32_t seqNum = static_cast<int32_t>(buffer.bufferId);
+    // memory must be in one of the heaps that have been set
+    if (mHeapSizes.indexOfKey(seqNum) < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    // memory must be within the address space of the heap
+    size_t heapSize = mHeapSizes.valueFor(seqNum);
+    if (heapSize < buffer.offset + buffer.size || SIZE_MAX - buffer.offset < buffer.size) {
+        android_errorWriteLog(0x534e4554, "76221123");
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+static SharedBufferAidl hidlSharedBufferToAidlSharedBuffer(const SharedBufferHidl& buffer) {
+    SharedBufferAidl aidlsb;
+    aidlsb.bufferId = buffer.bufferId;
+    aidlsb.offset = buffer.offset;
+    aidlsb.size = buffer.size;
+    return aidlsb;
+}
+
+static DestinationBufferAidl hidlDestinationBufferToAidlDestinationBuffer(
+        const DestinationBufferHidl& buffer) {
+    DestinationBufferAidl aidldb;
+    // BufferTypeHidl has only two values, so no further range check is needed:
+    // anything other than SHARED_MEMORY is treated as secure memory.
+    switch(buffer.type) {
+        case BufferTypeHidl::SHARED_MEMORY:
+            aidldb.set<DestinationBufferAidl::Tag::nonsecureMemory>(
+                hidlSharedBufferToAidlSharedBuffer(buffer.nonsecureMemory));
+            break;
+        default:
+            auto handle = buffer.secureMemory.getNativeHandle();
+            if (handle) {
+                aidldb.set<DestinationBufferAidl::Tag::secureMemory>(
+                    ::android::dupToAidl(handle));
+            } else {
+                NativeHandleAidlCommon emptyhandle;
+                aidldb.set<DestinationBufferAidl::Tag::secureMemory>(
+                    std::move(emptyhandle));
+            }
+            break;
+    }
+
+    return aidldb;
+}
+
+static hidl_vec<uint8_t> toHidlVec(const void* ptr, size_t size) {
+    hidl_vec<uint8_t> vec;
+    vec.resize(size);
+    memcpy(vec.data(), ptr, size);
+    return vec;
+}
+
+static const Vector<uint8_t> toVector(const std::vector<uint8_t>& vec) {
+    Vector<uint8_t> vector;
+    vector.appendArray(vec.data(), vec.size());
+    return *const_cast<const Vector<uint8_t>*>(&vector);
+}
+
+static String8 toString8(const std::string& string) {
+    return String8(string.c_str());
+}
+
+static std::vector<uint8_t> toStdVec(const uint8_t* ptr, size_t n) {
+    if (!ptr) {
+        return std::vector<uint8_t>();
+    }
+    return std::vector<uint8_t>(ptr, ptr + n);
+}
+
+// -------Hidl interface related end--------------
+
+bool CryptoHalAidl::isCryptoSchemeSupportedInternal(const uint8_t uuid[16], int* factoryIdx) {
+    Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
+    for (size_t i = 0; i < mFactories.size(); i++) {
+        CryptoSchemes schemes{};
+        if (mFactories[i]->getSupportedCryptoSchemes(&schemes).isOk()) {
+            if (std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl)) {
+                if (factoryIdx != NULL) *factoryIdx = i;
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+CryptoHalAidl::CryptoHalAidl()
+    : mFactories(DrmUtils::makeDrmFactoriesAidl()),
+      mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT),
+      mHeapSeqNum(0) {}
+
+CryptoHalAidl::~CryptoHalAidl() {}
+
+status_t CryptoHalAidl::initCheck() const {
+    return mInitCheck;
+}
+
+bool CryptoHalAidl::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+    Mutex::Autolock autoLock(mLock);
+
+    return isCryptoSchemeSupportedInternal(uuid, NULL);
+}
+
+status_t CryptoHalAidl::createPlugin(const uint8_t uuid[16], const void* data, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
+    std::vector<uint8_t> dataAidl = toStdVec(toVector(toHidlVec(data, size)));
+    int i = 0;
+    if (isCryptoSchemeSupportedInternal(uuid, &i)) {
+        mPlugin = makeCryptoPlugin(mFactories[i], uuidAidl, dataAidl);
+    }
+
+    if (mInitCheck == NO_INIT) {
+        mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
+    }
+
+    return mInitCheck;
+}
+
+std::shared_ptr<ICryptoPluginAidl> CryptoHalAidl::makeCryptoPlugin(
+        const std::shared_ptr<IDrmFactoryAidl>& factory, const Uuid& uuidAidl,
+        const std::vector<uint8_t> initData) {
+    std::shared_ptr<ICryptoPluginAidl> pluginAidl;
+    if (factory->createCryptoPlugin(uuidAidl, initData, &pluginAidl).isOk()) {
+        ALOGI("Create ICryptoPluginAidl. UUID:[%s]", uuidAidl.toString().c_str());
+    } else {
+        mInitCheck = DEAD_OBJECT;
+        ALOGE("Failed to create ICryptoPluginAidl. UUID:[%s]", uuidAidl.toString().c_str());
+    }
+
+    return pluginAidl;
+}
+
+status_t CryptoHalAidl::destroyPlugin() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    mPlugin.reset();
+    mInitCheck = NO_INIT;
+    return OK;
+}
+
+bool CryptoHalAidl::requiresSecureDecoderComponent(const char* mime) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return false;
+    }
+
+    std::string mimeStr = std::string(mime);
+    bool result;
+    if (!mPlugin->requiresSecureDecoderComponent(mimeStr, &result).isOk()) {
+        ALOGE("Failed to requiresSecureDecoderComponent. mime:[%s]", mime);
+        return false;
+    }
+
+    return result;
+}
+
+void CryptoHalAidl::notifyResolution(uint32_t width, uint32_t height) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return;
+    }
+
+    // Check negative width and height after type conversion
+    // Log error and return if any is negative
+    if ((int32_t)width < 0 || (int32_t)height < 0) {
+        ALOGE("Negative width: %d or height %d in notifyResolution", width, height);
+        return;
+    }
+
+    ::ndk::ScopedAStatus status = mPlugin->notifyResolution(width, height);
+    if (!status.isOk()) {
+        ALOGE("notifyResolution txn failed status code: %d", status.getServiceSpecificError());
+    }
+}
+
+status_t CryptoHalAidl::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    auto err = mPlugin->setMediaDrmSession(toStdVec(sessionId));
+    return statusAidlToStatusT(err);
+}
+
+ssize_t CryptoHalAidl::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
+                               CryptoPlugin::Mode mode, const CryptoPlugin::Pattern& pattern,
+                               const SharedBufferHidl& hSource, size_t offset,
+                               const CryptoPlugin::SubSample* subSamples, size_t numSubSamples,
+                               const DestinationBufferHidl& hDestination, AString* errorDetailMsg) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    Mode aMode;
+    switch (mode) {
+        case CryptoPlugin::kMode_Unencrypted:
+            aMode = Mode::UNENCRYPTED;
+            break;
+        case CryptoPlugin::kMode_AES_CTR:
+            aMode = Mode::AES_CTR;
+            break;
+        case CryptoPlugin::kMode_AES_WV:
+            aMode = Mode::AES_CBC_CTS;
+            break;
+        case CryptoPlugin::kMode_AES_CBC:
+            aMode = Mode::AES_CBC;
+            break;
+        default:
+            return UNKNOWN_ERROR;
+    }
+
+    Pattern aPattern;
+    aPattern.encryptBlocks = pattern.mEncryptBlocks;
+    aPattern.skipBlocks = pattern.mSkipBlocks;
+
+    std::vector<SubSample> stdSubSamples;
+    for (size_t i = 0; i < numSubSamples; i++) {
+        SubSample subSample;
+        subSample.numBytesOfClearData = subSamples[i].mNumBytesOfClearData;
+        subSample.numBytesOfEncryptedData = subSamples[i].mNumBytesOfEncryptedData;
+        stdSubSamples.push_back(subSample);
+    }
+
+    bool secure;
+    if (hDestination.type == BufferTypeHidl::SHARED_MEMORY) {
+        status_t status = checkSharedBuffer(hDestination.nonsecureMemory);
+        if (status != OK) {
+            return status;
+        }
+        secure = false;
+    } else if (hDestination.type == BufferTypeHidl::NATIVE_HANDLE) {
+        secure = true;
+    } else {
+        android_errorWriteLog(0x534e4554, "70526702");
+        return UNKNOWN_ERROR;
+    }
+
+    status_t status = checkSharedBuffer(hSource);
+    if (status != OK) {
+        return status;
+    }
+
+    status_t err = UNKNOWN_ERROR;
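+    // Drop the lock for the duration of the remote decrypt call.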
+    mLock.unlock();
+
+    std::vector<uint8_t> keyIdAidl(toStdVec(keyId, 16));
+    std::vector<uint8_t> ivAidl(toStdVec(iv, 16));
+
+    DecryptArgs args;
+    args.secure = secure;
+    args.keyId = keyIdAidl;
+    args.iv = ivAidl;
+    args.mode = aMode;
+    args.pattern = aPattern;
+    args.subSamples = std::move(stdSubSamples);
+    args.source = hidlSharedBufferToAidlSharedBuffer(hSource);
+    args.offset = offset;
+    args.destination = hidlDestinationBufferToAidlDestinationBuffer(hDestination);
+
+
+    ::ndk::ScopedAStatus statusAidl = mPlugin->decrypt(args, &result);
+
+    err = statusAidlToStatusT(statusAidl);
+    std::string msgStr(statusAidl.getMessage());
+    if (errorDetailMsg != nullptr) {
+        *errorDetailMsg = toString8(msgStr);
+    }
+    if (err != OK) {
+        ALOGE("Failed on decrypt, error description:%s", statusAidl.getDescription().c_str());
+        return err;
+    }
+
+    return result;
+}
+
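+// Register a shared memory heap with the plugin via setSharedBufferBase(); the
+// returned sequence number identifies this heap in later SharedBuffer
+// references and in unsetHeap().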
+int32_t CryptoHalAidl::setHeap(const sp<HidlMemory>& heap) {
+    if (heap == NULL || mHeapSeqNum < 0) {
+        ALOGE("setHeap(): heap %p mHeapSeqNum %d", heap.get(), mHeapSeqNum);
+        return -1;
+    }
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return -1;
+    }
+
+    int32_t seqNum = mHeapSeqNum++;
+    uint32_t bufferId = static_cast<uint32_t>(seqNum);
+    mHeapSizes.add(seqNum, heap->size());
+
+    SharedBufferAidl memAidl;
+    memAidl.handle = ::android::dupToAidl(heap->handle());
+    memAidl.size = heap->size();
+    memAidl.bufferId = bufferId;
+
+    auto status = mPlugin->setSharedBufferBase(memAidl);
+    ALOGE_IF(!status.isOk(), "setSharedBufferBase(): remote call failed");
+    return seqNum;
+}
+
+void CryptoHalAidl::unsetHeap(int32_t seqNum) {
+    Mutex::Autolock autoLock(mLock);
+
+    /*
+     * Clear the remote shared memory mapping by setting the shared
+     * buffer base to a null hidl_memory.
+     *
+     * TODO: Add a releaseSharedBuffer method in a future DRM HAL
+     * API version to make this explicit.
+     */
+    ssize_t index = mHeapSizes.indexOfKey(seqNum);
+    if (index >= 0) {
+        if (mPlugin != NULL) {
+            uint32_t bufferId = static_cast<uint32_t>(seqNum);
+            SharedBufferAidl memAidl{};
+            memAidl.bufferId = bufferId;
+            auto status = mPlugin->setSharedBufferBase(memAidl);
+            ALOGE_IF(!status.isOk(),
+                     "setSharedBufferBase(): remote call failed");
+        }
+        mHeapSizes.removeItem(seqNum);
+    }
+}
+
+status_t CryptoHalAidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    Mutex::Autolock autoLock(mLock);
+    // Log messages need to be converted from the AIDL representation.
+
+    return DrmUtils::GetLogMessagesAidl<ICryptoPluginAidl>(mPlugin, logs);
+}
+}  // namespace android
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
new file mode 100644
index 0000000..55364b5
--- /dev/null
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -0,0 +1,404 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CryptoHalHidl"
+#include <utils/Log.h>
+
+#include <android/hardware/drm/1.0/types.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <hidl/ServiceManagement.h>
+#include <hidlmemory/FrameworkUtils.h>
+#include <media/hardware/CryptoAPI.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <mediadrm/CryptoHalHidl.h>
+#include <mediadrm/DrmUtils.h>
+
+using drm::V1_0::BufferType;
+using drm::V1_0::DestinationBuffer;
+using drm::V1_0::ICryptoFactory;
+using drm::V1_0::ICryptoPlugin;
+using drm::V1_0::Mode;
+using drm::V1_0::Pattern;
+using drm::V1_0::SharedBuffer;
+using drm::V1_0::Status;
+using drm::V1_0::SubSample;
+
+using ::android::sp;
+using ::android::DrmUtils::toStatusT;
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::HidlMemory;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+typedef drm::V1_2::Status Status_V1_2;
+
+namespace android {
+
+static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t>& vector) {
+    hidl_vec<uint8_t> vec;
+    vec.setToExternal(const_cast<uint8_t*>(vector.array()), vector.size());
+    return vec;
+}
+
+static hidl_vec<uint8_t> toHidlVec(const void* ptr, size_t size) {
+    hidl_vec<uint8_t> vec;
+    vec.resize(size);
+    memcpy(vec.data(), ptr, size);
+    return vec;
+}
+
+static hidl_array<uint8_t, 16> toHidlArray16(const uint8_t* ptr) {
+    if (!ptr) {
+        return hidl_array<uint8_t, 16>();
+    }
+    return hidl_array<uint8_t, 16>(ptr);
+}
+
+static String8 toString8(hidl_string hString) {
+    return String8(hString.c_str());
+}
+
+CryptoHalHidl::CryptoHalHidl()
+    : mFactories(makeCryptoFactories()),
+      mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT),
+      mHeapSeqNum(0) {}
+
+CryptoHalHidl::~CryptoHalHidl() {}
+
+Vector<sp<ICryptoFactory>> CryptoHalHidl::makeCryptoFactories() {
+    Vector<sp<ICryptoFactory>> factories;
+
+    auto manager = hardware::defaultServiceManager1_2();
+    if (manager != NULL) {
+        manager->listManifestByInterface(
+                drm::V1_0::ICryptoFactory::descriptor,
+                [&factories](const hidl_vec<hidl_string>& registered) {
+                    for (const auto& instance : registered) {
+                        auto factory = drm::V1_0::ICryptoFactory::getService(instance);
+                        if (factory != NULL) {
+                            ALOGD("found drm@1.0 ICryptoFactory %s", instance.c_str());
+                            factories.push_back(factory);
+                        }
+                    }
+                });
+        manager->listManifestByInterface(
+                drm::V1_1::ICryptoFactory::descriptor,
+                [&factories](const hidl_vec<hidl_string>& registered) {
+                    for (const auto& instance : registered) {
+                        auto factory = drm::V1_1::ICryptoFactory::getService(instance);
+                        if (factory != NULL) {
+                            ALOGD("found drm@1.1 ICryptoFactory %s", instance.c_str());
+                            factories.push_back(factory);
+                        }
+                    }
+                });
+    }
+
+    if (factories.size() == 0) {
+        // must be in passthrough mode, load the default passthrough service
+        auto passthrough = ICryptoFactory::getService();
+        if (passthrough != NULL) {
+            ALOGI("makeCryptoFactories: using default passthrough crypto instance");
+            factories.push_back(passthrough);
+        } else {
+            ALOGE("Failed to find any crypto factories");
+        }
+    }
+    return factories;
+}
+
+sp<ICryptoPlugin> CryptoHalHidl::makeCryptoPlugin(const sp<ICryptoFactory>& factory,
+                                                  const uint8_t uuid[16], const void* initData,
+                                                  size_t initDataSize) {
+    sp<ICryptoPlugin> plugin;
+    Return<void> hResult =
+            factory->createPlugin(toHidlArray16(uuid), toHidlVec(initData, initDataSize),
+                                  [&](Status status, const sp<ICryptoPlugin>& hPlugin) {
+                                      if (status != Status::OK) {
+                                          ALOGE("Failed to make crypto plugin");
+                                          return;
+                                      }
+                                      plugin = hPlugin;
+                                  });
+    if (!hResult.isOk()) {
+        mInitCheck = DEAD_OBJECT;
+    }
+    return plugin;
+}
+
+status_t CryptoHalHidl::initCheck() const {
+    return mInitCheck;
+}
+
+bool CryptoHalHidl::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+    Mutex::Autolock autoLock(mLock);
+
+    for (size_t i = 0; i < mFactories.size(); i++) {
+        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+status_t CryptoHalHidl::createPlugin(const uint8_t uuid[16], const void* data, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    for (size_t i = 0; i < mFactories.size(); i++) {
+        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+            mPlugin = makeCryptoPlugin(mFactories[i], uuid, data, size);
+            if (mPlugin != NULL) {
+                mPluginV1_2 = drm::V1_2::ICryptoPlugin::castFrom(mPlugin);
+            }
+        }
+    }
+
+    if (mInitCheck == NO_INIT) {
+        mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
+    }
+
+    return mInitCheck;
+}
+
+status_t CryptoHalHidl::destroyPlugin() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    mPlugin.clear();
+    mPluginV1_2.clear();
+    mInitCheck = NO_INIT;
+    return OK;
+}
+
+bool CryptoHalHidl::requiresSecureDecoderComponent(const char* mime) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return false;
+    }
+
+    Return<bool> hResult = mPlugin->requiresSecureDecoderComponent(hidl_string(mime));
+    if (!hResult.isOk()) {
+        return false;
+    }
+    return hResult;
+}
+
+/**
+ * If the heap base isn't set, get the heap base from the HidlMemory
+ * and send it to the HAL so it can map a remote heap of the same
+ * size.  Once the heap base is established, shared memory buffers
+ * are sent by providing an offset into the heap and a buffer size.
+ */
+int32_t CryptoHalHidl::setHeapBase(const sp<HidlMemory>& heap) {
+    if (heap == NULL || mHeapSeqNum < 0) {
+        ALOGE("setHeapBase(): heap %p mHeapSeqNum %d", heap.get(), mHeapSeqNum);
+        return -1;
+    }
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return -1;
+    }
+
+    int32_t seqNum = mHeapSeqNum++;
+    uint32_t bufferId = static_cast<uint32_t>(seqNum);
+    mHeapSizes.add(seqNum, heap->size());
+    Return<void> hResult = mPlugin->setSharedBufferBase(*heap, bufferId);
+    ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
+    return seqNum;
+}
+
+void CryptoHalHidl::clearHeapBase(int32_t seqNum) {
+    Mutex::Autolock autoLock(mLock);
+
+    /*
+     * Clear the remote shared memory mapping by setting the shared
+     * buffer base to a null hidl_memory.
+     *
+     * TODO: Add a releaseSharedBuffer method in a future DRM HAL
+     * API version to make this explicit.
+     */
+    ssize_t index = mHeapSizes.indexOfKey(seqNum);
+    if (index >= 0) {
+        if (mPlugin != NULL) {
+            uint32_t bufferId = static_cast<uint32_t>(seqNum);
+            Return<void> hResult = mPlugin->setSharedBufferBase(hidl_memory(), bufferId);
+            ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
+        }
+        mHeapSizes.removeItem(seqNum);
+    }
+}
+
+status_t CryptoHalHidl::checkSharedBuffer(const ::SharedBuffer& buffer) {
+    int32_t seqNum = static_cast<int32_t>(buffer.bufferId);
+    // memory must be in one of the heaps that have been set
+    if (mHeapSizes.indexOfKey(seqNum) < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    // memory must be within the address space of the heap
+    size_t heapSize = mHeapSizes.valueFor(seqNum);
+    if (heapSize < buffer.offset + buffer.size || SIZE_MAX - buffer.offset < buffer.size) {
+        android_errorWriteLog(0x534e4554, "76221123");
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+ssize_t CryptoHalHidl::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
+                               CryptoPlugin::Mode mode, const CryptoPlugin::Pattern& pattern,
+                               const drm::V1_0::SharedBuffer& hSource, size_t offset,
+                               const CryptoPlugin::SubSample* subSamples, size_t numSubSamples,
+                               const drm::V1_0::DestinationBuffer& hDestination,
+                               AString* errorDetailMsg) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    Mode hMode;
+    switch (mode) {
+        case CryptoPlugin::kMode_Unencrypted:
+            hMode = Mode::UNENCRYPTED;
+            break;
+        case CryptoPlugin::kMode_AES_CTR:
+            hMode = Mode::AES_CTR;
+            break;
+        case CryptoPlugin::kMode_AES_WV:
+            hMode = Mode::AES_CBC_CTS;
+            break;
+        case CryptoPlugin::kMode_AES_CBC:
+            hMode = Mode::AES_CBC;
+            break;
+        default:
+            return UNKNOWN_ERROR;
+    }
+
+    Pattern hPattern;
+    hPattern.encryptBlocks = pattern.mEncryptBlocks;
+    hPattern.skipBlocks = pattern.mSkipBlocks;
+
+    std::vector<SubSample> stdSubSamples;
+    for (size_t i = 0; i < numSubSamples; i++) {
+        SubSample subSample;
+        subSample.numBytesOfClearData = subSamples[i].mNumBytesOfClearData;
+        subSample.numBytesOfEncryptedData = subSamples[i].mNumBytesOfEncryptedData;
+        stdSubSamples.push_back(subSample);
+    }
+    auto hSubSamples = hidl_vec<SubSample>(stdSubSamples);
+
+    bool secure;
+    if (hDestination.type == BufferType::SHARED_MEMORY) {
+        status_t status = checkSharedBuffer(hDestination.nonsecureMemory);
+        if (status != OK) {
+            return status;
+        }
+        secure = false;
+    } else if (hDestination.type == BufferType::NATIVE_HANDLE) {
+        secure = true;
+    } else {
+        android_errorWriteLog(0x534e4554, "70526702");
+        return UNKNOWN_ERROR;
+    }
+
+    status_t status = checkSharedBuffer(hSource);
+    if (status != OK) {
+        return status;
+    }
+
+    status_t err = UNKNOWN_ERROR;
+    uint32_t bytesWritten = 0;
+
+    Return<void> hResult;
+
+    mLock.unlock();
+    if (mPluginV1_2 != NULL) {
+        hResult = mPluginV1_2->decrypt_1_2(
+                secure, toHidlArray16(keyId), toHidlArray16(iv), hMode, hPattern, hSubSamples,
+                hSource, offset, hDestination,
+                [&](Status_V1_2 status, uint32_t hBytesWritten, hidl_string hDetailedError) {
+                    if (status == Status_V1_2::OK) {
+                        bytesWritten = hBytesWritten;
+                        if (errorDetailMsg != nullptr) {
+                            *errorDetailMsg = toString8(hDetailedError);
+                        }
+                    }
+                    err = toStatusT(status);
+                });
+    } else {
+        hResult = mPlugin->decrypt(
+                secure, toHidlArray16(keyId), toHidlArray16(iv), hMode, hPattern, hSubSamples,
+                hSource, offset, hDestination,
+                [&](Status status, uint32_t hBytesWritten, hidl_string hDetailedError) {
+                    if (status == Status::OK) {
+                        bytesWritten = hBytesWritten;
+                        if (errorDetailMsg != nullptr) {
+                            *errorDetailMsg = toString8(hDetailedError);
+                        }
+                    }
+                    err = toStatusT(status);
+                });
+    }
+
+    err = hResult.isOk() ? err : DEAD_OBJECT;
+    if (err == OK) {
+        return bytesWritten;
+    }
+    return err;
+}
+
+void CryptoHalHidl::notifyResolution(uint32_t width, uint32_t height) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return;
+    }
+
+    auto hResult = mPlugin->notifyResolution(width, height);
+    ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
+}
+
+status_t CryptoHalHidl::setMediaDrmSession(const Vector<uint8_t>& sessionId) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    auto err = mPlugin->setMediaDrmSession(toHidlVec(sessionId));
+    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+}
+
+status_t CryptoHalHidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    Mutex::Autolock autoLock(mLock);
+    return DrmUtils::GetLogMessages<drm::V1_4::ICryptoPlugin>(mPlugin, logs);
+}
+}  // namespace android
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 40d1e0c..c394d5a 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -17,1557 +17,280 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "DrmHal"
 
-#include <aidl/android/media/BnResourceManagerClient.h>
-#include <android/binder_manager.h>
-#include <android/hardware/drm/1.2/types.h>
-#include <android/hidl/manager/1.2/IServiceManager.h>
-#include <hidl/ServiceManagement.h>
-#include <media/EventMetric.h>
-#include <media/MediaMetrics.h>
-#include <media/PluginMetricsReporting.h>
-#include <media/drm/DrmAPI.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/foundation/base64.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
 #include <mediadrm/DrmHal.h>
-#include <mediadrm/DrmSessionClientInterface.h>
-#include <mediadrm/DrmSessionManager.h>
-#include <mediadrm/IDrmMetricsConsumer.h>
+#include <mediadrm/DrmHalAidl.h>
+#include <mediadrm/DrmHalHidl.h>
 #include <mediadrm/DrmUtils.h>
-#include <utils/Log.h>
-
-#include <iomanip>
-#include <vector>
-
-using drm::V1_0::KeyedVector;
-using drm::V1_0::KeyRequestType;
-using drm::V1_0::KeyType;
-using drm::V1_0::KeyValue;
-using drm::V1_0::SecureStop;
-using drm::V1_0::SecureStopId;
-using drm::V1_0::Status;
-using drm::V1_1::HdcpLevel;
-using drm::V1_1::SecureStopRelease;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeySetId;
-using drm::V1_2::KeyStatusType;
-using ::android::DrmUtils::toStatusT;
-using ::android::hardware::drm::V1_1::DrmMetricGroup;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::os::PersistableBundle;
-using ::android::sp;
-
-typedef drm::V1_1::KeyRequestType KeyRequestType_V1_1;
-typedef drm::V1_2::Status Status_V1_2;
-typedef drm::V1_2::HdcpLevel HdcpLevel_V1_2;
-
-namespace {
-
-// This constant corresponds to the PROPERTY_DEVICE_UNIQUE_ID constant
-// in the MediaDrm API.
-constexpr char kPropertyDeviceUniqueId[] = "deviceUniqueId";
-constexpr char kEqualsSign[] = "=";
-
-template<typename T>
-std::string toBase64StringNoPad(const T* data, size_t size) {
-    // Note that the base 64 conversion only works with arrays of single-byte
-    // values. If the source is empty or is not an array of single-byte values,
-    // return empty string.
-    if (size == 0 || sizeof(data[0]) != 1) {
-      return "";
-    }
-
-    android::AString outputString;
-    encodeBase64(data, size, &outputString);
-    // Remove trailing equals padding if it exists.
-    while (outputString.size() > 0 && outputString.endsWith(kEqualsSign)) {
-        outputString.erase(outputString.size() - 1, 1);
-    }
-
-    return std::string(outputString.c_str(), outputString.size());
-}
-
-}  // anonymous namespace
 
 namespace android {
 
-#define INIT_CHECK() {if (mInitCheck != OK) return mInitCheck;}
-
-static const Vector<uint8_t> toVector(const hidl_vec<uint8_t> &vec) {
-    Vector<uint8_t> vector;
-    vector.appendArray(vec.data(), vec.size());
-    return *const_cast<const Vector<uint8_t> *>(&vector);
+DrmHal::DrmHal() {
+    mDrmHalHidl = sp<DrmHalHidl>::make();
+    mDrmHalAidl = sp<DrmHalAidl>::make();
 }
 
-static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t> &vector) {
-    hidl_vec<uint8_t> vec;
-    vec.setToExternal(const_cast<uint8_t *>(vector.array()), vector.size());
-    return vec;
-}
-
-static String8 toString8(const hidl_string &string) {
-    return String8(string.c_str());
-}
-
-static hidl_string toHidlString(const String8& string) {
-    return hidl_string(string.string());
-}
-
-static DrmPlugin::SecurityLevel toSecurityLevel(SecurityLevel level) {
-    switch(level) {
-    case SecurityLevel::SW_SECURE_CRYPTO:
-        return DrmPlugin::kSecurityLevelSwSecureCrypto;
-    case SecurityLevel::SW_SECURE_DECODE:
-        return DrmPlugin::kSecurityLevelSwSecureDecode;
-    case SecurityLevel::HW_SECURE_CRYPTO:
-        return DrmPlugin::kSecurityLevelHwSecureCrypto;
-    case SecurityLevel::HW_SECURE_DECODE:
-        return DrmPlugin::kSecurityLevelHwSecureDecode;
-    case SecurityLevel::HW_SECURE_ALL:
-        return DrmPlugin::kSecurityLevelHwSecureAll;
-    default:
-        return DrmPlugin::kSecurityLevelUnknown;
-    }
-}
-
-static SecurityLevel toHidlSecurityLevel(DrmPlugin::SecurityLevel level) {
-    switch(level) {
-    case DrmPlugin::kSecurityLevelSwSecureCrypto:
-        return SecurityLevel::SW_SECURE_CRYPTO;
-    case DrmPlugin::kSecurityLevelSwSecureDecode:
-        return SecurityLevel::SW_SECURE_DECODE;
-    case DrmPlugin::kSecurityLevelHwSecureCrypto:
-        return SecurityLevel::HW_SECURE_CRYPTO;
-    case DrmPlugin::kSecurityLevelHwSecureDecode:
-        return SecurityLevel::HW_SECURE_DECODE;
-    case DrmPlugin::kSecurityLevelHwSecureAll:
-        return SecurityLevel::HW_SECURE_ALL;
-    default:
-        return SecurityLevel::UNKNOWN;
-    }
-}
-
-static DrmPlugin::OfflineLicenseState toOfflineLicenseState(
-        OfflineLicenseState licenseState) {
-    switch(licenseState) {
-    case OfflineLicenseState::USABLE:
-        return DrmPlugin::kOfflineLicenseStateUsable;
-    case OfflineLicenseState::INACTIVE:
-        return DrmPlugin::kOfflineLicenseStateReleased;
-    default:
-        return DrmPlugin::kOfflineLicenseStateUnknown;
-    }
-}
-
-static DrmPlugin::HdcpLevel toHdcpLevel(HdcpLevel_V1_2 level) {
-    switch(level) {
-    case HdcpLevel_V1_2::HDCP_NONE:
-        return DrmPlugin::kHdcpNone;
-    case HdcpLevel_V1_2::HDCP_V1:
-        return DrmPlugin::kHdcpV1;
-    case HdcpLevel_V1_2::HDCP_V2:
-        return DrmPlugin::kHdcpV2;
-    case HdcpLevel_V1_2::HDCP_V2_1:
-        return DrmPlugin::kHdcpV2_1;
-    case HdcpLevel_V1_2::HDCP_V2_2:
-        return DrmPlugin::kHdcpV2_2;
-    case HdcpLevel_V1_2::HDCP_V2_3:
-        return DrmPlugin::kHdcpV2_3;
-    case HdcpLevel_V1_2::HDCP_NO_OUTPUT:
-        return DrmPlugin::kHdcpNoOutput;
-    default:
-        return DrmPlugin::kHdcpLevelUnknown;
-    }
-}
-static ::KeyedVector toHidlKeyedVector(const KeyedVector<String8, String8>&
-        keyedVector) {
-    std::vector<KeyValue> stdKeyedVector;
-    for (size_t i = 0; i < keyedVector.size(); i++) {
-        KeyValue keyValue;
-        keyValue.key = toHidlString(keyedVector.keyAt(i));
-        keyValue.value = toHidlString(keyedVector.valueAt(i));
-        stdKeyedVector.push_back(keyValue);
-    }
-    return ::KeyedVector(stdKeyedVector);
-}
-
-static KeyedVector<String8, String8> toKeyedVector(const ::KeyedVector&
-        hKeyedVector) {
-    KeyedVector<String8, String8> keyedVector;
-    for (size_t i = 0; i < hKeyedVector.size(); i++) {
-        keyedVector.add(toString8(hKeyedVector[i].key),
-                toString8(hKeyedVector[i].value));
-    }
-    return keyedVector;
-}
-
-static List<Vector<uint8_t>> toSecureStops(const hidl_vec<SecureStop>&
-        hSecureStops) {
-    List<Vector<uint8_t>> secureStops;
-    for (size_t i = 0; i < hSecureStops.size(); i++) {
-        secureStops.push_back(toVector(hSecureStops[i].opaqueData));
-    }
-    return secureStops;
-}
-
-static List<Vector<uint8_t>> toSecureStopIds(const hidl_vec<SecureStopId>&
-        hSecureStopIds) {
-    List<Vector<uint8_t>> secureStopIds;
-    for (size_t i = 0; i < hSecureStopIds.size(); i++) {
-        secureStopIds.push_back(toVector(hSecureStopIds[i]));
-    }
-    return secureStopIds;
-}
-
-static List<Vector<uint8_t>> toKeySetIds(const hidl_vec<KeySetId>&
-        hKeySetIds) {
-    List<Vector<uint8_t>> keySetIds;
-    for (size_t i = 0; i < hKeySetIds.size(); i++) {
-        keySetIds.push_back(toVector(hKeySetIds[i]));
-    }
-    return keySetIds;
-}
-
-Mutex DrmHal::mLock;
-
-struct DrmHal::DrmSessionClient : public aidl::android::media::BnResourceManagerClient {
-    explicit DrmSessionClient(DrmHal* drm, const Vector<uint8_t>& sessionId)
-      : mSessionId(sessionId),
-        mDrm(drm) {}
-
-    ::ndk::ScopedAStatus reclaimResource(bool* _aidl_return) override;
-    ::ndk::ScopedAStatus getName(::std::string* _aidl_return) override;
-
-    const Vector<uint8_t> mSessionId;
-
-    virtual ~DrmSessionClient();
-
-private:
-    wp<DrmHal> mDrm;
-
-    DISALLOW_EVIL_CONSTRUCTORS(DrmSessionClient);
-};
-
-::ndk::ScopedAStatus DrmHal::DrmSessionClient::reclaimResource(bool* _aidl_return) {
-    auto sessionId = mSessionId;
-    sp<DrmHal> drm = mDrm.promote();
-    if (drm == NULL) {
-        *_aidl_return = true;
-        return ::ndk::ScopedAStatus::ok();
-    }
-    status_t err = drm->closeSession(sessionId);
-    if (err != OK) {
-        *_aidl_return = false;
-        return ::ndk::ScopedAStatus::ok();
-    }
-    drm->sendEvent(EventType::SESSION_RECLAIMED,
-            toHidlVec(sessionId), hidl_vec<uint8_t>());
-    *_aidl_return = true;
-    return ::ndk::ScopedAStatus::ok();
-}
-
-::ndk::ScopedAStatus DrmHal::DrmSessionClient::getName(::std::string* _aidl_return) {
-    String8 name;
-    sp<DrmHal> drm = mDrm.promote();
-    if (drm == NULL) {
-        name.append("<deleted>");
-    } else if (drm->getPropertyStringInternal(String8("vendor"), name) != OK
-        || name.isEmpty()) {
-      name.append("<Get vendor failed or is empty>");
-    }
-    name.append("[");
-    for (size_t i = 0; i < mSessionId.size(); ++i) {
-        name.appendFormat("%02x", mSessionId[i]);
-    }
-    name.append("]");
-    *_aidl_return = name;
-    return ::ndk::ScopedAStatus::ok();
-}
-
-DrmHal::DrmSessionClient::~DrmSessionClient() {
-    DrmSessionManager::Instance()->removeSession(mSessionId);
-}
-
-DrmHal::DrmHal()
-   : mFactories(makeDrmFactories()),
-     mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {
-}
-
-void DrmHal::closeOpenSessions() {
-    Mutex::Autolock autoLock(mLock);
-    auto openSessions = mOpenSessions;
-    for (size_t i = 0; i < openSessions.size(); i++) {
-        mLock.unlock();
-        closeSession(openSessions[i]->mSessionId);
-        mLock.lock();
-    }
-    mOpenSessions.clear();
-}
-
-DrmHal::~DrmHal() {
-}
-
-void DrmHal::cleanup() {
-    closeOpenSessions();
-
-    Mutex::Autolock autoLock(mLock);
-    reportFrameworkMetrics(reportPluginMetrics());
-
-    setListener(NULL);
-    mInitCheck = NO_INIT;
-    if (mPluginV1_2 != NULL) {
-        if (!mPluginV1_2->setListener(NULL).isOk()) {
-            mInitCheck = DEAD_OBJECT;
-        }
-    } else if (mPlugin != NULL) {
-        if (!mPlugin->setListener(NULL).isOk()) {
-            mInitCheck = DEAD_OBJECT;
-        }
-    }
-    mPlugin.clear();
-    mPluginV1_1.clear();
-    mPluginV1_2.clear();
-    mPluginV1_4.clear();
-}
-
-std::vector<sp<IDrmFactory>> DrmHal::makeDrmFactories() {
-    static std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
-    if (factories.size() == 0) {
-        // must be in passthrough mode, load the default passthrough service
-        auto passthrough = IDrmFactory::getService();
-        if (passthrough != NULL) {
-            DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
-            factories.push_back(passthrough);
-        } else {
-            DrmUtils::LOG2BE("Failed to find any drm factories");
-        }
-    }
-    return factories;
-}
-
-sp<IDrmPlugin> DrmHal::makeDrmPlugin(const sp<IDrmFactory>& factory,
-        const uint8_t uuid[16], const String8& appPackageName) {
-    mAppPackageName = appPackageName;
-    mMetrics.SetAppPackageName(appPackageName);
-    mMetrics.SetAppUid(AIBinder_getCallingUid());
-
-    sp<IDrmPlugin> plugin;
-    Return<void> hResult = factory->createPlugin(uuid, appPackageName.string(),
-            [&](Status status, const sp<IDrmPlugin>& hPlugin) {
-                if (status != Status::OK) {
-                    DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d", status);
-                    return;
-                }
-                plugin = hPlugin;
-            }
-        );
-
-    if (!hResult.isOk()) {
-        DrmUtils::LOG2BE(uuid, "createPlugin remote call failed: %s",
-                         hResult.description().c_str());
-    }
-
-    return plugin;
-}
+DrmHal::~DrmHal() {}
 
 status_t DrmHal::initCheck() const {
-    return mInitCheck;
+    if (mDrmHalAidl->initCheck() == OK || mDrmHalHidl->initCheck() == OK) return OK;
+    if (mDrmHalAidl->initCheck() == NO_INIT || mDrmHalHidl->initCheck() == NO_INIT) return NO_INIT;
+    return mDrmHalHidl->initCheck();
 }
 
-status_t DrmHal::setListener(const sp<IDrmClient>& listener)
-{
-    Mutex::Autolock lock(mEventLock);
-    mListener = listener;
-    return NO_ERROR;
+status_t DrmHal::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                         DrmPlugin::SecurityLevel securityLevel, bool* result) {
+    status_t statusResult =
+            mDrmHalAidl->isCryptoSchemeSupported(uuid, mimeType, securityLevel, result);
+    if (*result) return statusResult;
+    return mDrmHalHidl->isCryptoSchemeSupported(uuid, mimeType, securityLevel, result);
 }
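
Editor's note: the wrapper methods that follow all apply the same routing rule by hand: use the AIDL backend whenever it initialized successfully, otherwise fall through to the HIDL backend (isCryptoSchemeSupported above is an exception, falling back based on the reported result, and createPlugin/destroyPlugin below handle both backends explicitly). As a compact illustration (routeToHal is not part of this patch), the rule can be written as a small helper, assuming the types already in scope in this file:

    // Illustrative sketch, not part of DrmHal: AidlCall/HidlCall are callables
    // returning status_t; the AIDL backend wins whenever it came up successfully.
    template <typename AidlCall, typename HidlCall>
    static status_t routeToHal(const sp<DrmHalAidl>& aidl, AidlCall aidlCall, HidlCall hidlCall) {
        return aidl->initCheck() == OK ? aidlCall() : hidlCall();
    }

    // Hypothetical usage, e.g. for removeKeys():
    //   return routeToHal(mDrmHalAidl,
    //                     [&] { return mDrmHalAidl->removeKeys(keySetId); },
    //                     [&] { return mDrmHalHidl->removeKeys(keySetId); });
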
 
-Return<void> DrmHal::sendEvent(EventType hEventType,
-        const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& data) {
-    mMetrics.mEventCounter.Increment(hEventType);
-
-    mEventLock.lock();
-    sp<IDrmClient> listener = mListener;
-    mEventLock.unlock();
-
-    if (listener != NULL) {
-        Mutex::Autolock lock(mNotifyLock);
-        DrmPlugin::EventType eventType;
-        switch(hEventType) {
-        case EventType::PROVISION_REQUIRED:
-            eventType = DrmPlugin::kDrmPluginEventProvisionRequired;
-            break;
-        case EventType::KEY_NEEDED:
-            eventType = DrmPlugin::kDrmPluginEventKeyNeeded;
-            break;
-        case EventType::KEY_EXPIRED:
-            eventType = DrmPlugin::kDrmPluginEventKeyExpired;
-            break;
-        case EventType::VENDOR_DEFINED:
-            eventType = DrmPlugin::kDrmPluginEventVendorDefined;
-            break;
-        case EventType::SESSION_RECLAIMED:
-            eventType = DrmPlugin::kDrmPluginEventSessionReclaimed;
-            break;
-        default:
-            return Void();
-        }
-        listener->sendEvent(eventType, sessionId, data);
-    }
-    return Void();
-}
-
-Return<void> DrmHal::sendExpirationUpdate(const hidl_vec<uint8_t>& sessionId,
-        int64_t expiryTimeInMS) {
-
-    mEventLock.lock();
-    sp<IDrmClient> listener = mListener;
-    mEventLock.unlock();
-
-    if (listener != NULL) {
-        Mutex::Autolock lock(mNotifyLock);
-        listener->sendExpirationUpdate(sessionId, expiryTimeInMS);
-    }
-    return Void();
-}
-
-Return<void> DrmHal::sendKeysChange(const hidl_vec<uint8_t>& sessionId,
-        const hidl_vec<KeyStatus_V1_0>& keyStatusList_V1_0, bool hasNewUsableKey) {
-    std::vector<KeyStatus> keyStatusVec;
-    for (const auto &keyStatus_V1_0 : keyStatusList_V1_0) {
-        keyStatusVec.push_back({keyStatus_V1_0.keyId,
-                static_cast<KeyStatusType>(keyStatus_V1_0.type)});
-    }
-    hidl_vec<KeyStatus> keyStatusList_V1_2(keyStatusVec);
-    return sendKeysChange_1_2(sessionId, keyStatusList_V1_2, hasNewUsableKey);
-}
-
-Return<void> DrmHal::sendKeysChange_1_2(const hidl_vec<uint8_t>& sessionId,
-        const hidl_vec<KeyStatus>& hKeyStatusList, bool hasNewUsableKey) {
-
-    mEventLock.lock();
-    sp<IDrmClient> listener = mListener;
-    mEventLock.unlock();
-
-    if (listener != NULL) {
-        std::vector<DrmKeyStatus> keyStatusList;
-        size_t nKeys = hKeyStatusList.size();
-        for (size_t i = 0; i < nKeys; ++i) {
-            const KeyStatus &keyStatus = hKeyStatusList[i];
-            uint32_t type;
-            switch(keyStatus.type) {
-            case KeyStatusType::USABLE:
-                type = DrmPlugin::kKeyStatusType_Usable;
-                break;
-            case KeyStatusType::EXPIRED:
-                type = DrmPlugin::kKeyStatusType_Expired;
-                break;
-            case KeyStatusType::OUTPUTNOTALLOWED:
-                type = DrmPlugin::kKeyStatusType_OutputNotAllowed;
-                break;
-            case KeyStatusType::STATUSPENDING:
-                type = DrmPlugin::kKeyStatusType_StatusPending;
-                break;
-            case KeyStatusType::USABLEINFUTURE:
-                type = DrmPlugin::kKeyStatusType_UsableInFuture;
-                break;
-            case KeyStatusType::INTERNALERROR:
-            default:
-                type = DrmPlugin::kKeyStatusType_InternalError;
-                break;
-            }
-            keyStatusList.push_back({type, keyStatus.keyId});
-            mMetrics.mKeyStatusChangeCounter.Increment(keyStatus.type);
-        }
-
-        Mutex::Autolock lock(mNotifyLock);
-        listener->sendKeysChange(sessionId, keyStatusList, hasNewUsableKey);
-    } else {
-        // There's no listener. But we still want to count the key change
-        // events.
-        size_t nKeys = hKeyStatusList.size();
-        for (size_t i = 0; i < nKeys; i++) {
-            mMetrics.mKeyStatusChangeCounter.Increment(hKeyStatusList[i].type);
-        }
-    }
-
-    return Void();
-}
-
-Return<void> DrmHal::sendSessionLostState(
-        const hidl_vec<uint8_t>& sessionId) {
-
-    mEventLock.lock();
-    sp<IDrmClient> listener = mListener;
-    mEventLock.unlock();
-
-    if (listener != NULL) {
-        Mutex::Autolock lock(mNotifyLock);
-        listener->sendSessionLostState(sessionId);
-    }
-    return Void();
-}
-
-status_t DrmHal::matchMimeTypeAndSecurityLevel(const sp<IDrmFactory> &factory,
-                                               const uint8_t uuid[16],
-                                               const String8 &mimeType,
-                                               DrmPlugin::SecurityLevel level,
-                                               bool *isSupported) {
-    *isSupported = false;
-
-    // handle default value cases
-    if (level == DrmPlugin::kSecurityLevelUnknown) {
-        if (mimeType == "") {
-            // isCryptoSchemeSupported(uuid)
-            *isSupported = true;
-        } else {
-            // isCryptoSchemeSupported(uuid, mimeType)
-            *isSupported = factory->isContentTypeSupported(mimeType.string());
-        }
-        return OK;
-    } else if (mimeType == "") {
-        return BAD_VALUE;
-    }
-
-    sp<drm::V1_2::IDrmFactory> factoryV1_2 = drm::V1_2::IDrmFactory::castFrom(factory);
-    if (factoryV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
-    } else {
-        *isSupported = factoryV1_2->isCryptoSchemeSupported_1_2(uuid,
-                mimeType.string(), toHidlSecurityLevel(level));
-        return OK;
-    }
-}
-
-status_t DrmHal::isCryptoSchemeSupported(const uint8_t uuid[16],
-                                         const String8 &mimeType,
-                                         DrmPlugin::SecurityLevel level,
-                                         bool *isSupported) {
-    Mutex::Autolock autoLock(mLock);
-    *isSupported = false;
-    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
-        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
-            return matchMimeTypeAndSecurityLevel(mFactories[i],
-                    uuid, mimeType, level, isSupported);
-        }
-    }
-    return OK;
-}
-
-status_t DrmHal::createPlugin(const uint8_t uuid[16],
-        const String8& appPackageName) {
-    Mutex::Autolock autoLock(mLock);
-
-    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
-        auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
-        if (hResult.isOk() && hResult) {
-            auto plugin = makeDrmPlugin(mFactories[i], uuid, appPackageName);
-            if (plugin != NULL) {
-                mPlugin = plugin;
-                mPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mPlugin);
-                mPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mPlugin);
-                mPluginV1_4 = drm::V1_4::IDrmPlugin::castFrom(mPlugin);
-                break;
-            }
-        }
-    }
-
-    if (mPlugin == NULL) {
-        DrmUtils::LOG2BE(uuid, "No supported hal instance found");
-        mInitCheck = ERROR_UNSUPPORTED;
-    } else {
-        mInitCheck = OK;
-        if (mPluginV1_2 != NULL) {
-            if (!mPluginV1_2->setListener(this).isOk()) {
-                mInitCheck = DEAD_OBJECT;
-            }
-        } else if (!mPlugin->setListener(this).isOk()) {
-            mInitCheck = DEAD_OBJECT;
-        }
-        if (mInitCheck != OK) {
-            mPlugin.clear();
-            mPluginV1_1.clear();
-            mPluginV1_2.clear();
-            mPluginV1_4.clear();
-        }
-    }
-
-
-    return mInitCheck;
+status_t DrmHal::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
+    status_t statusResult = mDrmHalAidl->createPlugin(uuid, appPackageName);
+    if (statusResult != OK) return mDrmHalHidl->createPlugin(uuid, appPackageName);
+    return statusResult;
 }
 
 status_t DrmHal::destroyPlugin() {
-    cleanup();
-    return OK;
+    status_t statusResult = mDrmHalAidl->destroyPlugin();
+    status_t statusResultHidl = mDrmHalHidl->destroyPlugin();
+    if (statusResult != OK) return statusResult;
+    return statusResultHidl;
 }
 
-status_t DrmHal::openSession(DrmPlugin::SecurityLevel level,
-        Vector<uint8_t> &sessionId) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    SecurityLevel hSecurityLevel = toHidlSecurityLevel(level);
-    bool setSecurityLevel = true;
-
-    if (level == DrmPlugin::kSecurityLevelMax) {
-        setSecurityLevel = false;
-    } else {
-        if (hSecurityLevel == SecurityLevel::UNKNOWN) {
-            return ERROR_DRM_CANNOT_HANDLE;
-        }
-    }
-
-    status_t  err = UNKNOWN_ERROR;
-    bool retry = true;
-    do {
-        hidl_vec<uint8_t> hSessionId;
-
-        Return<void> hResult;
-        if (mPluginV1_1 == NULL || !setSecurityLevel) {
-            hResult = mPlugin->openSession(
-                    [&](Status status,const hidl_vec<uint8_t>& id) {
-                        if (status == Status::OK) {
-                            sessionId = toVector(id);
-                        }
-                        err = toStatusT(status);
-                    }
-                );
-        } else {
-            hResult = mPluginV1_1->openSession_1_1(hSecurityLevel,
-                    [&](Status status, const hidl_vec<uint8_t>& id) {
-                        if (status == Status::OK) {
-                            sessionId = toVector(id);
-                        }
-                        err = toStatusT(status);
-                    }
-                );
-        }
-
-        if (!hResult.isOk()) {
-            err = DEAD_OBJECT;
-        }
-
-        if (err == ERROR_DRM_RESOURCE_BUSY && retry) {
-            mLock.unlock();
-            // reclaimSession may call back to closeSession, since mLock is
-            // shared between Drm instances, we should unlock here to avoid
-            // deadlock.
-            retry = DrmSessionManager::Instance()->reclaimSession(AIBinder_getCallingPid());
-            mLock.lock();
-        } else {
-            retry = false;
-        }
-    } while (retry);
-
-    if (err == OK) {
-        std::shared_ptr<DrmSessionClient> client =
-                ndk::SharedRefBase::make<DrmSessionClient>(this, sessionId);
-        DrmSessionManager::Instance()->addSession(AIBinder_getCallingPid(),
-                std::static_pointer_cast<IResourceManagerClient>(client), sessionId);
-        mOpenSessions.push_back(client);
-        mMetrics.SetSessionStart(sessionId);
-    }
-
-    mMetrics.mOpenSessionCounter.Increment(err);
-    return err;
+status_t DrmHal::openSession(DrmPlugin::SecurityLevel securityLevel, Vector<uint8_t>& sessionId) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->openSession(securityLevel, sessionId);
+    return mDrmHalHidl->openSession(securityLevel, sessionId);
 }
 
-status_t DrmHal::closeSession(Vector<uint8_t> const &sessionId) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    Return<Status> status = mPlugin->closeSession(toHidlVec(sessionId));
-    if (status.isOk()) {
-        if (status == Status::OK) {
-            DrmSessionManager::Instance()->removeSession(sessionId);
-            for (auto i = mOpenSessions.begin(); i != mOpenSessions.end(); i++) {
-                if (isEqualSessionId((*i)->mSessionId, sessionId)) {
-                    mOpenSessions.erase(i);
-                    break;
-                }
-            }
-        }
-        status_t response = toStatusT(status);
-        mMetrics.SetSessionEnd(sessionId);
-        mMetrics.mCloseSessionCounter.Increment(response);
-        return response;
-    }
-    mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
-    return DEAD_OBJECT;
+status_t DrmHal::closeSession(Vector<uint8_t> const& sessionId) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->closeSession(sessionId);
+    return mDrmHalHidl->closeSession(sessionId);
 }
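
Editor's note: with createPlugin, openSession, closeSession and destroyPlugin in place, the facade can be driven end to end without the caller knowing which backend is active. A minimal caller-side sketch follows (not taken from this patch; the function name and package string are illustrative, and a real 16-byte scheme UUID must be supplied):

    // Hypothetical lifecycle walk-through of the DrmHal facade; error handling abbreviated.
    static status_t exerciseDrmHal(const uint8_t uuid[16]) {
        sp<DrmHal> drm = new DrmHal();
        status_t err = drm->createPlugin(uuid, String8("com.example.app"));
        if (err != OK) return err;
        Vector<uint8_t> sessionId;
        err = drm->openSession(DrmPlugin::kSecurityLevelMax, sessionId);
        if (err == OK) {
            // getKeyRequest()/provideKeyResponse() would normally run here.
            drm->closeSession(sessionId);
        }
        drm->destroyPlugin();  // tears down both the AIDL and the HIDL backend
        return err;
    }
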
 
-static DrmPlugin::KeyRequestType toKeyRequestType(
-        KeyRequestType keyRequestType) {
-    switch (keyRequestType) {
-        case KeyRequestType::INITIAL:
-            return DrmPlugin::kKeyRequestType_Initial;
-            break;
-        case KeyRequestType::RENEWAL:
-            return DrmPlugin::kKeyRequestType_Renewal;
-            break;
-        case KeyRequestType::RELEASE:
-            return DrmPlugin::kKeyRequestType_Release;
-            break;
-        default:
-            return DrmPlugin::kKeyRequestType_Unknown;
-            break;
-    }
+status_t DrmHal::getKeyRequest(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& initData,
+                               String8 const& mimeType, DrmPlugin::KeyType keyType,
+                               KeyedVector<String8, String8> const& optionalParameters,
+                               Vector<uint8_t>& request, String8& defaultUrl,
+                               DrmPlugin::KeyRequestType* keyRequestType) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->getKeyRequest(sessionId, initData, mimeType, keyType,
+                                          optionalParameters, request, defaultUrl, keyRequestType);
+    return mDrmHalHidl->getKeyRequest(sessionId, initData, mimeType, keyType, optionalParameters,
+                                      request, defaultUrl, keyRequestType);
 }
 
-static DrmPlugin::KeyRequestType toKeyRequestType_1_1(
-        KeyRequestType_V1_1 keyRequestType) {
-    switch (keyRequestType) {
-        case KeyRequestType_V1_1::NONE:
-            return DrmPlugin::kKeyRequestType_None;
-            break;
-        case KeyRequestType_V1_1::UPDATE:
-            return DrmPlugin::kKeyRequestType_Update;
-            break;
-        default:
-            return toKeyRequestType(static_cast<KeyRequestType>(keyRequestType));
-            break;
-    }
+status_t DrmHal::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                    Vector<uint8_t> const& response, Vector<uint8_t>& keySetId) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->provideKeyResponse(sessionId, response, keySetId);
+    return mDrmHalHidl->provideKeyResponse(sessionId, response, keySetId);
 }
 
-status_t DrmHal::getKeyRequest(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &initData, String8 const &mimeType,
-        DrmPlugin::KeyType keyType, KeyedVector<String8,
-        String8> const &optionalParameters, Vector<uint8_t> &request,
-        String8 &defaultUrl, DrmPlugin::KeyRequestType *keyRequestType) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-    EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTimeUs);
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    KeyType hKeyType;
-    if (keyType == DrmPlugin::kKeyType_Streaming) {
-        hKeyType = KeyType::STREAMING;
-    } else if (keyType == DrmPlugin::kKeyType_Offline) {
-        hKeyType = KeyType::OFFLINE;
-    } else if (keyType == DrmPlugin::kKeyType_Release) {
-        hKeyType = KeyType::RELEASE;
-    } else {
-        keyRequestTimer.SetAttribute(BAD_VALUE);
-        return BAD_VALUE;
-    }
-
-    ::KeyedVector hOptionalParameters = toHidlKeyedVector(optionalParameters);
-
-    status_t err = UNKNOWN_ERROR;
-    Return<void> hResult;
-
-    if (mPluginV1_2 != NULL) {
-        hResult = mPluginV1_2->getKeyRequest_1_2(
-                toHidlVec(sessionId), toHidlVec(initData),
-                toHidlString(mimeType), hKeyType, hOptionalParameters,
-                [&](Status_V1_2 status, const hidl_vec<uint8_t>& hRequest,
-                        KeyRequestType_V1_1 hKeyRequestType,
-                        const hidl_string& hDefaultUrl) {
-                    if (status == Status_V1_2::OK) {
-                        request = toVector(hRequest);
-                        defaultUrl = toString8(hDefaultUrl);
-                        *keyRequestType = toKeyRequestType_1_1(hKeyRequestType);
-                    }
-                    err = toStatusT(status);
-                });
-    } else if (mPluginV1_1 != NULL) {
-        hResult = mPluginV1_1->getKeyRequest_1_1(
-                toHidlVec(sessionId), toHidlVec(initData),
-                toHidlString(mimeType), hKeyType, hOptionalParameters,
-                [&](Status status, const hidl_vec<uint8_t>& hRequest,
-                        KeyRequestType_V1_1 hKeyRequestType,
-                        const hidl_string& hDefaultUrl) {
-                    if (status == Status::OK) {
-                        request = toVector(hRequest);
-                        defaultUrl = toString8(hDefaultUrl);
-                        *keyRequestType = toKeyRequestType_1_1(hKeyRequestType);
-                    }
-                    err = toStatusT(status);
-                });
-    } else {
-        hResult = mPlugin->getKeyRequest(
-                toHidlVec(sessionId), toHidlVec(initData),
-                toHidlString(mimeType), hKeyType, hOptionalParameters,
-                [&](Status status, const hidl_vec<uint8_t>& hRequest,
-                        KeyRequestType hKeyRequestType,
-                        const hidl_string& hDefaultUrl) {
-                    if (status == Status::OK) {
-                        request = toVector(hRequest);
-                        defaultUrl = toString8(hDefaultUrl);
-                        *keyRequestType = toKeyRequestType(hKeyRequestType);
-                    }
-                    err = toStatusT(status);
-                });
-    }
-
-    err = hResult.isOk() ? err : DEAD_OBJECT;
-    keyRequestTimer.SetAttribute(err);
-    return err;
+status_t DrmHal::removeKeys(Vector<uint8_t> const& keySetId) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeKeys(keySetId);
+    return mDrmHalHidl->removeKeys(keySetId);
 }
 
-status_t DrmHal::provideKeyResponse(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &response, Vector<uint8_t> &keySetId) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-    EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTimeUs);
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->provideKeyResponse(toHidlVec(sessionId),
-            toHidlVec(response),
-            [&](Status status, const hidl_vec<uint8_t>& hKeySetId) {
-                if (status == Status::OK) {
-                    keySetId = toVector(hKeySetId);
-                }
-                err = toStatusT(status);
-            }
-        );
-    err = hResult.isOk() ? err : DEAD_OBJECT;
-    keyResponseTimer.SetAttribute(err);
-    return err;
+status_t DrmHal::restoreKeys(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keySetId) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->restoreKeys(sessionId, keySetId);
+    return mDrmHalHidl->restoreKeys(sessionId, keySetId);
 }
 
-status_t DrmHal::removeKeys(Vector<uint8_t> const &keySetId) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    Return<Status> status = mPlugin->removeKeys(toHidlVec(keySetId));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                KeyedVector<String8, String8>& infoMap) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->queryKeyStatus(sessionId, infoMap);
+    return mDrmHalHidl->queryKeyStatus(sessionId, infoMap);
 }
 
-status_t DrmHal::restoreKeys(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &keySetId) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    Return<Status> status = mPlugin->restoreKeys(toHidlVec(sessionId),
-            toHidlVec(keySetId));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                     Vector<uint8_t>& request, String8& defaultUrl) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->getProvisionRequest(certType, certAuthority, request, defaultUrl);
+    return mDrmHalHidl->getProvisionRequest(certType, certAuthority, request, defaultUrl);
 }
 
-status_t DrmHal::queryKeyStatus(Vector<uint8_t> const &sessionId,
-        KeyedVector<String8, String8> &infoMap) const {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    ::KeyedVector hInfoMap;
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->queryKeyStatus(toHidlVec(sessionId),
-            [&](Status status, const hidl_vec<KeyValue>& map) {
-                if (status == Status::OK) {
-                    infoMap = toKeyedVector(map);
-                }
-                err = toStatusT(status);
-            }
-        );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::provideProvisionResponse(Vector<uint8_t> const& response,
+                                          Vector<uint8_t>& certificate,
+                                          Vector<uint8_t>& wrappedKey) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->provideProvisionResponse(response, certificate, wrappedKey);
+    return mDrmHalHidl->provideProvisionResponse(response, certificate, wrappedKey);
 }
 
-status_t DrmHal::getProvisionRequest(String8 const &certType,
-        String8 const &certAuthority, Vector<uint8_t> &request,
-        String8 &defaultUrl) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    status_t err = UNKNOWN_ERROR;
-    Return<void> hResult;
-
-    if (mPluginV1_2 != NULL) {
-        hResult = mPluginV1_2->getProvisionRequest_1_2(
-                toHidlString(certType), toHidlString(certAuthority),
-                [&](Status_V1_2 status, const hidl_vec<uint8_t>& hRequest,
-                        const hidl_string& hDefaultUrl) {
-                    if (status == Status_V1_2::OK) {
-                        request = toVector(hRequest);
-                        defaultUrl = toString8(hDefaultUrl);
-                    }
-                    err = toStatusT(status);
-                }
-            );
-    } else {
-        hResult = mPlugin->getProvisionRequest(
-                toHidlString(certType), toHidlString(certAuthority),
-                [&](Status status, const hidl_vec<uint8_t>& hRequest,
-                        const hidl_string& hDefaultUrl) {
-                    if (status == Status::OK) {
-                        request = toVector(hRequest);
-                        defaultUrl = toString8(hDefaultUrl);
-                    }
-                    err = toStatusT(status);
-                }
-            );
-    }
-
-    err = hResult.isOk() ? err : DEAD_OBJECT;
-    mMetrics.mGetProvisionRequestCounter.Increment(err);
-    return err;
+status_t DrmHal::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecureStops(secureStops);
+    return mDrmHalHidl->getSecureStops(secureStops);
 }
 
-status_t DrmHal::provideProvisionResponse(Vector<uint8_t> const &response,
-        Vector<uint8_t> &certificate, Vector<uint8_t> &wrappedKey) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->provideProvisionResponse(toHidlVec(response),
-            [&](Status status, const hidl_vec<uint8_t>& hCertificate,
-                    const hidl_vec<uint8_t>& hWrappedKey) {
-                if (status == Status::OK) {
-                    certificate = toVector(hCertificate);
-                    wrappedKey = toVector(hWrappedKey);
-                }
-                err = toStatusT(status);
-            }
-        );
-
-    err = hResult.isOk() ? err : DEAD_OBJECT;
-    mMetrics.mProvideProvisionResponseCounter.Increment(err);
-    return err;
+status_t DrmHal::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecureStopIds(secureStopIds);
+    return mDrmHalHidl->getSecureStopIds(secureStopIds);
 }
 
-status_t DrmHal::getSecureStops(List<Vector<uint8_t>> &secureStops) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->getSecureStops(
-            [&](Status status, const hidl_vec<SecureStop>& hSecureStops) {
-                if (status == Status::OK) {
-                    secureStops = toSecureStops(hSecureStops);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecureStop(ssid, secureStop);
+    return mDrmHalHidl->getSecureStop(ssid, secureStop);
 }
 
-
-status_t DrmHal::getSecureStopIds(List<Vector<uint8_t>> &secureStopIds) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    if (mPluginV1_1 == NULL) {
-        return ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPluginV1_1->getSecureStopIds(
-            [&](Status status, const hidl_vec<SecureStopId>& hSecureStopIds) {
-                if (status == Status::OK) {
-                    secureStopIds = toSecureStopIds(hSecureStopIds);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->releaseSecureStops(ssRelease);
+    return mDrmHalHidl->releaseSecureStops(ssRelease);
 }
 
-
-status_t DrmHal::getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->getSecureStop(toHidlVec(ssid),
-            [&](Status status, const SecureStop& hSecureStop) {
-                if (status == Status::OK) {
-                    secureStop = toVector(hSecureStop.opaqueData);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
-}
-
-status_t DrmHal::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    Return<Status> status(Status::ERROR_DRM_UNKNOWN);
-    if (mPluginV1_1 != NULL) {
-        SecureStopRelease secureStopRelease;
-        secureStopRelease.opaqueData = toHidlVec(ssRelease);
-        status = mPluginV1_1->releaseSecureStops(secureStopRelease);
-    } else {
-        status = mPlugin->releaseSecureStop(toHidlVec(ssRelease));
-    }
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
-}
-
-status_t DrmHal::removeSecureStop(Vector<uint8_t> const &ssid) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    if (mPluginV1_1 == NULL) {
-        return ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    Return<Status> status = mPluginV1_1->removeSecureStop(toHidlVec(ssid));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::removeSecureStop(Vector<uint8_t> const& ssid) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeSecureStop(ssid);
+    return mDrmHalHidl->removeSecureStop(ssid);
 }
 
 status_t DrmHal::removeAllSecureStops() {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    Return<Status> status(Status::ERROR_DRM_UNKNOWN);
-    if (mPluginV1_1 != NULL) {
-        status = mPluginV1_1->removeAllSecureStops();
-    } else {
-        status = mPlugin->releaseAllSecureStops();
-    }
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeAllSecureStops();
+    return mDrmHalHidl->removeAllSecureStops();
 }
 
-status_t DrmHal::getHdcpLevels(DrmPlugin::HdcpLevel *connected,
-            DrmPlugin::HdcpLevel *max) const {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    if (connected == NULL || max == NULL) {
-        return BAD_VALUE;
-    }
-    status_t err = UNKNOWN_ERROR;
-
-    *connected = DrmPlugin::kHdcpLevelUnknown;
-    *max = DrmPlugin::kHdcpLevelUnknown;
-
-    Return<void> hResult;
-    if (mPluginV1_2 != NULL) {
-        hResult = mPluginV1_2->getHdcpLevels_1_2(
-                [&](Status_V1_2 status, const HdcpLevel_V1_2& hConnected, const HdcpLevel_V1_2& hMax) {
-                    if (status == Status_V1_2::OK) {
-                        *connected = toHdcpLevel(hConnected);
-                        *max = toHdcpLevel(hMax);
-                    }
-                    err = toStatusT(status);
-                });
-    } else if (mPluginV1_1 != NULL) {
-        hResult = mPluginV1_1->getHdcpLevels(
-                [&](Status status, const HdcpLevel& hConnected, const HdcpLevel& hMax) {
-                    if (status == Status::OK) {
-                        *connected = toHdcpLevel(static_cast<HdcpLevel_V1_2>(hConnected));
-                        *max = toHdcpLevel(static_cast<HdcpLevel_V1_2>(hMax));
-                    }
-                    err = toStatusT(status);
-                });
-    } else {
-        return ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
+                               DrmPlugin::HdcpLevel* maxLevel) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getHdcpLevels(connectedLevel, maxLevel);
+    return mDrmHalHidl->getHdcpLevels(connectedLevel, maxLevel);
 }
 
-status_t DrmHal::getNumberOfSessions(uint32_t *open, uint32_t *max) const {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    if (open == NULL || max == NULL) {
-        return BAD_VALUE;
-    }
-    status_t err = UNKNOWN_ERROR;
-
-    *open = 0;
-    *max = 0;
-
-    if (mPluginV1_1 == NULL) {
-        return ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    Return<void> hResult = mPluginV1_1->getNumberOfSessions(
-            [&](Status status, uint32_t hOpen, uint32_t hMax) {
-                if (status == Status::OK) {
-                    *open = hOpen;
-                    *max = hMax;
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->getNumberOfSessions(currentSessions, maxSessions);
+    return mDrmHalHidl->getNumberOfSessions(currentSessions, maxSessions);
 }
 
-status_t DrmHal::getSecurityLevel(Vector<uint8_t> const &sessionId,
-        DrmPlugin::SecurityLevel *level) const {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    if (level == NULL) {
-        return BAD_VALUE;
-    }
-    status_t err = UNKNOWN_ERROR;
-
-    if (mPluginV1_1 == NULL) {
-        return ERROR_DRM_CANNOT_HANDLE;
-    }
-
-    *level = DrmPlugin::kSecurityLevelUnknown;
-
-    Return<void> hResult = mPluginV1_1->getSecurityLevel(toHidlVec(sessionId),
-            [&](Status status, SecurityLevel hLevel) {
-                if (status == Status::OK) {
-                    *level = toSecurityLevel(hLevel);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                  DrmPlugin::SecurityLevel* level) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getSecurityLevel(sessionId, level);
+    return mDrmHalHidl->getSecurityLevel(sessionId, level);
 }
 
-status_t DrmHal::getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    if (mPluginV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPluginV1_2->getOfflineLicenseKeySetIds(
-            [&](Status status, const hidl_vec<KeySetId>& hKeySetIds) {
-                if (status == Status::OK) {
-                    keySetIds = toKeySetIds(hKeySetIds);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getOfflineLicenseKeySetIds(keySetIds);
+    return mDrmHalHidl->getOfflineLicenseKeySetIds(keySetIds);
 }
 
-status_t DrmHal::removeOfflineLicense(Vector<uint8_t> const &keySetId) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    if (mPluginV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    Return<Status> status = mPluginV1_2->removeOfflineLicense(toHidlVec(keySetId));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->removeOfflineLicense(keySetId);
+    return mDrmHalHidl->removeOfflineLicense(keySetId);
 }
 
-status_t DrmHal::getOfflineLicenseState(Vector<uint8_t> const &keySetId,
-        DrmPlugin::OfflineLicenseState *licenseState) const {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mInitCheck != OK) {
-        return mInitCheck;
-    }
-
-    if (mPluginV1_2 == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-    *licenseState = DrmPlugin::kOfflineLicenseStateUnknown;
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPluginV1_2->getOfflineLicenseState(toHidlVec(keySetId),
-            [&](Status status, OfflineLicenseState hLicenseState) {
-                if (status == Status::OK) {
-                    *licenseState = toOfflineLicenseState(hLicenseState);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                        DrmPlugin::OfflineLicenseState* licenseState) const {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->getOfflineLicenseState(keySetId, licenseState);
+    return mDrmHalHidl->getOfflineLicenseState(keySetId, licenseState);
 }
 
-status_t DrmHal::getPropertyString(String8 const &name, String8 &value ) const {
-    Mutex::Autolock autoLock(mLock);
-    return getPropertyStringInternal(name, value);
+status_t DrmHal::getPropertyString(String8 const& name, String8& value) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getPropertyString(name, value);
+    return mDrmHalHidl->getPropertyString(name, value);
 }
 
-status_t DrmHal::getPropertyStringInternal(String8 const &name, String8 &value) const {
-    // This function is internal to the class and should only be called while
-    // mLock is already held.
-    INIT_CHECK();
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->getPropertyString(toHidlString(name),
-            [&](Status status, const hidl_string& hValue) {
-                if (status == Status::OK) {
-                    value = toString8(hValue);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getPropertyByteArray(name, value);
+    return mDrmHalHidl->getPropertyByteArray(name, value);
 }
 
-status_t DrmHal::getPropertyByteArray(String8 const &name, Vector<uint8_t> &value ) const {
-    Mutex::Autolock autoLock(mLock);
-    return getPropertyByteArrayInternal(name, value);
+status_t DrmHal::setPropertyString(String8 const& name, String8 const& value) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setPropertyString(name, value);
+    return mDrmHalHidl->setPropertyString(name, value);
 }
 
-status_t DrmHal::getPropertyByteArrayInternal(String8 const &name, Vector<uint8_t> &value ) const {
-    // This function is internal to the class and should only be called while
-    // mLock is already held.
-    INIT_CHECK();
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->getPropertyByteArray(toHidlString(name),
-            [&](Status status, const hidl_vec<uint8_t>& hValue) {
-                if (status == Status::OK) {
-                    value = toVector(hValue);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    err = hResult.isOk() ? err : DEAD_OBJECT;
-    if (name == kPropertyDeviceUniqueId) {
-        mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
-    }
-    return err;
+status_t DrmHal::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setPropertyByteArray(name, value);
+    return mDrmHalHidl->setPropertyByteArray(name, value);
 }
 
-status_t DrmHal::setPropertyString(String8 const &name, String8 const &value ) const {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    Return<Status> status = mPlugin->setPropertyString(toHidlString(name),
-            toHidlString(value));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getMetrics(consumer);
+    return mDrmHalHidl->getMetrics(consumer);
 }
 
-status_t DrmHal::setPropertyByteArray(String8 const &name,
-                                   Vector<uint8_t> const &value ) const {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    Return<Status> status = mPlugin->setPropertyByteArray(toHidlString(name),
-            toHidlVec(value));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::setCipherAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->setCipherAlgorithm(sessionId, algorithm);
+    return mDrmHalHidl->setCipherAlgorithm(sessionId, algorithm);
 }
 
-status_t DrmHal::getMetrics(const sp<IDrmMetricsConsumer> &consumer) {
-    if (consumer == nullptr) {
-        return UNEXPECTED_NULL;
-    }
-    consumer->consumeFrameworkMetrics(mMetrics);
-
-    // Append vendor metrics if they are supported.
-    if (mPluginV1_1 != NULL) {
-        String8 vendor;
-        String8 description;
-        if (getPropertyStringInternal(String8("vendor"), vendor) != OK
-            || vendor.isEmpty()) {
-          ALOGE("Get vendor failed or is empty");
-          vendor = "NONE";
-        }
-        if (getPropertyStringInternal(String8("description"), description) != OK
-            || description.isEmpty()) {
-          ALOGE("Get description failed or is empty.");
-          description = "NONE";
-        }
-        vendor += ".";
-        vendor += description;
-
-        hidl_vec<DrmMetricGroup> pluginMetrics;
-        status_t err = UNKNOWN_ERROR;
-
-        Return<void> status = mPluginV1_1->getMetrics(
-                [&](Status status, hidl_vec<DrmMetricGroup> pluginMetrics) {
-                    if (status != Status::OK) {
-                      ALOGV("Error getting plugin metrics: %d", status);
-                    } else {
-                      consumer->consumeHidlMetrics(vendor, pluginMetrics);
-                    }
-                    err = toStatusT(status);
-                });
-        return status.isOk() ? err : DEAD_OBJECT;
-    }
-
-    return OK;
+status_t DrmHal::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setMacAlgorithm(sessionId, algorithm);
+    return mDrmHalHidl->setMacAlgorithm(sessionId, algorithm);
 }
 
-status_t DrmHal::setCipherAlgorithm(Vector<uint8_t> const &sessionId,
-                                 String8 const &algorithm) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    Return<Status> status = mPlugin->setCipherAlgorithm(toHidlVec(sessionId),
-            toHidlString(algorithm));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                         Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                         Vector<uint8_t>& output) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->encrypt(sessionId, keyId, input, iv, output);
+    return mDrmHalHidl->encrypt(sessionId, keyId, input, iv, output);
 }
 
-status_t DrmHal::setMacAlgorithm(Vector<uint8_t> const &sessionId,
-                              String8 const &algorithm) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    Return<Status> status = mPlugin->setMacAlgorithm(toHidlVec(sessionId),
-            toHidlString(algorithm));
-    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+status_t DrmHal::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                         Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                         Vector<uint8_t>& output) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->decrypt(sessionId, keyId, input, iv, output);
+    return mDrmHalHidl->decrypt(sessionId, keyId, input, iv, output);
 }
 
-status_t DrmHal::encrypt(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &keyId, Vector<uint8_t> const &input,
-        Vector<uint8_t> const &iv, Vector<uint8_t> &output) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->encrypt(toHidlVec(sessionId),
-            toHidlVec(keyId), toHidlVec(input), toHidlVec(iv),
-            [&](Status status, const hidl_vec<uint8_t>& hOutput) {
-                if (status == Status::OK) {
-                    output = toVector(hOutput);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                      Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->sign(sessionId, keyId, message, signature);
+    return mDrmHalHidl->sign(sessionId, keyId, message, signature);
 }
 
-status_t DrmHal::decrypt(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &keyId, Vector<uint8_t> const &input,
-        Vector<uint8_t> const &iv, Vector<uint8_t> &output) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    status_t  err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->decrypt(toHidlVec(sessionId),
-            toHidlVec(keyId), toHidlVec(input), toHidlVec(iv),
-            [&](Status status, const hidl_vec<uint8_t>& hOutput) {
-                if (status == Status::OK) {
-                    output = toVector(hOutput);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                        Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                        bool& match) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->verify(sessionId, keyId, message, signature, match);
+    return mDrmHalHidl->verify(sessionId, keyId, message, signature, match);
 }
 
-status_t DrmHal::sign(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &keyId, Vector<uint8_t> const &message,
-        Vector<uint8_t> &signature) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->sign(toHidlVec(sessionId),
-            toHidlVec(keyId), toHidlVec(message),
-            [&](Status status, const hidl_vec<uint8_t>& hSignature)  {
-                if (status == Status::OK) {
-                    signature = toVector(hSignature);
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                         Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                         Vector<uint8_t>& signature) {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->signRSA(sessionId, algorithm, message, wrappedKey, signature);
+    return mDrmHalHidl->signRSA(sessionId, algorithm, message, wrappedKey, signature);
 }
 
-status_t DrmHal::verify(Vector<uint8_t> const &sessionId,
-        Vector<uint8_t> const &keyId, Vector<uint8_t> const &message,
-        Vector<uint8_t> const &signature, bool &match) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->verify(toHidlVec(sessionId),toHidlVec(keyId),
-            toHidlVec(message), toHidlVec(signature),
-            [&](Status status, bool hMatch) {
-                if (status == Status::OK) {
-                    match = hMatch;
-                } else {
-                    match = false;
-                }
-                err = toStatusT(status);
-            }
-    );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::setListener(const sp<IDrmClient>& listener) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setListener(listener);
+    return mDrmHalHidl->setListener(listener);
 }
 
-status_t DrmHal::signRSA(Vector<uint8_t> const &sessionId,
-        String8 const &algorithm, Vector<uint8_t> const &message,
-        Vector<uint8_t> const &wrappedKey, Vector<uint8_t> &signature) {
-    Mutex::Autolock autoLock(mLock);
-    INIT_CHECK();
-
-    DrmSessionManager::Instance()->useSession(sessionId);
-
-    status_t err = UNKNOWN_ERROR;
-
-    Return<void> hResult = mPlugin->signRSA(toHidlVec(sessionId),
-            toHidlString(algorithm), toHidlVec(message), toHidlVec(wrappedKey),
-            [&](Status status, const hidl_vec<uint8_t>& hSignature) {
-                if (status == Status::OK) {
-                    signature = toVector(hSignature);
-                }
-                err = toStatusT(status);
-            }
-        );
-
-    return hResult.isOk() ? err : DEAD_OBJECT;
+status_t DrmHal::requiresSecureDecoder(const char* mime, bool* required) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->requiresSecureDecoder(mime, required);
+    return mDrmHalHidl->requiresSecureDecoder(mime, required);
 }
 
-std::string DrmHal::reportFrameworkMetrics(const std::string& pluginMetrics) const
-{
-    mediametrics_handle_t item(mediametrics_create("mediadrm"));
-    mediametrics_setUid(item, mMetrics.GetAppUid());
-    String8 vendor;
-    String8 description;
-    status_t result = getPropertyStringInternal(String8("vendor"), vendor);
-    if (result != OK) {
-        ALOGE("Failed to get vendor from drm plugin: %d", result);
-    } else {
-        mediametrics_setCString(item, "vendor", vendor.c_str());
-    }
-    result = getPropertyStringInternal(String8("description"), description);
-    if (result != OK) {
-        ALOGE("Failed to get description from drm plugin: %d", result);
-    } else {
-        mediametrics_setCString(item, "description", description.c_str());
-    }
-
-    std::string serializedMetrics;
-    result = mMetrics.GetSerializedMetrics(&serializedMetrics);
-    if (result != OK) {
-        ALOGE("Failed to serialize framework metrics: %d", result);
-    }
-    std::string b64EncodedMetrics = toBase64StringNoPad(serializedMetrics.data(),
-                                                        serializedMetrics.size());
-    if (!b64EncodedMetrics.empty()) {
-        mediametrics_setCString(item, "serialized_metrics", b64EncodedMetrics.c_str());
-    }
-    if (!pluginMetrics.empty()) {
-        mediametrics_setCString(item, "plugin_metrics", pluginMetrics.c_str());
-    }
-    if (!mediametrics_selfRecord(item)) {
-        ALOGE("Failed to self record framework metrics");
-    }
-    mediametrics_delete(item);
-    return serializedMetrics;
+status_t DrmHal::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
+                                       bool* required) const {
+    if (mDrmHalAidl->initCheck() == OK)
+        return mDrmHalAidl->requiresSecureDecoder(mime, securityLevel, required);
+    return mDrmHalHidl->requiresSecureDecoder(mime, securityLevel, required);
 }
 
-std::string DrmHal::reportPluginMetrics() const
-{
-    Vector<uint8_t> metricsVector;
-    String8 vendor;
-    String8 description;
-    std::string metricsString;
-    if (getPropertyStringInternal(String8("vendor"), vendor) == OK &&
-            getPropertyStringInternal(String8("description"), description) == OK &&
-            getPropertyByteArrayInternal(String8("metrics"), metricsVector) == OK) {
-        metricsString = toBase64StringNoPad(metricsVector.array(),
-                                                        metricsVector.size());
-        status_t res = android::reportDrmPluginMetrics(metricsString, vendor,
-                                                       description, mMetrics.GetAppUid());
-        if (res != OK) {
-            ALOGE("Metrics were retrieved but could not be reported: %d", res);
-        }
-    }
-    return metricsString;
+status_t DrmHal::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->setPlaybackId(sessionId, playbackId);
+    return mDrmHalHidl->setPlaybackId(sessionId, playbackId);
 }
 
-status_t DrmHal::requiresSecureDecoder(const char *mime, bool *required) const {
-    Mutex::Autolock autoLock(mLock);
-    if (mPluginV1_4 == NULL) {
-        return false;
-    }
-    auto hResult = mPluginV1_4->requiresSecureDecoderDefault(hidl_string(mime));
-    if (!hResult.isOk()) {
-        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (required) {
-        *required = hResult;
-    }
-    return OK;
+status_t DrmHal::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    if (mDrmHalAidl->initCheck() == OK) return mDrmHalAidl->getLogMessages(logs);
+    return mDrmHalHidl->getLogMessages(logs);
 }
 
-status_t DrmHal::requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
-                                       bool *required) const {
-    Mutex::Autolock autoLock(mLock);
-    if (mPluginV1_4 == NULL) {
-        return false;
-    }
-    auto hLevel = toHidlSecurityLevel(securityLevel);
-    auto hResult = mPluginV1_4->requiresSecureDecoder(hidl_string(mime), hLevel);
-    if (!hResult.isOk()) {
-        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (required) {
-        *required = hResult;
-    }
-    return OK;
-}
-
-status_t DrmHal::setPlaybackId(Vector<uint8_t> const &sessionId, const char *playbackId) {
-    Mutex::Autolock autoLock(mLock);
-    if (mPluginV1_4 == NULL) {
-        return ERROR_UNSUPPORTED;
-    }
-    auto err = mPluginV1_4->setPlaybackId(toHidlVec(sessionId), hidl_string(playbackId));
-    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
-}
-
-status_t DrmHal::getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const {
-    Mutex::Autolock autoLock(mLock);
-    return DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs);
+status_t DrmHal::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
+    status_t statusResult = mDrmHalAidl->getSupportedSchemes(schemes);
+    if (statusResult == OK) return statusResult;
+    return mDrmHalHidl->getSupportedSchemes(schemes);
 }
 
 }  // namespace android
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
new file mode 100644
index 0000000..bdd83e9
--- /dev/null
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -0,0 +1,1254 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DrmHalAidl"
+
+#include <array>
+#include <algorithm>
+#include <map>
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <media/PluginMetricsReporting.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <mediadrm/DrmHalAidl.h>
+#include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmUtils.h>
+
+using ::android::DrmUtils::statusAidlToStatusT;
+using ::aidl::android::hardware::drm::CryptoSchemes;
+using ::aidl::android::hardware::drm::DrmMetricNamedValue;
+using ::aidl::android::hardware::drm::DrmMetricValue;
+using ::aidl::android::hardware::drm::HdcpLevel;
+using ::aidl::android::hardware::drm::HdcpLevels;
+using ::aidl::android::hardware::drm::KeyRequest;
+using ::aidl::android::hardware::drm::KeyRequestType;
+using ::aidl::android::hardware::drm::KeySetId;
+using ::aidl::android::hardware::drm::KeyStatus;
+using ::aidl::android::hardware::drm::KeyStatusType;
+using ::aidl::android::hardware::drm::KeyType;
+using ::aidl::android::hardware::drm::KeyValue;
+using ::aidl::android::hardware::drm::NumberOfSessions;
+using ::aidl::android::hardware::drm::OfflineLicenseState;
+using ::aidl::android::hardware::drm::OpaqueData;
+using ::aidl::android::hardware::drm::ProvideProvisionResponseResult;
+using ::aidl::android::hardware::drm::ProvisionRequest;
+using ::aidl::android::hardware::drm::SecureStop;
+using ::aidl::android::hardware::drm::SecureStopId;
+using ::aidl::android::hardware::drm::SecurityLevel;
+using ::aidl::android::hardware::drm::Status;
+using ::aidl::android::hardware::drm::SupportedContentType;
+using ::aidl::android::hardware::drm::Uuid;
+using DrmMetricGroupAidl = ::aidl::android::hardware::drm::DrmMetricGroup;
+using DrmMetricGroupHidl = ::android::hardware::drm::V1_1::DrmMetricGroup;
+using DrmMetricAidl = ::aidl::android::hardware::drm::DrmMetric;
+using DrmMetricHidl = ::android::hardware::drm::V1_1::DrmMetricGroup::Metric;
+using ValueHidl = ::android::hardware::drm::V1_1::DrmMetricGroup::Value;
+using AttributeHidl = ::android::hardware::drm::V1_1::DrmMetricGroup::Attribute;
+using IDrmPluginAidl = ::aidl::android::hardware::drm::IDrmPlugin;
+using EventTypeAidl = ::aidl::android::hardware::drm::EventType;
+using ::android::hardware::hidl_vec;
+
+namespace {
+
+constexpr char kPropertyDeviceUniqueId[] = "deviceUniqueId";
+constexpr char kEqualsSign[] = "=";
+
+template <typename T>
+std::string toBase64StringNoPad(const T* data, size_t size) {
+    // Note that the base 64 conversion only works with arrays of single-byte
+    // values. If the source is empty or is not an array of single-byte values,
+    // return an empty string.
+    if (size == 0 || sizeof(data[0]) != 1) {
+        return "";
+    }
+
+    android::AString outputString;
+    encodeBase64(data, size, &outputString);
+    // Remove trailing equals padding if it exists.
+    while (outputString.size() > 0 && outputString.endsWith(kEqualsSign)) {
+        outputString.erase(outputString.size() - 1, 1);
+    }
+
+    return std::string(outputString.c_str(), outputString.size());
+}
+
+}  // anonymous namespace
+
+namespace android {
+
+#define INIT_CHECK()                             \
+    {                                            \
+        if (mInitCheck != OK) return mInitCheck; \
+    }
+
+template <typename Byte = uint8_t>
+static std::vector<Byte> toStdVec(const Vector<uint8_t>& vector) {
+    auto v = reinterpret_cast<const Byte*>(vector.array());
+    std::vector<Byte> vec(v, v + vector.size());
+    return vec;
+}
+
+static const Vector<uint8_t> toVector(const std::vector<uint8_t>& vec) {
+    Vector<uint8_t> vector;
+    vector.appendArray(vec.data(), vec.size());
+    return *const_cast<const Vector<uint8_t>*>(&vector);
+}
+
+static String8 toString8(const std::string& string) {
+    return String8(string.c_str());
+}
+
+static std::string toStdString(const String8& string8) {
+    return std::string(string8.string());
+}
+
+static std::vector<KeyValue> toKeyValueVector(const KeyedVector<String8, String8>& keyedVector) {
+    std::vector<KeyValue> stdKeyedVector;
+    for (size_t i = 0; i < keyedVector.size(); i++) {
+        KeyValue keyValue;
+        keyValue.key = toStdString(keyedVector.keyAt(i));
+        keyValue.value = toStdString(keyedVector.valueAt(i));
+        stdKeyedVector.push_back(keyValue);
+    }
+    return stdKeyedVector;
+}
+
+static KeyedVector<String8, String8> toKeyedVector(const std::vector<KeyValue>& keyValueVec) {
+    KeyedVector<String8, String8> keyedVector;
+    for (size_t i = 0; i < keyValueVec.size(); i++) {
+        keyedVector.add(toString8(keyValueVec[i].key), toString8(keyValueVec[i].value));
+    }
+    return keyedVector;
+}
+
+static DrmPlugin::KeyRequestType toKeyRequestType(KeyRequestType keyRequestType) {
+    switch (keyRequestType) {
+        case KeyRequestType::INITIAL:
+            return DrmPlugin::kKeyRequestType_Initial;
+            break;
+        case KeyRequestType::RENEWAL:
+            return DrmPlugin::kKeyRequestType_Renewal;
+            break;
+        case KeyRequestType::RELEASE:
+            return DrmPlugin::kKeyRequestType_Release;
+            break;
+        case KeyRequestType::NONE:
+            return DrmPlugin::kKeyRequestType_None;
+            break;
+        case KeyRequestType::UPDATE:
+            return DrmPlugin::kKeyRequestType_Update;
+            break;
+        default:
+            return DrmPlugin::kKeyRequestType_Unknown;
+            break;
+    }
+}
+
+static List<Vector<uint8_t>> toSecureStops(const std::vector<SecureStop>& aSecureStops) {
+    List<Vector<uint8_t>> secureStops;
+    for (size_t i = 0; i < aSecureStops.size(); i++) {
+        secureStops.push_back(toVector(aSecureStops[i].opaqueData));
+    }
+    return secureStops;
+}
+
+static List<Vector<uint8_t>> toSecureStopIds(const std::vector<SecureStopId>& aSecureStopIds) {
+    List<Vector<uint8_t>> secureStopIds;
+    for (size_t i = 0; i < aSecureStopIds.size(); i++) {
+        secureStopIds.push_back(toVector(aSecureStopIds[i].secureStopId));
+    }
+    return secureStopIds;
+}
+
+static DrmPlugin::HdcpLevel toHdcpLevel(HdcpLevel level) {
+    switch (level) {
+        case HdcpLevel::HDCP_NONE:
+            return DrmPlugin::kHdcpNone;
+        case HdcpLevel::HDCP_V1:
+            return DrmPlugin::kHdcpV1;
+        case HdcpLevel::HDCP_V2:
+            return DrmPlugin::kHdcpV2;
+        case HdcpLevel::HDCP_V2_1:
+            return DrmPlugin::kHdcpV2_1;
+        case HdcpLevel::HDCP_V2_2:
+            return DrmPlugin::kHdcpV2_2;
+        case HdcpLevel::HDCP_V2_3:
+            return DrmPlugin::kHdcpV2_3;
+        case HdcpLevel::HDCP_NO_OUTPUT:
+            return DrmPlugin::kHdcpNoOutput;
+        default:
+            return DrmPlugin::kHdcpLevelUnknown;
+    }
+}
+
+static DrmPlugin::SecurityLevel toSecurityLevel(SecurityLevel level) {
+    switch (level) {
+        case SecurityLevel::SW_SECURE_CRYPTO:
+            return DrmPlugin::kSecurityLevelSwSecureCrypto;
+        case SecurityLevel::SW_SECURE_DECODE:
+            return DrmPlugin::kSecurityLevelSwSecureDecode;
+        case SecurityLevel::HW_SECURE_CRYPTO:
+            return DrmPlugin::kSecurityLevelHwSecureCrypto;
+        case SecurityLevel::HW_SECURE_DECODE:
+            return DrmPlugin::kSecurityLevelHwSecureDecode;
+        case SecurityLevel::HW_SECURE_ALL:
+            return DrmPlugin::kSecurityLevelHwSecureAll;
+        case SecurityLevel::DEFAULT:
+            return DrmPlugin::kSecurityLevelMax;
+        default:
+            return DrmPlugin::kSecurityLevelUnknown;
+    }
+}
+
+static SecurityLevel toAidlSecurityLevel(DrmPlugin::SecurityLevel level) {
+    switch (level) {
+        case DrmPlugin::kSecurityLevelSwSecureCrypto:
+            return SecurityLevel::SW_SECURE_CRYPTO;
+        case DrmPlugin::kSecurityLevelSwSecureDecode:
+            return SecurityLevel::SW_SECURE_DECODE;
+        case DrmPlugin::kSecurityLevelHwSecureCrypto:
+            return SecurityLevel::HW_SECURE_CRYPTO;
+        case DrmPlugin::kSecurityLevelHwSecureDecode:
+            return SecurityLevel::HW_SECURE_DECODE;
+        case DrmPlugin::kSecurityLevelHwSecureAll:
+            return SecurityLevel::HW_SECURE_ALL;
+        case DrmPlugin::kSecurityLevelMax:
+            return SecurityLevel::DEFAULT;
+        default:
+            return SecurityLevel::UNKNOWN;
+    }
+}
+
+static List<Vector<uint8_t>> toKeySetIds(const std::vector<KeySetId>& hKeySetIds) {
+    List<Vector<uint8_t>> keySetIds;
+    for (size_t i = 0; i < hKeySetIds.size(); i++) {
+        keySetIds.push_back(toVector(hKeySetIds[i].keySetId));
+    }
+    return keySetIds;
+}
+
+static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t>& vector) {
+    hidl_vec<uint8_t> vec;
+    vec.setToExternal(const_cast<uint8_t*>(vector.array()), vector.size());
+    return vec;
+}
+
+static DrmPlugin::OfflineLicenseState toOfflineLicenseState(OfflineLicenseState licenseState) {
+    switch (licenseState) {
+        case OfflineLicenseState::USABLE:
+            return DrmPlugin::kOfflineLicenseStateUsable;
+        case OfflineLicenseState::INACTIVE:
+            return DrmPlugin::kOfflineLicenseStateReleased;
+        default:
+            return DrmPlugin::kOfflineLicenseStateUnknown;
+    }
+}
+
+Mutex DrmHalAidl::mLock;
+
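+// Converts AIDL DrmMetricGroups into the HIDL V1_1 representation that
+// IDrmMetricsConsumer::consumeHidlMetrics() still expects.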
+static hidl_vec<DrmMetricGroupHidl> toDrmMetricGroupHidl(std::vector<DrmMetricGroupAidl> result) {
+    std::vector<DrmMetricGroupHidl> resultHidl;
+    for (auto r : result) {
+        DrmMetricGroupHidl re;
+        std::vector<DrmMetricHidl> tmpMetric;
+        for (auto m : r.metrics) {
+            DrmMetricHidl me;
+            me.name = m.name;
+            std::vector<AttributeHidl> aTmp;
+            for (auto attr : m.attributes) {
+                AttributeHidl attrHidl;
+                attrHidl.name = attr.name;
+
+                switch (attr.value.getTag()) {
+                    case DrmMetricValue::Tag::int64Value:
+                        attrHidl.type = DrmMetricGroupHidl::ValueType::INT64_TYPE;
+                        attrHidl.int64Value = attr.value.get<DrmMetricValue::Tag::int64Value>();
+                        break;
+                    case DrmMetricValue::Tag::doubleValue:
+                        attrHidl.type = DrmMetricGroupHidl::ValueType::DOUBLE_TYPE;
+                        attrHidl.doubleValue = attr.value.get<DrmMetricValue::Tag::doubleValue>();
+                        break;
+                    case DrmMetricValue::Tag::stringValue:
+                        attrHidl.type = DrmMetricGroupHidl::ValueType::STRING_TYPE;
+                        attrHidl.stringValue = attr.value.get<DrmMetricValue::Tag::stringValue>();
+                        break;
+                    default:
+                        break;
+                }
+
+                aTmp.push_back(attrHidl);
+            }
+
+            me.attributes = aTmp;
+
+            std::vector<ValueHidl> vTmp;
+            for (auto value : m.values) {
+                ValueHidl valueHidl;
+                valueHidl.componentName = value.name;
+                switch (value.value.getTag()) {
+                    case DrmMetricValue::Tag::int64Value:
+                        valueHidl.type = DrmMetricGroupHidl::ValueType::INT64_TYPE;
+                        valueHidl.int64Value = value.value.get<DrmMetricValue::Tag::int64Value>();
+                        break;
+                    case DrmMetricValue::Tag::doubleValue:
+                        valueHidl.type = DrmMetricGroupHidl::ValueType::DOUBLE_TYPE;
+                        valueHidl.doubleValue = value.value.get<DrmMetricValue::Tag::doubleValue>();
+                        break;
+                    case DrmMetricValue::Tag::stringValue:
+                        valueHidl.type = DrmMetricGroupHidl::ValueType::STRING_TYPE;
+                        valueHidl.stringValue = value.value.get<DrmMetricValue::Tag::stringValue>();
+                        break;
+                    default:
+                        break;
+                }
+
+                vTmp.push_back(valueHidl);
+            }
+
+            me.values = vTmp;
+            tmpMetric.push_back(me);
+        }
+
+        re.metrics = tmpMetric;
+        resultHidl.push_back(re);
+    }
+
+    return resultHidl;
+}
+
+// DrmSessionClient Definition
+
+struct DrmHalAidl::DrmSessionClient : public aidl::android::media::BnResourceManagerClient {
+    explicit DrmSessionClient(DrmHalAidl* drm, const Vector<uint8_t>& sessionId)
+        : mSessionId(sessionId), mDrm(drm) {}
+
+    ::ndk::ScopedAStatus reclaimResource(bool* _aidl_return) override;
+    ::ndk::ScopedAStatus getName(::std::string* _aidl_return) override;
+
+    const Vector<uint8_t> mSessionId;
+
+    virtual ~DrmSessionClient();
+
+  private:
+    wp<DrmHalAidl> mDrm;
+
+    DISALLOW_EVIL_CONSTRUCTORS(DrmSessionClient);
+};
+
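+// Resource manager reclaim callback: attempt to free the session by closing it
+// and report whether the reclaim succeeded.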
+::ndk::ScopedAStatus DrmHalAidl::DrmSessionClient::reclaimResource(bool* _aidl_return) {
+    auto sessionId = mSessionId;
+    sp<DrmHalAidl> drm = mDrm.promote();
+    if (drm == NULL) {
+        *_aidl_return = true;
+        return ::ndk::ScopedAStatus::ok();
+    }
+    status_t err = drm->closeSession(sessionId);
+    if (err != OK) {
+        *_aidl_return = false;
+        return ::ndk::ScopedAStatus::ok();
+    }
+    drm->onEvent(EventTypeAidl::SESSION_RECLAIMED, toHidlVec(sessionId), hidl_vec<uint8_t>());
+    *_aidl_return = true;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalAidl::DrmSessionClient::getName(::std::string* _aidl_return) {
+    String8 name;
+    sp<DrmHalAidl> drm = mDrm.promote();
+    if (drm == NULL) {
+        name.append("<deleted>");
+    } else if (drm->getPropertyStringInternal(String8("vendor"), name) != OK || name.isEmpty()) {
+        name.append("<Get vendor failed or is empty>");
+    }
+    name.append("[");
+    for (size_t i = 0; i < mSessionId.size(); ++i) {
+        name.appendFormat("%02x", mSessionId[i]);
+    }
+    name.append("]");
+    *_aidl_return = name;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+DrmHalAidl::DrmSessionClient::~DrmSessionClient() {
+    DrmSessionManager::Instance()->removeSession(mSessionId);
+}
+
+// DrmHalAidl methods
+DrmHalAidl::DrmHalAidl()
+    : mListener(::ndk::SharedRefBase::make<DrmHalListener>(&mMetrics)),
+      mFactories(DrmUtils::makeDrmFactoriesAidl()),
+      mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {}
+
+status_t DrmHalAidl::initCheck() const {
+    return mInitCheck;
+}
+
+DrmHalAidl::~DrmHalAidl() {}
+
+status_t DrmHalAidl::setListener(const sp<IDrmClient>& listener) {
+    mListener->setListener(listener);
+    return NO_ERROR;
+}
+
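+// Walks the AIDL factories looking for one that advertises the UUID, then
+// checks the requested mime type and security level against that factory's
+// supported content types.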
+status_t DrmHalAidl::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                             DrmPlugin::SecurityLevel level, bool* isSupported) {
+    Mutex::Autolock autoLock(mLock);
+    *isSupported = false;
+    Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
+    SecurityLevel levelAidl = toAidlSecurityLevel(level);
+    std::string mimeTypeStr = mimeType.string();
+
+    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        CryptoSchemes schemes{};
+        auto err = mFactories[i]->getSupportedCryptoSchemes(&schemes);
+        if (!err.isOk() || !std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl)) {
+            continue;
+        }
+
+        ALOGV("supported schemes: %s; query: level %d mime %s",
+              schemes.toString().c_str(), levelAidl, mimeType.c_str());
+        std::map<std::string, SupportedContentType> contentTypes;
+        for (auto ct : schemes.mimeTypes) {
+            contentTypes[ct.mime] = ct;
+        }
+
+        // handle default value cases
+        if (levelAidl == SecurityLevel::DEFAULT || levelAidl == SecurityLevel::UNKNOWN) {
+            if (mimeType == "") {
+                // isCryptoSchemeSupported(uuid)
+                *isSupported = true;
+            } else {
+                // isCryptoSchemeSupported(uuid, mimeType)
+                *isSupported = contentTypes.count(mimeTypeStr);
+            }
+            return OK;
+        } else if (mimeType == "") {
+            return BAD_VALUE;
+        }
+
+        auto ct = contentTypes[mimeTypeStr];
+        if (levelAidl > ct.maxLevel || levelAidl < ct.minLevel) {
+            continue;
+        }
+
+        *isSupported = true;
+        break;
+    }
+
+    return OK;
+}
+
+status_t DrmHalAidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
+    Mutex::Autolock autoLock(mLock);
+
+    Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
+    std::string appPackageNameAidl = toStdString(appPackageName);
+    std::shared_ptr<IDrmPluginAidl> pluginAidl;
+    mMetrics.SetAppPackageName(appPackageName);
+    mMetrics.SetAppUid(AIBinder_getCallingUid());
+    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        ::ndk::ScopedAStatus status =
+                mFactories[i]->createDrmPlugin(uuidAidl, appPackageNameAidl, &pluginAidl);
+        if (status.isOk()) {
+            if (pluginAidl != NULL) {
+                mPlugin = pluginAidl;
+                break;
+            }
+        } else {
+            DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d",
+                             status.getServiceSpecificError());
+        }
+    }
+
+    if (mPlugin == NULL) {
+        DrmUtils::LOG2BE(uuid, "No supported hal instance found");
+        mInitCheck = ERROR_UNSUPPORTED;
+    } else {
+        mInitCheck = OK;
+        // Upcast the stored mListener to its BnDrmPluginListener base before registering it.
+        ::ndk::ScopedAStatus status =
+                mPlugin->setListener(std::static_pointer_cast<BnDrmPluginListener>(mListener));
+        if (!status.isOk()) {
+            mInitCheck = DEAD_OBJECT;
+            ALOGE("setListener failed: ex %d svc err %d",
+                status.getExceptionCode(),
+                status.getServiceSpecificError());
+        }
+
+        if (mInitCheck != OK) {
+            mPlugin.reset();
+        }
+    }
+
+    return mInitCheck;
+}
+
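+// Opens a session at the requested security level. On ERROR_DRM_RESOURCE_BUSY
+// the call retries as long as DrmSessionManager can reclaim another session;
+// successfully opened sessions are registered with DrmSessionManager.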
+status_t DrmHalAidl::openSession(DrmPlugin::SecurityLevel level, Vector<uint8_t>& sessionId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    SecurityLevel aSecurityLevel = toAidlSecurityLevel(level);
+
+    if (aSecurityLevel == SecurityLevel::UNKNOWN) {
+        return ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    status_t err = UNKNOWN_ERROR;
+    bool retry = true;
+    do {
+        std::vector<uint8_t> aSessionId;
+
+        ::ndk::ScopedAStatus status = mPlugin->openSession(aSecurityLevel, &aSessionId);
+        if (status.isOk()) sessionId = toVector(aSessionId);
+        err = statusAidlToStatusT(status);
+
+        if (err == ERROR_DRM_RESOURCE_BUSY && retry) {
+            mLock.unlock();
+            // reclaimSession may call back into closeSession. Since mLock is
+            // shared between Drm instances, we unlock here to avoid a
+            // deadlock.
+            retry = DrmSessionManager::Instance()->reclaimSession(AIBinder_getCallingPid());
+            mLock.lock();
+        } else {
+            retry = false;
+        }
+    } while (retry);
+
+    if (err == OK) {
+        std::shared_ptr<DrmSessionClient> client =
+                ndk::SharedRefBase::make<DrmSessionClient>(this, sessionId);
+        DrmSessionManager::Instance()->addSession(
+                AIBinder_getCallingPid(), std::static_pointer_cast<IResourceManagerClient>(client),
+                sessionId);
+        mOpenSessions.push_back(client);
+        mMetrics.SetSessionStart(sessionId);
+    }
+
+    mMetrics.mOpenSessionCounter.Increment(err);
+    return err;
+}
+
+status_t DrmHalAidl::closeSession(Vector<uint8_t> const& sessionId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
+    ::ndk::ScopedAStatus status = mPlugin->closeSession(sessionIdAidl);
+    status_t response = statusAidlToStatusT(status);
+    if (status.isOk()) {
+        DrmSessionManager::Instance()->removeSession(sessionId);
+        for (auto i = mOpenSessions.begin(); i != mOpenSessions.end(); i++) {
+            if (isEqualSessionId((*i)->mSessionId, sessionId)) {
+                mOpenSessions.erase(i);
+                break;
+            }
+        }
+
+        mMetrics.SetSessionEnd(sessionId);
+    }
+
+    mMetrics.mCloseSessionCounter.Increment(response);
+    return response;
+}
+
+status_t DrmHalAidl::getKeyRequest(Vector<uint8_t> const& sessionId,
+                                   Vector<uint8_t> const& initData, String8 const& mimeType,
+                                   DrmPlugin::KeyType keyType,
+                                   KeyedVector<String8, String8> const& optionalParameters,
+                                   Vector<uint8_t>& request, String8& defaultUrl,
+                                   DrmPlugin::KeyRequestType* keyRequestType) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+    EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTimeUs);
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    KeyType aKeyType;
+    if (keyType == DrmPlugin::kKeyType_Streaming) {
+        aKeyType = KeyType::STREAMING;
+    } else if (keyType == DrmPlugin::kKeyType_Offline) {
+        aKeyType = KeyType::OFFLINE;
+    } else if (keyType == DrmPlugin::kKeyType_Release) {
+        aKeyType = KeyType::RELEASE;
+    } else {
+        keyRequestTimer.SetAttribute(BAD_VALUE);
+        return BAD_VALUE;
+    }
+
+    status_t err = UNKNOWN_ERROR;
+
+    std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
+    std::vector<uint8_t> initDataAidl = toStdVec(initData);
+    KeyRequest keyRequest;
+
+    ::ndk::ScopedAStatus status =
+            mPlugin->getKeyRequest(sessionIdAidl, initDataAidl, toStdString(mimeType), aKeyType,
+                                   toKeyValueVector(optionalParameters), &keyRequest);
+    if (status.isOk()) {
+        request = toVector(keyRequest.request);
+        defaultUrl = toString8(keyRequest.defaultUrl);
+        *keyRequestType = toKeyRequestType(keyRequest.requestType);
+    }
+
+    err = statusAidlToStatusT(status);
+    keyRequestTimer.SetAttribute(err);
+    return err;
+}
+
+status_t DrmHalAidl::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                        Vector<uint8_t> const& response,
+                                        Vector<uint8_t>& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+    EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTimeUs);
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
+    std::vector<uint8_t> responseAidl = toStdVec(response);
+    KeySetId keySetIdsAidl;
+    ::ndk::ScopedAStatus status =
+            mPlugin->provideKeyResponse(sessionIdAidl, responseAidl, &keySetIdsAidl);
+
+    if (status.isOk()) keySetId = toVector(keySetIdsAidl.keySetId);
+    err = statusAidlToStatusT(status);
+    keyResponseTimer.SetAttribute(err);
+    return err;
+}
+
+status_t DrmHalAidl::removeKeys(Vector<uint8_t> const& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    ::ndk::ScopedAStatus status = mPlugin->removeKeys(toStdVec(keySetId));
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::restoreKeys(Vector<uint8_t> const& sessionId,
+                                 Vector<uint8_t> const& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    KeySetId keySetIdsAidl;
+    keySetIdsAidl.keySetId = toStdVec(keySetId);
+    ::ndk::ScopedAStatus status = mPlugin->restoreKeys(toStdVec(sessionId), keySetIdsAidl);
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                    KeyedVector<String8, String8>& infoMap) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    std::vector<KeyValue> infoMapAidl;
+    ::ndk::ScopedAStatus status = mPlugin->queryKeyStatus(toStdVec(sessionId), &infoMapAidl);
+
+    infoMap = toKeyedVector(infoMapAidl);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                         Vector<uint8_t>& request, String8& defaultUrl) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    ProvisionRequest requestAidl;
+    ::ndk::ScopedAStatus status = mPlugin->getProvisionRequest(
+            toStdString(certType), toStdString(certAuthority), &requestAidl);
+
+    request = toVector(requestAidl.request);
+    defaultUrl = toString8(requestAidl.defaultUrl);
+
+    err = statusAidlToStatusT(status);
+    mMetrics.mGetProvisionRequestCounter.Increment(err);
+    return err;
+}
+
+status_t DrmHalAidl::provideProvisionResponse(Vector<uint8_t> const& response,
+                                              Vector<uint8_t>& certificate,
+                                              Vector<uint8_t>& wrappedKey) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+    ProvideProvisionResponseResult result;
+    ::ndk::ScopedAStatus status = mPlugin->provideProvisionResponse(toStdVec(response), &result);
+
+    certificate = toVector(result.certificate);
+    wrappedKey = toVector(result.wrappedKey);
+    err = statusAidlToStatusT(status);
+    mMetrics.mProvideProvisionResponseCounter.Increment(err);
+    return err;
+}
+
+status_t DrmHalAidl::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    std::vector<SecureStop> result;
+    ::ndk::ScopedAStatus status = mPlugin->getSecureStops(&result);
+
+    secureStops = toSecureStops(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    std::vector<SecureStopId> result;
+    ::ndk::ScopedAStatus status = mPlugin->getSecureStopIds(&result);
+
+    secureStopIds = toSecureStopIds(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    SecureStopId ssidAidl;
+    ssidAidl.secureStopId = toStdVec(ssid);
+
+    SecureStop result;
+    ::ndk::ScopedAStatus status = mPlugin->getSecureStop(ssidAidl, &result);
+
+    secureStop = toVector(result.opaqueData);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    OpaqueData ssId;
+    ssId.opaqueData = toStdVec(ssRelease);
+    ::ndk::ScopedAStatus status = mPlugin->releaseSecureStops(ssId);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::removeSecureStop(Vector<uint8_t> const& ssid) {
+    Mutex::Autolock autoLock(mLock);
+
+    INIT_CHECK();
+
+    SecureStopId ssidAidl;
+    ssidAidl.secureStopId = toStdVec(ssid);
+    ::ndk::ScopedAStatus status = mPlugin->removeSecureStop(ssidAidl);
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::removeAllSecureStops() {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    ::ndk::ScopedAStatus status = mPlugin->releaseAllSecureStops();
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
+                                   DrmPlugin::HdcpLevel* max) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    if (connected == NULL || max == NULL) {
+        return BAD_VALUE;
+    }
+
+    *connected = DrmPlugin::kHdcpLevelUnknown;
+    *max = DrmPlugin::kHdcpLevelUnknown;
+
+    HdcpLevels lvlsAidl;
+    ::ndk::ScopedAStatus status = mPlugin->getHdcpLevels(&lvlsAidl);
+
+    *connected = toHdcpLevel(lvlsAidl.connectedLevel);
+    *max = toHdcpLevel(lvlsAidl.maxLevel);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    if (open == NULL || max == NULL) {
+        return BAD_VALUE;
+    }
+
+    *open = 0;
+    *max = 0;
+
+    NumberOfSessions result;
+    ::ndk::ScopedAStatus status = mPlugin->getNumberOfSessions(&result);
+
+    *open = result.currentSessions;
+    *max = result.maxSessions;
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                      DrmPlugin::SecurityLevel* level) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    if (level == NULL) {
+        return BAD_VALUE;
+    }
+
+    *level = DrmPlugin::kSecurityLevelUnknown;
+
+    SecurityLevel result;
+    ::ndk::ScopedAStatus status = mPlugin->getSecurityLevel(toStdVec(sessionId), &result);
+
+    *level = toSecurityLevel(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    std::vector<KeySetId> result;
+    ::ndk::ScopedAStatus status = mPlugin->getOfflineLicenseKeySetIds(&result);
+
+    keySetIds = toKeySetIds(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    KeySetId keySetIdAidl;
+    keySetIdAidl.keySetId = toStdVec(keySetId);
+    ::ndk::ScopedAStatus status = mPlugin->removeOfflineLicense(keySetIdAidl);
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                            DrmPlugin::OfflineLicenseState* licenseState) const {
+    Mutex::Autolock autoLock(mLock);
+
+    INIT_CHECK();
+    *licenseState = DrmPlugin::kOfflineLicenseStateUnknown;
+
+    KeySetId keySetIdAidl;
+    keySetIdAidl.keySetId = toStdVec(keySetId);
+
+    OfflineLicenseState result;
+    ::ndk::ScopedAStatus status = mPlugin->getOfflineLicenseState(keySetIdAidl, &result);
+
+    *licenseState = toOfflineLicenseState(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getPropertyString(String8 const& name, String8& value) const {
+    Mutex::Autolock autoLock(mLock);
+    return getPropertyStringInternal(name, value);
+}
+
+status_t DrmHalAidl::getPropertyStringInternal(String8 const& name, String8& value) const {
+    // This function is internal to the class and should only be called while
+    // mLock is already held.
+    INIT_CHECK();
+
+    std::string result;
+    ::ndk::ScopedAStatus status = mPlugin->getPropertyString(toStdString(name), &result);
+
+    value = toString8(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
+    Mutex::Autolock autoLock(mLock);
+    return getPropertyByteArrayInternal(name, value);
+}
+
+status_t DrmHalAidl::getPropertyByteArrayInternal(String8 const& name,
+                                                  Vector<uint8_t>& value) const {
+    // This function is internal to the class and should only be called while
+    // mLock is already held.
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    std::vector<uint8_t> result;
+    ::ndk::ScopedAStatus status = mPlugin->getPropertyByteArray(toStdString(name), &result);
+
+    value = toVector(result);
+    err = statusAidlToStatusT(status);
+    if (name == kPropertyDeviceUniqueId) {
+        mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
+    }
+    return err;
+}
+
+status_t DrmHalAidl::setPropertyString(String8 const& name, String8 const& value) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    ::ndk::ScopedAStatus status = mPlugin->setPropertyString(toStdString(name), toStdString(value));
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    ::ndk::ScopedAStatus status = mPlugin->setPropertyByteArray(toStdString(name), toStdVec(value));
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+    if (consumer == nullptr) {
+        return UNEXPECTED_NULL;
+    }
+    consumer->consumeFrameworkMetrics(mMetrics);
+
+    // Append vendor metrics if they are supported.
+
+    String8 vendor;
+    String8 description;
+    if (getPropertyStringInternal(String8("vendor"), vendor) != OK || vendor.isEmpty()) {
+        ALOGE("Get vendor failed or is empty");
+        vendor = "NONE";
+    }
+    if (getPropertyStringInternal(String8("description"), description) != OK ||
+        description.isEmpty()) {
+        ALOGE("Get description failed or is empty.");
+        description = "NONE";
+    }
+    vendor += ".";
+    vendor += description;
+
+    hidl_vec<DrmMetricGroupHidl> pluginMetrics;
+    status_t err = UNKNOWN_ERROR;
+
+    std::vector<DrmMetricGroupAidl> result;
+    ::ndk::ScopedAStatus status = mPlugin->getMetrics(&result);
+
+    if (status.isOk()) {
+        pluginMetrics = toDrmMetricGroupHidl(result);
+        consumer->consumeHidlMetrics(vendor, pluginMetrics);
+    }
+
+    err = statusAidlToStatusT(status);
+
+    return err;
+}
+
+status_t DrmHalAidl::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                        String8 const& algorithm) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    ::ndk::ScopedAStatus status =
+            mPlugin->setCipherAlgorithm(toStdVec(sessionId), toStdString(algorithm));
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    ::ndk::ScopedAStatus status =
+            mPlugin->setMacAlgorithm(toStdVec(sessionId), toStdString(algorithm));
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                             Vector<uint8_t>& output) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    std::vector<uint8_t> result;
+    ::ndk::ScopedAStatus status = mPlugin->encrypt(toStdVec(sessionId), toStdVec(keyId),
+                                                   toStdVec(input), toStdVec(iv), &result);
+
+    output = toVector(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                             Vector<uint8_t>& output) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    std::vector<uint8_t> result;
+    ::ndk::ScopedAStatus status = mPlugin->decrypt(toStdVec(sessionId), toStdVec(keyId),
+                                                   toStdVec(input), toStdVec(iv), &result);
+
+    output = toVector(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                          Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    std::vector<uint8_t> result;
+    ::ndk::ScopedAStatus status =
+            mPlugin->sign(toStdVec(sessionId), toStdVec(keyId), toStdVec(message), &result);
+
+    signature = toVector(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                            Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                            bool& match) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    ::ndk::ScopedAStatus status = mPlugin->verify(toStdVec(sessionId), toStdVec(keyId),
+                                                  toStdVec(message), toStdVec(signature), &match);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                             Vector<uint8_t>& signature) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    std::vector<uint8_t> result;
+    ::ndk::ScopedAStatus status =
+            mPlugin->signRSA(toStdVec(sessionId), toStdString(algorithm), toStdVec(message),
+                             toStdVec(wrappedKey), &result);
+
+    signature = toVector(result);
+
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::requiresSecureDecoder(const char* mime, bool* required) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    std::string mimeAidl(mime);
+    ::ndk::ScopedAStatus status =
+        mPlugin->requiresSecureDecoder(mimeAidl, SecurityLevel::DEFAULT, required);
+    if (!status.isOk()) {
+        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %d", status.getServiceSpecificError());
+        return DEAD_OBJECT;
+    }
+
+    return OK;
+}
+
+status_t DrmHalAidl::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
+                                           bool* required) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    auto aLevel = toAidlSecurityLevel(securityLevel);
+    std::string mimeAidl(mime);
+    ::ndk::ScopedAStatus status = mPlugin->requiresSecureDecoder(mimeAidl, aLevel, required);
+
+    status_t err = statusAidlToStatusT(status);
+    if (!status.isOk()) {
+        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %d", status.getServiceSpecificError());
+    }
+
+    return err;
+}
+
+status_t DrmHalAidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+    std::string playbackIdAidl(playbackId);
+    ::ndk::ScopedAStatus status = mPlugin->setPlaybackId(toStdVec(sessionId), playbackIdAidl);
+    return statusAidlToStatusT(status);
+}
+
+status_t DrmHalAidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    Mutex::Autolock autoLock(mLock);
+    return DrmUtils::GetLogMessagesAidl<IDrmPluginAidl>(mPlugin, logs);
+}
+
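+// Closes every session tracked in mOpenSessions; mLock is released around each
+// closeSession() call because closeSession() re-acquires it.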
+void DrmHalAidl::closeOpenSessions() {
+    Mutex::Autolock autoLock(mLock);
+    auto openSessions = mOpenSessions;
+    for (size_t i = 0; i < openSessions.size(); i++) {
+        mLock.unlock();
+        closeSession(openSessions[i]->mSessionId);
+        mLock.lock();
+    }
+    mOpenSessions.clear();
+}
+
+std::string DrmHalAidl::reportPluginMetrics() const {
+    Vector<uint8_t> metricsVector;
+    String8 vendor;
+    String8 description;
+    std::string metricsString;
+    if (getPropertyStringInternal(String8("vendor"), vendor) == OK &&
+        getPropertyStringInternal(String8("description"), description) == OK &&
+        getPropertyByteArrayInternal(String8("metrics"), metricsVector) == OK) {
+        metricsString = toBase64StringNoPad(metricsVector.array(), metricsVector.size());
+        status_t res = android::reportDrmPluginMetrics(metricsString, vendor, description,
+                                                       mMetrics.GetAppUid());
+        if (res != OK) {
+            ALOGE("Metrics were retrieved but could not be reported: %d", res);
+        }
+    }
+    return metricsString;
+}
+
+std::string DrmHalAidl::reportFrameworkMetrics(const std::string& pluginMetrics) const {
+    mediametrics_handle_t item(mediametrics_create("mediadrm"));
+    mediametrics_setUid(item, mMetrics.GetAppUid());
+    String8 vendor;
+    String8 description;
+    status_t result = getPropertyStringInternal(String8("vendor"), vendor);
+    if (result != OK) {
+        ALOGE("Failed to get vendor from drm plugin: %d", result);
+    } else {
+        mediametrics_setCString(item, "vendor", vendor.c_str());
+    }
+    result = getPropertyStringInternal(String8("description"), description);
+    if (result != OK) {
+        ALOGE("Failed to get description from drm plugin: %d", result);
+    } else {
+        mediametrics_setCString(item, "description", description.c_str());
+    }
+
+    std::string serializedMetrics;
+    result = mMetrics.GetSerializedMetrics(&serializedMetrics);
+    if (result != OK) {
+        ALOGE("Failed to serialize framework metrics: %d", result);
+    }
+    std::string b64EncodedMetrics =
+            toBase64StringNoPad(serializedMetrics.data(), serializedMetrics.size());
+    if (!b64EncodedMetrics.empty()) {
+        mediametrics_setCString(item, "serialized_metrics", b64EncodedMetrics.c_str());
+    }
+    if (!pluginMetrics.empty()) {
+        mediametrics_setCString(item, "plugin_metrics", pluginMetrics.c_str());
+    }
+    if (!mediametrics_selfRecord(item)) {
+        ALOGE("Failed to self record framework metrics");
+    }
+    mediametrics_delete(item);
+    return serializedMetrics;
+}
+
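+// Flattens the 16-byte UUIDs advertised by every AIDL factory into a single
+// byte vector.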
+status_t DrmHalAidl::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mFactories.empty()) return UNKNOWN_ERROR;
+    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        CryptoSchemes curSchemes{};
+        auto err = mFactories[i]->getSupportedCryptoSchemes(&curSchemes);
+        if (!err.isOk()) {
+            continue;
+        }
+
+        for (auto uuidObj : curSchemes.uuids) {
+            schemes.insert(schemes.end(), uuidObj.uuid.begin(), uuidObj.uuid.end());
+        }
+    }
+
+    return OK;
+}
+
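+// Releases the plugin: closes any open sessions, reports collected metrics,
+// detaches the listener, and drops the plugin reference.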
+void DrmHalAidl::cleanup() {
+    closeOpenSessions();
+
+    Mutex::Autolock autoLock(mLock);
+    reportFrameworkMetrics(reportPluginMetrics());
+
+    setListener(NULL);
+    mInitCheck = NO_INIT;
+    if (mPlugin != NULL) {
+        if (!mPlugin->setListener(NULL).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        }
+    }
+
+    mPlugin.reset();
+}
+
+status_t DrmHalAidl::destroyPlugin() {
+    cleanup();
+    return OK;
+}
+
+::ndk::ScopedAStatus DrmHalAidl::onEvent(EventTypeAidl eventTypeAidl,
+                                         const std::vector<uint8_t>& sessionId,
+                                         const std::vector<uint8_t>& data) {
+    return mListener->onEvent(eventTypeAidl, sessionId, data);
+}
+
+::ndk::ScopedAStatus DrmHalAidl::onExpirationUpdate(const std::vector<uint8_t>& sessionId,
+                                                    int64_t expiryTimeInMS) {
+    return mListener->onExpirationUpdate(sessionId, expiryTimeInMS);
+}
+
+::ndk::ScopedAStatus DrmHalAidl::onKeysChange(const std::vector<uint8_t>& sessionId,
+                                              const std::vector<KeyStatus>& keyStatusListAidl,
+                                              bool hasNewUsableKey) {
+    return mListener->onKeysChange(sessionId, keyStatusListAidl, hasNewUsableKey);
+}
+
+::ndk::ScopedAStatus DrmHalAidl::onSessionLostState(const std::vector<uint8_t>& sessionId) {
+    return mListener->onSessionLostState(sessionId);
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
new file mode 100644
index 0000000..c38dbef
--- /dev/null
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -0,0 +1,1537 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DrmHalHidl"
+
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <android/binder_manager.h>
+#include <android/hardware/drm/1.2/types.h>
+#include <android/hardware/drm/1.3/IDrmFactory.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <hidl/ServiceManagement.h>
+#include <media/EventMetric.h>
+#include <media/MediaMetrics.h>
+#include <media/PluginMetricsReporting.h>
+#include <media/drm/DrmAPI.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <mediadrm/DrmHalHidl.h>
+#include <mediadrm/DrmSessionClientInterface.h>
+#include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmUtils.h>
+#include <mediadrm/IDrmMetricsConsumer.h>
+#include <utils/Log.h>
+
+#include <iomanip>
+#include <vector>
+
+using ::android::sp;
+using ::android::DrmUtils::toStatusT;
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::drm::V1_1::DrmMetricGroup;
+using ::android::os::PersistableBundle;
+using drm::V1_0::KeyedVector;
+using drm::V1_0::KeyRequestType;
+using drm::V1_0::KeyType;
+using drm::V1_0::KeyValue;
+using drm::V1_0::SecureStop;
+using drm::V1_0::SecureStopId;
+using drm::V1_0::Status;
+using drm::V1_1::HdcpLevel;
+using drm::V1_1::SecureStopRelease;
+using drm::V1_1::SecurityLevel;
+using drm::V1_2::KeySetId;
+using drm::V1_2::KeyStatusType;
+
+typedef drm::V1_1::KeyRequestType KeyRequestType_V1_1;
+typedef drm::V1_2::Status Status_V1_2;
+typedef drm::V1_2::HdcpLevel HdcpLevel_V1_2;
+
+namespace {
+
+// This constant corresponds to the PROPERTY_DEVICE_UNIQUE_ID constant
+// in the MediaDrm API.
+constexpr char kPropertyDeviceUniqueId[] = "deviceUniqueId";
+constexpr char kEqualsSign[] = "=";
+
+template <typename T>
+std::string toBase64StringNoPad(const T* data, size_t size) {
+    // Note that the base 64 conversion only works with arrays of single-byte
+    // values. If the source is empty or is not an array of single-byte values,
+    // return empty string.
+    if (size == 0 || sizeof(data[0]) != 1) {
+        return "";
+    }
+
+    android::AString outputString;
+    encodeBase64(data, size, &outputString);
+    // Remove trailing equals padding if it exists.
+    while (outputString.size() > 0 && outputString.endsWith(kEqualsSign)) {
+        outputString.erase(outputString.size() - 1, 1);
+    }
+
+    return std::string(outputString.c_str(), outputString.size());
+}
+
+}  // anonymous namespace
+
+namespace android {
+
+#define INIT_CHECK()                             \
+    {                                            \
+        if (mInitCheck != OK) return mInitCheck; \
+    }
+
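+// Helpers for converting between framework containers (Vector<uint8_t>, String8)
+// and their HIDL counterparts. Note that toHidlVec() aliases the source buffer
+// via setToExternal() rather than copying it.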
+static const Vector<uint8_t> toVector(const hidl_vec<uint8_t>& vec) {
+    Vector<uint8_t> vector;
+    vector.appendArray(vec.data(), vec.size());
+    return *const_cast<const Vector<uint8_t>*>(&vector);
+}
+
+static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t>& vector) {
+    hidl_vec<uint8_t> vec;
+    vec.setToExternal(const_cast<uint8_t*>(vector.array()), vector.size());
+    return vec;
+}
+
+static String8 toString8(const hidl_string& string) {
+    return String8(string.c_str());
+}
+
+static hidl_string toHidlString(const String8& string) {
+    return hidl_string(string.string());
+}
+
+static DrmPlugin::SecurityLevel toSecurityLevel(SecurityLevel level) {
+    switch (level) {
+        case SecurityLevel::SW_SECURE_CRYPTO:
+            return DrmPlugin::kSecurityLevelSwSecureCrypto;
+        case SecurityLevel::SW_SECURE_DECODE:
+            return DrmPlugin::kSecurityLevelSwSecureDecode;
+        case SecurityLevel::HW_SECURE_CRYPTO:
+            return DrmPlugin::kSecurityLevelHwSecureCrypto;
+        case SecurityLevel::HW_SECURE_DECODE:
+            return DrmPlugin::kSecurityLevelHwSecureDecode;
+        case SecurityLevel::HW_SECURE_ALL:
+            return DrmPlugin::kSecurityLevelHwSecureAll;
+        default:
+            return DrmPlugin::kSecurityLevelUnknown;
+    }
+}
+
+static SecurityLevel toHidlSecurityLevel(DrmPlugin::SecurityLevel level) {
+    switch (level) {
+        case DrmPlugin::kSecurityLevelSwSecureCrypto:
+            return SecurityLevel::SW_SECURE_CRYPTO;
+        case DrmPlugin::kSecurityLevelSwSecureDecode:
+            return SecurityLevel::SW_SECURE_DECODE;
+        case DrmPlugin::kSecurityLevelHwSecureCrypto:
+            return SecurityLevel::HW_SECURE_CRYPTO;
+        case DrmPlugin::kSecurityLevelHwSecureDecode:
+            return SecurityLevel::HW_SECURE_DECODE;
+        case DrmPlugin::kSecurityLevelHwSecureAll:
+            return SecurityLevel::HW_SECURE_ALL;
+        default:
+            return SecurityLevel::UNKNOWN;
+    }
+}
+
+static DrmPlugin::OfflineLicenseState toOfflineLicenseState(OfflineLicenseState licenseState) {
+    switch (licenseState) {
+        case OfflineLicenseState::USABLE:
+            return DrmPlugin::kOfflineLicenseStateUsable;
+        case OfflineLicenseState::INACTIVE:
+            return DrmPlugin::kOfflineLicenseStateReleased;
+        default:
+            return DrmPlugin::kOfflineLicenseStateUnknown;
+    }
+}
+
+static DrmPlugin::HdcpLevel toHdcpLevel(HdcpLevel_V1_2 level) {
+    switch (level) {
+        case HdcpLevel_V1_2::HDCP_NONE:
+            return DrmPlugin::kHdcpNone;
+        case HdcpLevel_V1_2::HDCP_V1:
+            return DrmPlugin::kHdcpV1;
+        case HdcpLevel_V1_2::HDCP_V2:
+            return DrmPlugin::kHdcpV2;
+        case HdcpLevel_V1_2::HDCP_V2_1:
+            return DrmPlugin::kHdcpV2_1;
+        case HdcpLevel_V1_2::HDCP_V2_2:
+            return DrmPlugin::kHdcpV2_2;
+        case HdcpLevel_V1_2::HDCP_V2_3:
+            return DrmPlugin::kHdcpV2_3;
+        case HdcpLevel_V1_2::HDCP_NO_OUTPUT:
+            return DrmPlugin::kHdcpNoOutput;
+        default:
+            return DrmPlugin::kHdcpLevelUnknown;
+    }
+}
+static ::KeyedVector toHidlKeyedVector(const KeyedVector<String8, String8>& keyedVector) {
+
+    std::vector<KeyValue> stdKeyedVector;
+    for (size_t i = 0; i < keyedVector.size(); i++) {
+        KeyValue keyValue;
+        keyValue.key = toHidlString(keyedVector.keyAt(i));
+        keyValue.value = toHidlString(keyedVector.valueAt(i));
+        stdKeyedVector.push_back(keyValue);
+    }
+    return ::KeyedVector(stdKeyedVector);
+}
+
+static KeyedVector<String8, String8> toKeyedVector(const ::KeyedVector& hKeyedVector) {
+    KeyedVector<String8, String8> keyedVector;
+    for (size_t i = 0; i < hKeyedVector.size(); i++) {
+        keyedVector.add(toString8(hKeyedVector[i].key), toString8(hKeyedVector[i].value));
+    }
+    return keyedVector;
+}
+
+static List<Vector<uint8_t>> toSecureStops(const hidl_vec<SecureStop>& hSecureStops) {
+    List<Vector<uint8_t>> secureStops;
+    for (size_t i = 0; i < hSecureStops.size(); i++) {
+        secureStops.push_back(toVector(hSecureStops[i].opaqueData));
+    }
+    return secureStops;
+}
+
+static List<Vector<uint8_t>> toSecureStopIds(const hidl_vec<SecureStopId>& hSecureStopIds) {
+    List<Vector<uint8_t>> secureStopIds;
+    for (size_t i = 0; i < hSecureStopIds.size(); i++) {
+        secureStopIds.push_back(toVector(hSecureStopIds[i]));
+    }
+    return secureStopIds;
+}
+
+static List<Vector<uint8_t>> toKeySetIds(const hidl_vec<KeySetId>& hKeySetIds) {
+    List<Vector<uint8_t>> keySetIds;
+    for (size_t i = 0; i < hKeySetIds.size(); i++) {
+        keySetIds.push_back(toVector(hKeySetIds[i]));
+    }
+    return keySetIds;
+}
+
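+// mLock is a static member, so it is shared by every DrmHalHidl instance in the
+// process (see the unlock/relock around session reclaim in openSession()).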
+Mutex DrmHalHidl::mLock;
+
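+// Each open session is represented by a DrmSessionClient registered with the
+// resource manager: reclaimResource() closes the session (and emits a
+// SESSION_RECLAIMED event) when resources must be freed, and the destructor
+// removes the session from DrmSessionManager.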
+struct DrmHalHidl::DrmSessionClient : public aidl::android::media::BnResourceManagerClient {
+    explicit DrmSessionClient(DrmHalHidl* drm, const Vector<uint8_t>& sessionId)
+        : mSessionId(sessionId), mDrm(drm) {}
+
+    ::ndk::ScopedAStatus reclaimResource(bool* _aidl_return) override;
+    ::ndk::ScopedAStatus getName(::std::string* _aidl_return) override;
+
+    const Vector<uint8_t> mSessionId;
+
+    virtual ~DrmSessionClient();
+
+  private:
+    wp<DrmHalHidl> mDrm;
+
+    DISALLOW_EVIL_CONSTRUCTORS(DrmSessionClient);
+};
+
+::ndk::ScopedAStatus DrmHalHidl::DrmSessionClient::reclaimResource(bool* _aidl_return) {
+    auto sessionId = mSessionId;
+    sp<DrmHalHidl> drm = mDrm.promote();
+    if (drm == NULL) {
+        *_aidl_return = true;
+        return ::ndk::ScopedAStatus::ok();
+    }
+    status_t err = drm->closeSession(sessionId);
+    if (err != OK) {
+        *_aidl_return = false;
+        return ::ndk::ScopedAStatus::ok();
+    }
+    drm->sendEvent(EventType::SESSION_RECLAIMED, toHidlVec(sessionId), hidl_vec<uint8_t>());
+    *_aidl_return = true;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalHidl::DrmSessionClient::getName(::std::string* _aidl_return) {
+    String8 name;
+    sp<DrmHalHidl> drm = mDrm.promote();
+    if (drm == NULL) {
+        name.append("<deleted>");
+    } else if (drm->getPropertyStringInternal(String8("vendor"), name) != OK || name.isEmpty()) {
+        name.append("<Get vendor failed or is empty>");
+    }
+    name.append("[");
+    for (size_t i = 0; i < mSessionId.size(); ++i) {
+        name.appendFormat("%02x", mSessionId[i]);
+    }
+    name.append("]");
+    *_aidl_return = name;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+DrmHalHidl::DrmSessionClient::~DrmSessionClient() {
+    DrmSessionManager::Instance()->removeSession(mSessionId);
+}
+
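+// The factory list is enumerated once at construction. With no factories the
+// instance is permanently ERROR_UNSUPPORTED; otherwise it remains NO_INIT until
+// createPlugin() succeeds.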
+DrmHalHidl::DrmHalHidl()
+    : mFactories(makeDrmFactories()),
+      mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {}
+
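+// Works on a copy of mOpenSessions and releases mLock around each
+// closeSession() call, because closeSession() re-acquires the same lock.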
+void DrmHalHidl::closeOpenSessions() {
+    Mutex::Autolock autoLock(mLock);
+    auto openSessions = mOpenSessions;
+    for (size_t i = 0; i < openSessions.size(); i++) {
+        mLock.unlock();
+        closeSession(openSessions[i]->mSessionId);
+        mLock.lock();
+    }
+    mOpenSessions.clear();
+}
+
+DrmHalHidl::~DrmHalHidl() {}
+
+void DrmHalHidl::cleanup() {
+    closeOpenSessions();
+
+    Mutex::Autolock autoLock(mLock);
+    reportFrameworkMetrics(reportPluginMetrics());
+
+    setListener(NULL);
+    mInitCheck = NO_INIT;
+    if (mPluginV1_2 != NULL) {
+        if (!mPluginV1_2->setListener(NULL).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        }
+    } else if (mPlugin != NULL) {
+        if (!mPlugin->setListener(NULL).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        }
+    }
+    mPlugin.clear();
+    mPluginV1_1.clear();
+    mPluginV1_2.clear();
+    mPluginV1_4.clear();
+}
+
+std::vector<sp<IDrmFactory>> DrmHalHidl::makeDrmFactories() {
+    static std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
+    if (factories.size() == 0) {
+        DrmUtils::LOG2BI("No hidl drm factories found");
+        // could be in passthrough mode, load the default passthrough service
+        auto passthrough = IDrmFactory::getService();
+        if (passthrough != NULL) {
+            DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
+            factories.push_back(passthrough);
+        } else {
+            DrmUtils::LOG2BE("Failed to find passthrough drm factories");
+        }
+    }
+    return factories;
+}
+
+sp<IDrmPlugin> DrmHalHidl::makeDrmPlugin(const sp<IDrmFactory>& factory, const uint8_t uuid[16],
+                                         const String8& appPackageName) {
+    mAppPackageName = appPackageName;
+    mMetrics.SetAppPackageName(appPackageName);
+    mMetrics.SetAppUid(AIBinder_getCallingUid());
+
+    sp<IDrmPlugin> plugin;
+    Return<void> hResult = factory->createPlugin(
+            uuid, appPackageName.string(), [&](Status status, const sp<IDrmPlugin>& hPlugin) {
+                if (status != Status::OK) {
+                    DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d", status);
+                    return;
+                }
+                plugin = hPlugin;
+            });
+
+    if (!hResult.isOk()) {
+        DrmUtils::LOG2BE(uuid, "createPlugin remote call failed: %s",
+                         hResult.description().c_str());
+    }
+
+    return plugin;
+}
+
+status_t DrmHalHidl::initCheck() const {
+    return mInitCheck;
+}
+
+status_t DrmHalHidl::setListener(const sp<IDrmClient>& listener) {
+    Mutex::Autolock lock(mEventLock);
+    mListener = listener;
+    return NO_ERROR;
+}
+
+Return<void> DrmHalHidl::sendEvent(EventType hEventType, const hidl_vec<uint8_t>& sessionId,
+                                   const hidl_vec<uint8_t>& data) {
+    mMetrics.mEventCounter.Increment((uint32_t)hEventType);
+
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        Mutex::Autolock lock(mNotifyLock);
+        DrmPlugin::EventType eventType;
+        switch (hEventType) {
+            case EventType::PROVISION_REQUIRED:
+                eventType = DrmPlugin::kDrmPluginEventProvisionRequired;
+                break;
+            case EventType::KEY_NEEDED:
+                eventType = DrmPlugin::kDrmPluginEventKeyNeeded;
+                break;
+            case EventType::KEY_EXPIRED:
+                eventType = DrmPlugin::kDrmPluginEventKeyExpired;
+                break;
+            case EventType::VENDOR_DEFINED:
+                eventType = DrmPlugin::kDrmPluginEventVendorDefined;
+                break;
+            case EventType::SESSION_RECLAIMED:
+                eventType = DrmPlugin::kDrmPluginEventSessionReclaimed;
+                break;
+            default:
+                return Void();
+        }
+        listener->sendEvent(eventType, sessionId, data);
+    }
+    return Void();
+}
+
+Return<void> DrmHalHidl::sendExpirationUpdate(const hidl_vec<uint8_t>& sessionId,
+                                              int64_t expiryTimeInMS) {
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        Mutex::Autolock lock(mNotifyLock);
+        listener->sendExpirationUpdate(sessionId, expiryTimeInMS);
+    }
+    return Void();
+}
+
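+// V1.0 key status callbacks are widened to the V1.2 representation and routed
+// through sendKeysChange_1_2() so both paths share the same conversion logic.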
+Return<void> DrmHalHidl::sendKeysChange(const hidl_vec<uint8_t>& sessionId,
+                                        const hidl_vec<KeyStatus_V1_0>& keyStatusList_V1_0,
+                                        bool hasNewUsableKey) {
+    std::vector<KeyStatus> keyStatusVec;
+    for (const auto& keyStatus_V1_0 : keyStatusList_V1_0) {
+        keyStatusVec.push_back(
+                {keyStatus_V1_0.keyId, static_cast<KeyStatusType>(keyStatus_V1_0.type)});
+    }
+    hidl_vec<KeyStatus> keyStatusList_V1_2(keyStatusVec);
+    return sendKeysChange_1_2(sessionId, keyStatusList_V1_2, hasNewUsableKey);
+}
+
+Return<void> DrmHalHidl::sendKeysChange_1_2(const hidl_vec<uint8_t>& sessionId,
+                                            const hidl_vec<KeyStatus>& hKeyStatusList,
+                                            bool hasNewUsableKey) {
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        std::vector<DrmKeyStatus> keyStatusList;
+        size_t nKeys = hKeyStatusList.size();
+        for (size_t i = 0; i < nKeys; ++i) {
+            const KeyStatus& keyStatus = hKeyStatusList[i];
+            uint32_t type;
+            switch (keyStatus.type) {
+                case KeyStatusType::USABLE:
+                    type = DrmPlugin::kKeyStatusType_Usable;
+                    break;
+                case KeyStatusType::EXPIRED:
+                    type = DrmPlugin::kKeyStatusType_Expired;
+                    break;
+                case KeyStatusType::OUTPUTNOTALLOWED:
+                    type = DrmPlugin::kKeyStatusType_OutputNotAllowed;
+                    break;
+                case KeyStatusType::STATUSPENDING:
+                    type = DrmPlugin::kKeyStatusType_StatusPending;
+                    break;
+                case KeyStatusType::USABLEINFUTURE:
+                    type = DrmPlugin::kKeyStatusType_UsableInFuture;
+                    break;
+                case KeyStatusType::INTERNALERROR:
+                default:
+                    type = DrmPlugin::kKeyStatusType_InternalError;
+                    break;
+            }
+            keyStatusList.push_back({type, keyStatus.keyId});
+            mMetrics.mKeyStatusChangeCounter.Increment((uint32_t)keyStatus.type);
+        }
+
+        Mutex::Autolock lock(mNotifyLock);
+        listener->sendKeysChange(sessionId, keyStatusList, hasNewUsableKey);
+    } else {
+        // There's no listener. But we still want to count the key change
+        // events.
+        size_t nKeys = hKeyStatusList.size();
+        for (size_t i = 0; i < nKeys; i++) {
+            mMetrics.mKeyStatusChangeCounter.Increment((uint32_t)hKeyStatusList[i].type);
+        }
+    }
+
+    return Void();
+}
+
+Return<void> DrmHalHidl::sendSessionLostState(const hidl_vec<uint8_t>& sessionId) {
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        Mutex::Autolock lock(mNotifyLock);
+        listener->sendSessionLostState(sessionId);
+    }
+    return Void();
+}
+
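+// Implements the overload semantics of isCryptoSchemeSupported(): an unknown
+// security level means the caller did not specify one; checking a specific
+// level requires a V1.2 factory and is otherwise ERROR_UNSUPPORTED.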
+status_t DrmHalHidl::matchMimeTypeAndSecurityLevel(const sp<IDrmFactory>& factory,
+                                                   const uint8_t uuid[16], const String8& mimeType,
+                                                   DrmPlugin::SecurityLevel level,
+                                                   bool* isSupported) {
+    *isSupported = false;
+
+    // handle default value cases
+    if (level == DrmPlugin::kSecurityLevelUnknown) {
+        if (mimeType == "") {
+            // isCryptoSchemeSupported(uuid)
+            *isSupported = true;
+        } else {
+            // isCryptoSchemeSupported(uuid, mimeType)
+            *isSupported = factory->isContentTypeSupported(mimeType.string());
+        }
+        return OK;
+    } else if (mimeType == "") {
+        return BAD_VALUE;
+    }
+
+    sp<drm::V1_2::IDrmFactory> factoryV1_2 = drm::V1_2::IDrmFactory::castFrom(factory);
+    if (factoryV1_2 == NULL) {
+        return ERROR_UNSUPPORTED;
+    } else {
+        *isSupported = factoryV1_2->isCryptoSchemeSupported_1_2(uuid, mimeType.string(),
+                                                                toHidlSecurityLevel(level));
+        return OK;
+    }
+}
+
+status_t DrmHalHidl::isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                             DrmPlugin::SecurityLevel level, bool* isSupported) {
+    Mutex::Autolock autoLock(mLock);
+    *isSupported = false;
+    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+            return matchMimeTypeAndSecurityLevel(mFactories[i], uuid, mimeType, level, isSupported);
+        }
+    }
+    return OK;
+}
+
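+// Searches the factories in reverse order for one that supports the UUID,
+// creates the plugin, casts it to the newest interface versions it implements
+// (1.1/1.2/1.4), and registers this object as the plugin's listener.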
+status_t DrmHalHidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
+    Mutex::Autolock autoLock(mLock);
+
+    for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
+        if (hResult.isOk() && hResult) {
+            auto plugin = makeDrmPlugin(mFactories[i], uuid, appPackageName);
+            if (plugin != NULL) {
+                mPlugin = plugin;
+                mPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mPlugin);
+                mPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mPlugin);
+                mPluginV1_4 = drm::V1_4::IDrmPlugin::castFrom(mPlugin);
+                break;
+            }
+        }
+    }
+
+    if (mPlugin == NULL) {
+        DrmUtils::LOG2BE(uuid, "No supported hal instance found");
+        mInitCheck = ERROR_UNSUPPORTED;
+    } else {
+        mInitCheck = OK;
+        if (mPluginV1_2 != NULL) {
+            if (!mPluginV1_2->setListener(this).isOk()) {
+                mInitCheck = DEAD_OBJECT;
+            }
+        } else if (!mPlugin->setListener(this).isOk()) {
+            mInitCheck = DEAD_OBJECT;
+        }
+        if (mInitCheck != OK) {
+            mPlugin.clear();
+            mPluginV1_1.clear();
+            mPluginV1_2.clear();
+            mPluginV1_4.clear();
+        }
+    }
+
+    return mInitCheck;
+}
+
+status_t DrmHalHidl::destroyPlugin() {
+    cleanup();
+    return OK;
+}
+
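+// kSecurityLevelMax means "do not pass a level to the plugin". If the plugin
+// reports ERROR_DRM_RESOURCE_BUSY, DrmSessionManager is asked to reclaim a
+// session and the open is retried for as long as reclaiming succeeds. A
+// successful open is registered with the resource manager for later reclaim.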
+status_t DrmHalHidl::openSession(DrmPlugin::SecurityLevel level, Vector<uint8_t>& sessionId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    SecurityLevel hSecurityLevel = toHidlSecurityLevel(level);
+    bool setSecurityLevel = true;
+
+    if (level == DrmPlugin::kSecurityLevelMax) {
+        setSecurityLevel = false;
+    } else {
+        if (hSecurityLevel == SecurityLevel::UNKNOWN) {
+            return ERROR_DRM_CANNOT_HANDLE;
+        }
+    }
+
+    status_t err = UNKNOWN_ERROR;
+    bool retry = true;
+    do {
+        hidl_vec<uint8_t> hSessionId;
+
+        Return<void> hResult;
+        if (mPluginV1_1 == NULL || !setSecurityLevel) {
+            hResult = mPlugin->openSession([&](Status status, const hidl_vec<uint8_t>& id) {
+                if (status == Status::OK) {
+                    sessionId = toVector(id);
+                }
+                err = toStatusT(status);
+            });
+        } else {
+            hResult = mPluginV1_1->openSession_1_1(hSecurityLevel,
+                                                   [&](Status status, const hidl_vec<uint8_t>& id) {
+                                                       if (status == Status::OK) {
+                                                           sessionId = toVector(id);
+                                                       }
+                                                       err = toStatusT(status);
+                                                   });
+        }
+
+        if (!hResult.isOk()) {
+            err = DEAD_OBJECT;
+        }
+
+        if (err == ERROR_DRM_RESOURCE_BUSY && retry) {
+            mLock.unlock();
+            // reclaimSession may call back to closeSession, since mLock is
+            // shared between Drm instances, we should unlock here to avoid
+            // deadlock.
+            retry = DrmSessionManager::Instance()->reclaimSession(AIBinder_getCallingPid());
+            mLock.lock();
+        } else {
+            retry = false;
+        }
+    } while (retry);
+
+    if (err == OK) {
+        std::shared_ptr<DrmSessionClient> client =
+                ndk::SharedRefBase::make<DrmSessionClient>(this, sessionId);
+        DrmSessionManager::Instance()->addSession(
+                AIBinder_getCallingPid(), std::static_pointer_cast<IResourceManagerClient>(client),
+                sessionId);
+        mOpenSessions.push_back(client);
+        mMetrics.SetSessionStart(sessionId);
+    }
+
+    mMetrics.mOpenSessionCounter.Increment(err);
+    return err;
+}
+
+status_t DrmHalHidl::closeSession(Vector<uint8_t> const& sessionId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    Return<Status> status = mPlugin->closeSession(toHidlVec(sessionId));
+    if (status.isOk()) {
+        if (status == Status::OK) {
+            DrmSessionManager::Instance()->removeSession(sessionId);
+            for (auto i = mOpenSessions.begin(); i != mOpenSessions.end(); i++) {
+                if (isEqualSessionId((*i)->mSessionId, sessionId)) {
+                    mOpenSessions.erase(i);
+                    break;
+                }
+            }
+        }
+        status_t response = toStatusT(status);
+        mMetrics.SetSessionEnd(sessionId);
+        mMetrics.mCloseSessionCounter.Increment(response);
+        return response;
+    }
+    mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
+    return DEAD_OBJECT;
+}
+
+static DrmPlugin::KeyRequestType toKeyRequestType(KeyRequestType keyRequestType) {
+    switch (keyRequestType) {
+        case KeyRequestType::INITIAL:
+            return DrmPlugin::kKeyRequestType_Initial;
+        case KeyRequestType::RENEWAL:
+            return DrmPlugin::kKeyRequestType_Renewal;
+        case KeyRequestType::RELEASE:
+            return DrmPlugin::kKeyRequestType_Release;
+        default:
+            return DrmPlugin::kKeyRequestType_Unknown;
+    }
+}
+
+static DrmPlugin::KeyRequestType toKeyRequestType_1_1(KeyRequestType_V1_1 keyRequestType) {
+    switch (keyRequestType) {
+        case KeyRequestType_V1_1::NONE:
+            return DrmPlugin::kKeyRequestType_None;
+        case KeyRequestType_V1_1::UPDATE:
+            return DrmPlugin::kKeyRequestType_Update;
+        default:
+            return toKeyRequestType(static_cast<KeyRequestType>(keyRequestType));
+    }
+}
+
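+// Dispatches to getKeyRequest_1_2 / getKeyRequest_1_1 / getKeyRequest depending
+// on the newest plugin interface available, and records the call latency in
+// mGetKeyRequestTimeUs.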
+status_t DrmHalHidl::getKeyRequest(Vector<uint8_t> const& sessionId,
+                                   Vector<uint8_t> const& initData, String8 const& mimeType,
+                                   DrmPlugin::KeyType keyType,
+                                   KeyedVector<String8, String8> const& optionalParameters,
+                                   Vector<uint8_t>& request, String8& defaultUrl,
+                                   DrmPlugin::KeyRequestType* keyRequestType) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+    EventTimer<status_t> keyRequestTimer(&mMetrics.mGetKeyRequestTimeUs);
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    KeyType hKeyType;
+    if (keyType == DrmPlugin::kKeyType_Streaming) {
+        hKeyType = KeyType::STREAMING;
+    } else if (keyType == DrmPlugin::kKeyType_Offline) {
+        hKeyType = KeyType::OFFLINE;
+    } else if (keyType == DrmPlugin::kKeyType_Release) {
+        hKeyType = KeyType::RELEASE;
+    } else {
+        keyRequestTimer.SetAttribute(BAD_VALUE);
+        return BAD_VALUE;
+    }
+
+    ::KeyedVector hOptionalParameters = toHidlKeyedVector(optionalParameters);
+
+    status_t err = UNKNOWN_ERROR;
+    Return<void> hResult;
+
+    if (mPluginV1_2 != NULL) {
+        hResult = mPluginV1_2->getKeyRequest_1_2(
+                toHidlVec(sessionId), toHidlVec(initData), toHidlString(mimeType), hKeyType,
+                hOptionalParameters,
+                [&](Status_V1_2 status, const hidl_vec<uint8_t>& hRequest,
+                    KeyRequestType_V1_1 hKeyRequestType, const hidl_string& hDefaultUrl) {
+                    if (status == Status_V1_2::OK) {
+                        request = toVector(hRequest);
+                        defaultUrl = toString8(hDefaultUrl);
+                        *keyRequestType = toKeyRequestType_1_1(hKeyRequestType);
+                    }
+                    err = toStatusT(status);
+                });
+    } else if (mPluginV1_1 != NULL) {
+        hResult = mPluginV1_1->getKeyRequest_1_1(
+                toHidlVec(sessionId), toHidlVec(initData), toHidlString(mimeType), hKeyType,
+                hOptionalParameters,
+                [&](Status status, const hidl_vec<uint8_t>& hRequest,
+                    KeyRequestType_V1_1 hKeyRequestType, const hidl_string& hDefaultUrl) {
+                    if (status == Status::OK) {
+                        request = toVector(hRequest);
+                        defaultUrl = toString8(hDefaultUrl);
+                        *keyRequestType = toKeyRequestType_1_1(hKeyRequestType);
+                    }
+                    err = toStatusT(status);
+                });
+    } else {
+        hResult = mPlugin->getKeyRequest(
+                toHidlVec(sessionId), toHidlVec(initData), toHidlString(mimeType), hKeyType,
+                hOptionalParameters,
+                [&](Status status, const hidl_vec<uint8_t>& hRequest,
+                    KeyRequestType hKeyRequestType, const hidl_string& hDefaultUrl) {
+                    if (status == Status::OK) {
+                        request = toVector(hRequest);
+                        defaultUrl = toString8(hDefaultUrl);
+                        *keyRequestType = toKeyRequestType(hKeyRequestType);
+                    }
+                    err = toStatusT(status);
+                });
+    }
+
+    err = hResult.isOk() ? err : DEAD_OBJECT;
+    keyRequestTimer.SetAttribute(err);
+    return err;
+}
+
+status_t DrmHalHidl::provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                        Vector<uint8_t> const& response,
+                                        Vector<uint8_t>& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+    EventTimer<status_t> keyResponseTimer(&mMetrics.mProvideKeyResponseTimeUs);
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult =
+            mPlugin->provideKeyResponse(toHidlVec(sessionId), toHidlVec(response),
+                                        [&](Status status, const hidl_vec<uint8_t>& hKeySetId) {
+                                            if (status == Status::OK) {
+                                                keySetId = toVector(hKeySetId);
+                                            }
+                                            err = toStatusT(status);
+                                        });
+    err = hResult.isOk() ? err : DEAD_OBJECT;
+    keyResponseTimer.SetAttribute(err);
+    return err;
+}
+
+status_t DrmHalHidl::removeKeys(Vector<uint8_t> const& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    Return<Status> status = mPlugin->removeKeys(toHidlVec(keySetId));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::restoreKeys(Vector<uint8_t> const& sessionId,
+                                 Vector<uint8_t> const& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    Return<Status> status = mPlugin->restoreKeys(toHidlVec(sessionId), toHidlVec(keySetId));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                    KeyedVector<String8, String8>& infoMap) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    ::KeyedVector hInfoMap;
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->queryKeyStatus(
+            toHidlVec(sessionId), [&](Status status, const hidl_vec<KeyValue>& map) {
+                if (status == Status::OK) {
+                    infoMap = toKeyedVector(map);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                         Vector<uint8_t>& request, String8& defaultUrl) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+    Return<void> hResult;
+
+    if (mPluginV1_2 != NULL) {
+        hResult = mPluginV1_2->getProvisionRequest_1_2(
+                toHidlString(certType), toHidlString(certAuthority),
+                [&](Status_V1_2 status, const hidl_vec<uint8_t>& hRequest,
+                    const hidl_string& hDefaultUrl) {
+                    if (status == Status_V1_2::OK) {
+                        request = toVector(hRequest);
+                        defaultUrl = toString8(hDefaultUrl);
+                    }
+                    err = toStatusT(status);
+                });
+    } else {
+        hResult = mPlugin->getProvisionRequest(toHidlString(certType), toHidlString(certAuthority),
+                                               [&](Status status, const hidl_vec<uint8_t>& hRequest,
+                                                   const hidl_string& hDefaultUrl) {
+                                                   if (status == Status::OK) {
+                                                       request = toVector(hRequest);
+                                                       defaultUrl = toString8(hDefaultUrl);
+                                                   }
+                                                   err = toStatusT(status);
+                                               });
+    }
+
+    err = hResult.isOk() ? err : DEAD_OBJECT;
+    mMetrics.mGetProvisionRequestCounter.Increment(err);
+    return err;
+}
+
+status_t DrmHalHidl::provideProvisionResponse(Vector<uint8_t> const& response,
+                                              Vector<uint8_t>& certificate,
+                                              Vector<uint8_t>& wrappedKey) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->provideProvisionResponse(
+            toHidlVec(response), [&](Status status, const hidl_vec<uint8_t>& hCertificate,
+                                     const hidl_vec<uint8_t>& hWrappedKey) {
+                if (status == Status::OK) {
+                    certificate = toVector(hCertificate);
+                    wrappedKey = toVector(hWrappedKey);
+                }
+                err = toStatusT(status);
+            });
+
+    err = hResult.isOk() ? err : DEAD_OBJECT;
+    mMetrics.mProvideProvisionResponseCounter.Increment(err);
+    return err;
+}
+
+status_t DrmHalHidl::getSecureStops(List<Vector<uint8_t>>& secureStops) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult =
+            mPlugin->getSecureStops([&](Status status, const hidl_vec<SecureStop>& hSecureStops) {
+                if (status == Status::OK) {
+                    secureStops = toSecureStops(hSecureStops);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPluginV1_1 == NULL) {
+        return ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPluginV1_1->getSecureStopIds(
+            [&](Status status, const hidl_vec<SecureStopId>& hSecureStopIds) {
+                if (status == Status::OK) {
+                    secureStopIds = toSecureStopIds(hSecureStopIds);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->getSecureStop(
+            toHidlVec(ssid), [&](Status status, const SecureStop& hSecureStop) {
+                if (status == Status::OK) {
+                    secureStop = toVector(hSecureStop.opaqueData);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    Return<Status> status(Status::ERROR_DRM_UNKNOWN);
+    if (mPluginV1_1 != NULL) {
+        SecureStopRelease secureStopRelease;
+        secureStopRelease.opaqueData = toHidlVec(ssRelease);
+        status = mPluginV1_1->releaseSecureStops(secureStopRelease);
+    } else {
+        status = mPlugin->releaseSecureStop(toHidlVec(ssRelease));
+    }
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::removeSecureStop(Vector<uint8_t> const& ssid) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPluginV1_1 == NULL) {
+        return ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    Return<Status> status = mPluginV1_1->removeSecureStop(toHidlVec(ssid));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::removeAllSecureStops() {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    Return<Status> status(Status::ERROR_DRM_UNKNOWN);
+    if (mPluginV1_1 != NULL) {
+        status = mPluginV1_1->removeAllSecureStops();
+    } else {
+        status = mPlugin->releaseAllSecureStops();
+    }
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
+                                   DrmPlugin::HdcpLevel* max) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    if (connected == NULL || max == NULL) {
+        return BAD_VALUE;
+    }
+    status_t err = UNKNOWN_ERROR;
+
+    *connected = DrmPlugin::kHdcpLevelUnknown;
+    *max = DrmPlugin::kHdcpLevelUnknown;
+
+    Return<void> hResult;
+    if (mPluginV1_2 != NULL) {
+        hResult = mPluginV1_2->getHdcpLevels_1_2([&](Status_V1_2 status,
+                                                     const HdcpLevel_V1_2& hConnected,
+                                                     const HdcpLevel_V1_2& hMax) {
+            if (status == Status_V1_2::OK) {
+                *connected = toHdcpLevel(hConnected);
+                *max = toHdcpLevel(hMax);
+            }
+            err = toStatusT(status);
+        });
+    } else if (mPluginV1_1 != NULL) {
+        hResult = mPluginV1_1->getHdcpLevels(
+                [&](Status status, const HdcpLevel& hConnected, const HdcpLevel& hMax) {
+                    if (status == Status::OK) {
+                        *connected = toHdcpLevel(static_cast<HdcpLevel_V1_2>(hConnected));
+                        *max = toHdcpLevel(static_cast<HdcpLevel_V1_2>(hMax));
+                    }
+                    err = toStatusT(status);
+                });
+    } else {
+        return ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    if (open == NULL || max == NULL) {
+        return BAD_VALUE;
+    }
+    status_t err = UNKNOWN_ERROR;
+
+    *open = 0;
+    *max = 0;
+
+    if (mPluginV1_1 == NULL) {
+        return ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    Return<void> hResult =
+            mPluginV1_1->getNumberOfSessions([&](Status status, uint32_t hOpen, uint32_t hMax) {
+                if (status == Status::OK) {
+                    *open = hOpen;
+                    *max = hMax;
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                      DrmPlugin::SecurityLevel* level) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    if (level == NULL) {
+        return BAD_VALUE;
+    }
+    status_t err = UNKNOWN_ERROR;
+
+    if (mPluginV1_1 == NULL) {
+        return ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    *level = DrmPlugin::kSecurityLevelUnknown;
+
+    Return<void> hResult = mPluginV1_1->getSecurityLevel(toHidlVec(sessionId),
+                                                         [&](Status status, SecurityLevel hLevel) {
+                                                             if (status == Status::OK) {
+                                                                 *level = toSecurityLevel(hLevel);
+                                                             }
+                                                             err = toStatusT(status);
+                                                         });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPluginV1_2 == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPluginV1_2->getOfflineLicenseKeySetIds(
+            [&](Status status, const hidl_vec<KeySetId>& hKeySetIds) {
+                if (status == Status::OK) {
+                    keySetIds = toKeySetIds(hKeySetIds);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPluginV1_2 == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    Return<Status> status = mPluginV1_2->removeOfflineLicense(toHidlVec(keySetId));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                            DrmPlugin::OfflineLicenseState* licenseState) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPluginV1_2 == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+    *licenseState = DrmPlugin::kOfflineLicenseStateUnknown;
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPluginV1_2->getOfflineLicenseState(
+            toHidlVec(keySetId), [&](Status status, OfflineLicenseState hLicenseState) {
+                if (status == Status::OK) {
+                    *licenseState = toOfflineLicenseState(hLicenseState);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getPropertyString(String8 const& name, String8& value) const {
+    Mutex::Autolock autoLock(mLock);
+    return getPropertyStringInternal(name, value);
+}
+
+status_t DrmHalHidl::getPropertyStringInternal(String8 const& name, String8& value) const {
+    // This function is internal to the class and should only be called while
+    // mLock is already held.
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->getPropertyString(
+            toHidlString(name), [&](Status status, const hidl_string& hValue) {
+                if (status == Status::OK) {
+                    value = toString8(hValue);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
+    Mutex::Autolock autoLock(mLock);
+    return getPropertyByteArrayInternal(name, value);
+}
+
+status_t DrmHalHidl::getPropertyByteArrayInternal(String8 const& name,
+                                                  Vector<uint8_t>& value) const {
+    // This function is internal to the class and should only be called while
+    // mLock is already held.
+    INIT_CHECK();
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->getPropertyByteArray(
+            toHidlString(name), [&](Status status, const hidl_vec<uint8_t>& hValue) {
+                if (status == Status::OK) {
+                    value = toVector(hValue);
+                }
+                err = toStatusT(status);
+            });
+
+    err = hResult.isOk() ? err : DEAD_OBJECT;
+    if (name == kPropertyDeviceUniqueId) {
+        mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
+    }
+    return err;
+}
+
+status_t DrmHalHidl::setPropertyString(String8 const& name, String8 const& value) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    Return<Status> status = mPlugin->setPropertyString(toHidlString(name), toHidlString(value));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    Return<Status> status = mPlugin->setPropertyByteArray(toHidlString(name), toHidlVec(value));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
+    if (consumer == nullptr) {
+        return UNEXPECTED_NULL;
+    }
+    consumer->consumeFrameworkMetrics(mMetrics);
+
+    // Append vendor metrics if they are supported.
+    if (mPluginV1_1 != NULL) {
+        String8 vendor;
+        String8 description;
+        if (getPropertyStringInternal(String8("vendor"), vendor) != OK || vendor.isEmpty()) {
+            ALOGE("Get vendor failed or is empty");
+            vendor = "NONE";
+        }
+        if (getPropertyStringInternal(String8("description"), description) != OK ||
+            description.isEmpty()) {
+            ALOGE("Get description failed or is empty.");
+            description = "NONE";
+        }
+        vendor += ".";
+        vendor += description;
+
+        status_t err = UNKNOWN_ERROR;
+
+        Return<void> status =
+                mPluginV1_1->getMetrics([&](Status status, hidl_vec<DrmMetricGroup> pluginMetrics) {
+                    if (status != Status::OK) {
+                        ALOGV("Error getting plugin metrics: %d", status);
+                    } else {
+                        consumer->consumeHidlMetrics(vendor, pluginMetrics);
+                    }
+                    err = toStatusT(status);
+                });
+        return status.isOk() ? err : DEAD_OBJECT;
+    }
+
+    return OK;
+}
+
+status_t DrmHalHidl::setCipherAlgorithm(Vector<uint8_t> const& sessionId,
+                                        String8 const& algorithm) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    Return<Status> status =
+            mPlugin->setCipherAlgorithm(toHidlVec(sessionId), toHidlString(algorithm));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    Return<Status> status = mPlugin->setMacAlgorithm(toHidlVec(sessionId), toHidlString(algorithm));
+    return status.isOk() ? toStatusT(status) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                             Vector<uint8_t>& output) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult =
+            mPlugin->encrypt(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(input),
+                             toHidlVec(iv), [&](Status status, const hidl_vec<uint8_t>& hOutput) {
+                                 if (status == Status::OK) {
+                                     output = toVector(hOutput);
+                                 }
+                                 err = toStatusT(status);
+                             });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                             Vector<uint8_t>& output) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult =
+            mPlugin->decrypt(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(input),
+                             toHidlVec(iv), [&](Status status, const hidl_vec<uint8_t>& hOutput) {
+                                 if (status == Status::OK) {
+                                     output = toVector(hOutput);
+                                 }
+                                 err = toStatusT(status);
+                             });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                          Vector<uint8_t> const& message, Vector<uint8_t>& signature) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->sign(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(message),
+                                         [&](Status status, const hidl_vec<uint8_t>& hSignature) {
+                                             if (status == Status::OK) {
+                                                 signature = toVector(hSignature);
+                                             }
+                                             err = toStatusT(status);
+                                         });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                            Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                            bool& match) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult =
+            mPlugin->verify(toHidlVec(sessionId), toHidlVec(keyId), toHidlVec(message),
+                            toHidlVec(signature), [&](Status status, bool hMatch) {
+                                if (status == Status::OK) {
+                                    match = hMatch;
+                                } else {
+                                    match = false;
+                                }
+                                err = toStatusT(status);
+                            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                             Vector<uint8_t>& signature) {
+    Mutex::Autolock autoLock(mLock);
+    INIT_CHECK();
+
+    DrmSessionManager::Instance()->useSession(sessionId);
+
+    status_t err = UNKNOWN_ERROR;
+
+    Return<void> hResult = mPlugin->signRSA(
+            toHidlVec(sessionId), toHidlString(algorithm), toHidlVec(message),
+            toHidlVec(wrappedKey), [&](Status status, const hidl_vec<uint8_t>& hSignature) {
+                if (status == Status::OK) {
+                    signature = toVector(hSignature);
+                }
+                err = toStatusT(status);
+            });
+
+    return hResult.isOk() ? err : DEAD_OBJECT;
+}
+
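+// Serializes the framework metrics, attaches the plugin's vendor/description
+// properties, and records everything through mediametrics; the base64-encoded
+// payload is stored under "serialized_metrics". Returns the serialized string.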
+std::string DrmHalHidl::reportFrameworkMetrics(const std::string& pluginMetrics) const {
+    mediametrics_handle_t item(mediametrics_create("mediadrm"));
+    mediametrics_setUid(item, mMetrics.GetAppUid());
+    String8 vendor;
+    String8 description;
+    status_t result = getPropertyStringInternal(String8("vendor"), vendor);
+    if (result != OK) {
+        ALOGE("Failed to get vendor from drm plugin: %d", result);
+    } else {
+        mediametrics_setCString(item, "vendor", vendor.c_str());
+    }
+    result = getPropertyStringInternal(String8("description"), description);
+    if (result != OK) {
+        ALOGE("Failed to get description from drm plugin: %d", result);
+    } else {
+        mediametrics_setCString(item, "description", description.c_str());
+    }
+
+    std::string serializedMetrics;
+    result = mMetrics.GetSerializedMetrics(&serializedMetrics);
+    if (result != OK) {
+        ALOGE("Failed to serialize framework metrics: %d", result);
+    }
+    std::string b64EncodedMetrics =
+            toBase64StringNoPad(serializedMetrics.data(), serializedMetrics.size());
+    if (!b64EncodedMetrics.empty()) {
+        mediametrics_setCString(item, "serialized_metrics", b64EncodedMetrics.c_str());
+    }
+    if (!pluginMetrics.empty()) {
+        mediametrics_setCString(item, "plugin_metrics", pluginMetrics.c_str());
+    }
+    if (!mediametrics_selfRecord(item)) {
+        ALOGE("Failed to self record framework metrics");
+    }
+    mediametrics_delete(item);
+    return serializedMetrics;
+}
+
+std::string DrmHalHidl::reportPluginMetrics() const {
+    Vector<uint8_t> metricsVector;
+    String8 vendor;
+    String8 description;
+    std::string metricsString;
+    if (getPropertyStringInternal(String8("vendor"), vendor) == OK &&
+        getPropertyStringInternal(String8("description"), description) == OK &&
+        getPropertyByteArrayInternal(String8("metrics"), metricsVector) == OK) {
+        metricsString = toBase64StringNoPad(metricsVector.array(), metricsVector.size());
+        status_t res = android::reportDrmPluginMetrics(metricsString, vendor, description,
+                                                       mMetrics.GetAppUid());
+        if (res != OK) {
+            ALOGE("Metrics were retrieved but could not be reported: %d", res);
+        }
+    }
+    return metricsString;
+}
+
+status_t DrmHalHidl::requiresSecureDecoder(const char* mime, bool* required) const {
+    Mutex::Autolock autoLock(mLock);
+    if (mPluginV1_4 == NULL) {
+        return false;
+    }
+    auto hResult = mPluginV1_4->requiresSecureDecoderDefault(hidl_string(mime));
+    if (!hResult.isOk()) {
+        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (required) {
+        *required = hResult;
+    }
+    return OK;
+}
+
+status_t DrmHalHidl::requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
+                                           bool* required) const {
+    Mutex::Autolock autoLock(mLock);
+    if (mPluginV1_4 == NULL) {
+        return false;
+    }
+    auto hLevel = toHidlSecurityLevel(securityLevel);
+    auto hResult = mPluginV1_4->requiresSecureDecoder(hidl_string(mime), hLevel);
+    if (!hResult.isOk()) {
+        DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (required) {
+        *required = hResult;
+    }
+    return OK;
+}
+
+status_t DrmHalHidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
+    Mutex::Autolock autoLock(mLock);
+    if (mPluginV1_4 == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+    auto err = mPluginV1_4->setPlaybackId(toHidlVec(sessionId), hidl_string(playbackId));
+    return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+}
+
+status_t DrmHalHidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
+    Mutex::Autolock autoLock(mLock);
+    return DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs);
+}
+
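+// Collects the 16-byte crypto scheme UUIDs from every V1.3-capable factory,
+// flattened into a single byte vector (16 bytes per scheme).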
+status_t DrmHalHidl::getSupportedSchemes(std::vector<uint8_t>& schemes) const {
+    Mutex::Autolock autoLock(mLock);
+    for (auto& factory : mFactories) {
+        sp<drm::V1_3::IDrmFactory> factoryV1_3 = drm::V1_3::IDrmFactory::castFrom(factory);
+        if (factoryV1_3 == nullptr) {
+            continue;
+        }
+
+        factoryV1_3->getSupportedCryptoSchemes(
+            [&](const hardware::hidl_vec<hardware::hidl_array<uint8_t, 16>>& schemes_hidl) {
+                for (const auto &scheme : schemes_hidl) {
+                    schemes.insert(schemes.end(), scheme.data(), scheme.data() + scheme.size());
+                }
+            });
+    }
+
+    return OK;
+}
+
+}  // namespace android
diff --git a/drm/libmediadrm/DrmHalListener.cpp b/drm/libmediadrm/DrmHalListener.cpp
new file mode 100644
index 0000000..cfcf475
--- /dev/null
+++ b/drm/libmediadrm/DrmHalListener.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DrmHalListener"
+
+#include <mediadrm/DrmHalListener.h>
+
+using ::aidl::android::hardware::drm::KeyStatusType;
+using ::android::hardware::hidl_vec;
+
+namespace android {
+
+static const Vector<uint8_t> toVector(const std::vector<uint8_t>& vec) {
+    Vector<uint8_t> vector;
+    vector.appendArray(vec.data(), vec.size());
+    return *const_cast<const Vector<uint8_t>*>(&vector);
+}
+
+template <typename T = uint8_t>
+static hidl_vec<T> toHidlVec(const Vector<T>& vector) {
+    hidl_vec<T> vec;
+    vec.setToExternal(const_cast<T*>(vector.array()), vector.size());
+    return vec;
+}
+
+DrmHalListener::DrmHalListener(MediaDrmMetrics* metrics)
+    : mMetrics(metrics) {}
+
+DrmHalListener::~DrmHalListener() {}
+
+void DrmHalListener::setListener(sp<IDrmClient> listener) {
+    Mutex::Autolock lock(mEventLock);
+    mListener = listener;
+}
+
+::ndk::ScopedAStatus DrmHalListener::onEvent(EventTypeAidl eventTypeAidl,
+                                         const std::vector<uint8_t>& sessionId,
+                                         const std::vector<uint8_t>& data) {
+    mMetrics->mEventCounter.Increment((uint32_t)eventTypeAidl);
+
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        Mutex::Autolock lock(mNotifyLock);
+        DrmPlugin::EventType eventType;
+        switch (eventTypeAidl) {
+            case EventTypeAidl::PROVISION_REQUIRED:
+                eventType = DrmPlugin::kDrmPluginEventProvisionRequired;
+                break;
+            case EventTypeAidl::KEY_NEEDED:
+                eventType = DrmPlugin::kDrmPluginEventKeyNeeded;
+                break;
+            case EventTypeAidl::KEY_EXPIRED:
+                eventType = DrmPlugin::kDrmPluginEventKeyExpired;
+                break;
+            case EventTypeAidl::VENDOR_DEFINED:
+                eventType = DrmPlugin::kDrmPluginEventVendorDefined;
+                break;
+            case EventTypeAidl::SESSION_RECLAIMED:
+                eventType = DrmPlugin::kDrmPluginEventSessionReclaimed;
+                break;
+            default:
+                return ::ndk::ScopedAStatus::ok();
+        }
+
+        listener->sendEvent(eventType, toHidlVec(toVector(sessionId)), toHidlVec(toVector(data)));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalListener::onExpirationUpdate(const std::vector<uint8_t>& sessionId,
+                                                    int64_t expiryTimeInMS) {
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        Mutex::Autolock lock(mNotifyLock);
+        listener->sendExpirationUpdate(toHidlVec(toVector(sessionId)), expiryTimeInMS);
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalListener::onKeysChange(const std::vector<uint8_t>& sessionId,
+                                              const std::vector<KeyStatusAidl>& keyStatusListAidl,
+                                              bool hasNewUsableKey) {
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        std::vector<DrmKeyStatus> keyStatusList;
+        size_t nKeys = keyStatusListAidl.size();
+        for (size_t i = 0; i < nKeys; ++i) {
+            const KeyStatusAidl& keyStatus = keyStatusListAidl[i];
+            uint32_t type;
+            switch (keyStatus.type) {
+                case KeyStatusType::USABLE:
+                    type = DrmPlugin::kKeyStatusType_Usable;
+                    break;
+                case KeyStatusType::EXPIRED:
+                    type = DrmPlugin::kKeyStatusType_Expired;
+                    break;
+                case KeyStatusType::OUTPUT_NOT_ALLOWED:
+                    type = DrmPlugin::kKeyStatusType_OutputNotAllowed;
+                    break;
+                case KeyStatusType::STATUS_PENDING:
+                    type = DrmPlugin::kKeyStatusType_StatusPending;
+                    break;
+                case KeyStatusType::USABLE_IN_FUTURE:
+                    type = DrmPlugin::kKeyStatusType_UsableInFuture;
+                    break;
+                case KeyStatusType::INTERNAL_ERROR:
+                default:
+                    type = DrmPlugin::kKeyStatusType_InternalError;
+                    break;
+            }
+            keyStatusList.push_back({type, toHidlVec(toVector(keyStatus.keyId))});
+            mMetrics->mKeyStatusChangeCounter.Increment((uint32_t)keyStatus.type);
+        }
+
+        Mutex::Autolock lock(mNotifyLock);
+        listener->sendKeysChange(toHidlVec(toVector(sessionId)), keyStatusList, hasNewUsableKey);
+    } else {
+        // There's no listener, but we still want to count the key change events.
+        size_t nKeys = keyStatusListAidl.size();
+
+        for (size_t i = 0; i < nKeys; i++) {
+            mMetrics->mKeyStatusChangeCounter.Increment((uint32_t)keyStatusListAidl[i].type);
+        }
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalListener::onSessionLostState(const std::vector<uint8_t>& sessionId) {
+    mEventLock.lock();
+    sp<IDrmClient> listener = mListener;
+    mEventLock.unlock();
+
+    if (listener != NULL) {
+        Mutex::Autolock lock(mNotifyLock);
+        listener->sendSessionLostState(toHidlVec(toVector(sessionId)));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+}  // namespace android
\ No newline at end of file
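
DrmHalListener is the AIDL-side bridge: it converts IDrmPluginListener callbacks into the framework's IDrmClient calls and bumps the MediaDrmMetrics counters even when no client listener is attached. A rough wiring sketch, with assumed names (the actual hookup presumably lives in DrmHalAidl):

    // Sketch only: wiring the AIDL callback listener to a plugin.
    // mMetrics, mPlugin and clientListener are assumed to exist in the caller.
    std::shared_ptr<DrmHalListener> halListener =
            ::ndk::SharedRefBase::make<DrmHalListener>(&mMetrics);
    halListener->setListener(clientListener);  // framework-side IDrmClient
    mPlugin->setListener(halListener);         // AIDL IDrmPlugin callback sink
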
diff --git a/drm/libmediadrm/DrmMetrics.cpp b/drm/libmediadrm/DrmMetrics.cpp
index 996fd19..77b5343 100644
--- a/drm/libmediadrm/DrmMetrics.cpp
+++ b/drm/libmediadrm/DrmMetrics.cpp
@@ -123,20 +123,19 @@
         });
 
     mKeyStatusChangeCounter.ExportValues(
-        [&](const KeyStatusType key_status_type, const int64_t value) {
+        [&](const uint32_t key_status_type, const int64_t value) {
             DrmFrameworkMetrics::Counter *counter =
                 metrics.add_key_status_change_counter();
             counter->set_count(value);
-            counter->mutable_attributes()->set_key_status_type(
-                (uint32_t)key_status_type);
+            counter->mutable_attributes()->set_key_status_type(key_status_type);
         });
 
     mEventCounter.ExportValues(
-        [&](const EventType event_type, const int64_t value) {
+        [&](const uint32_t event_type, const int64_t value) {
             DrmFrameworkMetrics::Counter *counter =
                 metrics.add_event_callback_counter();
             counter->set_count(value);
-            counter->mutable_attributes()->set_event_type((uint32_t)event_type);
+            counter->mutable_attributes()->set_event_type(event_type);
         });
 
     mGetDeviceUniqueIdCounter.ExportValues(
diff --git a/drm/libmediadrm/DrmMetricsConsumer.cpp b/drm/libmediadrm/DrmMetricsConsumer.cpp
index 5f0b26e..c06f09b 100644
--- a/drm/libmediadrm/DrmMetricsConsumer.cpp
+++ b/drm/libmediadrm/DrmMetricsConsumer.cpp
@@ -32,26 +32,24 @@
 
 namespace {
 
-template <typename T> std::string GetAttributeName(T type);
-
-template <> std::string GetAttributeName<KeyStatusType>(KeyStatusType type) {
-    static const char *type_names[] = {"USABLE", "EXPIRED",
+std::string GetAttributeName(const std::string &typeName, uint32_t attribute) {
+    if (typeName == "KeyStatusChange") {
+        static const char *type_names[] = {"USABLE", "EXPIRED",
                                        "OUTPUT_NOT_ALLOWED", "STATUS_PENDING",
                                        "INTERNAL_ERROR", "USABLE_IN_FUTURE"};
-    if (((size_t)type) >= arraysize(type_names)) {
-        return "UNKNOWN_TYPE";
+        if (attribute >= arraysize(type_names)) {
+            return "UNKNOWN_TYPE";
+        }
+        return type_names[attribute];
     }
-    return type_names[(size_t)type];
-}
-
-template <> std::string GetAttributeName<EventType>(EventType type) {
+
     static const char *type_names[] = {"PROVISION_REQUIRED", "KEY_NEEDED",
                                        "KEY_EXPIRED", "VENDOR_DEFINED",
                                        "SESSION_RECLAIMED"};
-    if (((size_t)type) >= arraysize(type_names)) {
+    if (attribute >= arraysize(type_names)) {
         return "UNKNOWN_TYPE";
     }
-    return type_names[(size_t)type];
+    return type_names[attribute];
 }
 
 template <typename T>
@@ -87,14 +85,14 @@
 
 template <typename T>
 void ExportCounterMetricWithAttributeNames(
-    const android::CounterMetric<T> &counter, PersistableBundle *metrics) {
+    const android::CounterMetric<T> &counter, const std::string &typeName, PersistableBundle *metrics) {
     if (!metrics) {
         ALOGE("metrics was unexpectedly null.");
         return;
     }
-    counter.ExportValues([&](const T &attribute, const int64_t value) {
+    counter.ExportValues([&](const uint32_t attribute, const int64_t value) {
         std::string name = counter.metric_name() + "." +
-                           GetAttributeName(attribute) + ".count";
+                           GetAttributeName(typeName, attribute) + ".count";
         metrics->putLong(android::String16(name.c_str()), value);
     });
 }
@@ -196,8 +194,8 @@
     ExportEventMetric(metrics.mProvideKeyResponseTimeUs, mBundle);
     ExportCounterMetric(metrics.mGetProvisionRequestCounter, mBundle);
     ExportCounterMetric(metrics.mProvideProvisionResponseCounter, mBundle);
-    ExportCounterMetricWithAttributeNames(metrics.mKeyStatusChangeCounter, mBundle);
-    ExportCounterMetricWithAttributeNames(metrics.mEventCounter, mBundle);
+    ExportCounterMetricWithAttributeNames(metrics.mKeyStatusChangeCounter, "KeyStatusChange", mBundle);
+    ExportCounterMetricWithAttributeNames(metrics.mEventCounter, "Event", mBundle);
     ExportCounterMetric(metrics.mGetDeviceUniqueIdCounter, mBundle);
     ExportSessionLifespans(metrics.GetSessionLifespans(), mBundle);
     return android::OK;
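
With GetAttributeName now keyed on a type-name string plus the raw counter value, every exported bundle entry keeps the form <metric_name>.<ATTRIBUTE>.count (see ExportCounterMetricWithAttributeNames above). A small illustration; the concrete metric_name string is an assumption, since it is defined elsewhere:

    // Illustration only: how a key-status counter value is named in the bundle.
    // "drm.mediadrm.key_status_change" is an assumed metric_name.
    uint32_t attribute = 0;  // maps to "USABLE" in the KeyStatusChange table
    std::string name = std::string("drm.mediadrm.key_status_change") + "." +
                       GetAttributeName("KeyStatusChange", attribute) + ".count";
    // name == "drm.mediadrm.key_status_change.USABLE.count"
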
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index 0b117a3..be0cd4b 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "DrmUtils"
 
+#include <android/binder_manager.h>
 #include <android/hardware/drm/1.0/ICryptoFactory.h>
 #include <android/hardware/drm/1.0/ICryptoPlugin.h>
 #include <android/hardware/drm/1.0/IDrmFactory.h>
@@ -32,10 +33,10 @@
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/HidlSupport.h>
 
+#include <cutils/properties.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
-#include <cutils/properties.h>
 
 #include <mediadrm/CryptoHal.h>
 #include <mediadrm/DrmHal.h>
@@ -57,10 +58,10 @@
 
 namespace {
 
-template<typename Hal>
-Hal *MakeObject(status_t *pstatus) {
+template <typename Hal>
+Hal* MakeObject(status_t* pstatus) {
     status_t err = OK;
-    status_t &status = pstatus ? *pstatus : err;
+    status_t& status = pstatus ? *pstatus : err;
     auto obj = new Hal();
     status = obj->initCheck();
     if (status != OK && status != NO_INIT) {
@@ -70,43 +71,44 @@
 }
 
 template <typename Hal, typename V, typename M>
-void MakeHidlFactories(const uint8_t uuid[16], V &factories, M& instances) {
+void MakeHidlFactories(const uint8_t uuid[16], V& factories, M& instances) {
     sp<HServiceManager> serviceManager = HServiceManager::getService();
     if (serviceManager == nullptr) {
         LOG2BE("Failed to get service manager");
         return;
     }
 
-    serviceManager->listManifestByInterface(Hal::descriptor, [&](const hidl_vec<hidl_string> &registered) {
-        for (const auto &instance : registered) {
-            auto factory = Hal::getService(instance);
-            if (factory != nullptr) {
-                instances[instance.c_str()] = Hal::descriptor;
-                if (!uuid) {
-                    factories.push_back(factory);
-                    continue;
+    serviceManager->listManifestByInterface(
+            Hal::descriptor, [&](const hidl_vec<hidl_string>& registered) {
+                for (const auto& instance : registered) {
+                    auto factory = Hal::getService(instance);
+                    if (factory != nullptr) {
+                        instances[instance.c_str()] = Hal::descriptor;
+                        if (!uuid) {
+                            factories.push_back(factory);
+                            continue;
+                        }
+                        auto supported = factory->isCryptoSchemeSupported(uuid);
+                        if (!supported.isOk()) {
+                            LOG2BE(uuid, "isCryptoSchemeSupported txn failed: %s",
+                                   supported.description().c_str());
+                            continue;
+                        }
+                        if (supported) {
+                            factories.push_back(factory);
+                        }
+                    }
                 }
-                auto supported = factory->isCryptoSchemeSupported(uuid);
-                if (!supported.isOk()) {
-                    LOG2BE(uuid, "isCryptoSchemeSupported txn failed: %s",
-                           supported.description().c_str());
-                    continue;
-                }
-                if (supported) {
-                    factories.push_back(factory);
-                }
-            }
-        }
-    });
+            });
 }
 
 template <typename Hal, typename V>
-void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+void MakeHidlFactories(const uint8_t uuid[16], V& factories) {
     std::map<std::string, std::string> instances;
     MakeHidlFactories<Hal>(uuid, factories, instances);
 }
 
-hidl_vec<uint8_t> toHidlVec(const void *ptr, size_t size) {
+hidl_vec<uint8_t> toHidlVec(const void* ptr, size_t size) {
     hidl_vec<uint8_t> vec(size);
     if (ptr != nullptr) {
         memcpy(vec.data(), ptr, size);
@@ -114,19 +116,19 @@
     return vec;
 }
 
-hidl_array<uint8_t, 16> toHidlArray16(const uint8_t *ptr) {
+hidl_array<uint8_t, 16> toHidlArray16(const uint8_t* ptr) {
     if (ptr == nullptr) {
         return hidl_array<uint8_t, 16>();
     }
     return hidl_array<uint8_t, 16>(ptr);
 }
 
-sp<::V1_0::IDrmPlugin> MakeDrmPlugin(const sp<::V1_0::IDrmFactory> &factory,
-                                     const uint8_t uuid[16], const char *appPackageName) {
+sp<::V1_0::IDrmPlugin> MakeDrmPlugin(const sp<::V1_0::IDrmFactory>& factory, const uint8_t uuid[16],
+                                     const char* appPackageName) {
     sp<::V1_0::IDrmPlugin> plugin;
     auto err = factory->createPlugin(
             toHidlArray16(uuid), hidl_string(appPackageName),
-            [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
+            [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin>& hPlugin) {
                 if (status != ::V1_0::Status::OK) {
                     LOG2BE(uuid, "MakeDrmPlugin failed: %d", status);
                     return;
@@ -141,13 +143,13 @@
     }
 }
 
-sp<::V1_0::ICryptoPlugin> MakeCryptoPlugin(const sp<::V1_0::ICryptoFactory> &factory,
-                                           const uint8_t uuid[16], const void *initData,
+sp<::V1_0::ICryptoPlugin> MakeCryptoPlugin(const sp<::V1_0::ICryptoFactory>& factory,
+                                           const uint8_t uuid[16], const void* initData,
                                            size_t initDataSize) {
     sp<::V1_0::ICryptoPlugin> plugin;
     auto err = factory->createPlugin(
             toHidlArray16(uuid), toHidlVec(initData, initDataSize),
-            [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
+            [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin>& hPlugin) {
                 if (status != ::V1_0::Status::OK) {
                     LOG2BE(uuid, "MakeCryptoPlugin failed: %d", status);
                     return;
@@ -162,17 +164,38 @@
     }
 }
 
-} // namespace
+}  // namespace
 
 bool UseDrmService() {
     return property_get_bool("mediadrm.use_mediadrmserver", true);
 }
 
-sp<IDrm> MakeDrm(status_t *pstatus) {
+std::vector<std::shared_ptr<IDrmFactoryAidl>> makeDrmFactoriesAidl() {
+    std::vector<std::shared_ptr<IDrmFactoryAidl>> factories;
+    AServiceManager_forEachDeclaredInstance(
+        IDrmFactoryAidl::descriptor, static_cast<void*>(&factories),
+        [](const char* instance, void* context) {
+            auto fullName = std::string(IDrmFactoryAidl::descriptor) + "/" + std::string(instance);
+            auto factory = IDrmFactoryAidl::fromBinder(
+                    ::ndk::SpAIBinder(AServiceManager_waitForService(fullName.c_str())));
+            if (factory == nullptr) {
+                ALOGE("IDrmFactory not found. Instance name:[%s]", fullName.c_str());
+                return;
+            }
+
+            ALOGI("found IDrmFactory. Instance name:[%s]", fullName.c_str());
+            static_cast<std::vector<std::shared_ptr<IDrmFactoryAidl>>*>(context)->emplace_back(
+                    factory);
+        });
+
+    return factories;
+}
+
+sp<IDrm> MakeDrm(status_t* pstatus) {
     return MakeObject<DrmHal>(pstatus);
 }
 
-sp<ICrypto> MakeCrypto(status_t *pstatus) {
+sp<ICrypto> MakeCrypto(status_t* pstatus) {
     return MakeObject<CryptoHal>(pstatus);
 }
 
@@ -191,9 +214,9 @@
 }
 
 std::vector<sp<::V1_0::IDrmPlugin>> MakeDrmPlugins(const uint8_t uuid[16],
-                                              const char *appPackageName) {
+                                                   const char* appPackageName) {
     std::vector<sp<::V1_0::IDrmPlugin>> plugins;
-    for (const auto &factory : MakeDrmFactories(uuid)) {
+    for (const auto& factory : MakeDrmFactories(uuid)) {
         plugins.push_back(MakeDrmPlugin(factory, uuid, appPackageName));
     }
     return plugins;
@@ -209,10 +232,11 @@
     return cryptoFactories;
 }
 
-std::vector<sp<ICryptoPlugin>> MakeCryptoPlugins(const uint8_t uuid[16], const void *initData,
-                                                 size_t initDataSize) {
-    std::vector<sp<ICryptoPlugin>> plugins;
-    for (const auto &factory : MakeCryptoFactories(uuid)) {
+std::vector<sp<::V1_0::ICryptoPlugin>> MakeCryptoPlugins(const uint8_t uuid[16],
+                                                         const void* initData,
+                                                         size_t initDataSize) {
+    std::vector<sp<::V1_0::ICryptoPlugin>> plugins;
+    for (const auto& factory : MakeCryptoFactories(uuid)) {
         plugins.push_back(MakeCryptoPlugin(factory, uuid, initData, initDataSize));
     }
     return plugins;
@@ -220,90 +244,90 @@
 
 status_t toStatusT_1_4(::V1_4::Status status) {
     switch (status) {
-    case ::V1_4::Status::OK:
-        return OK;
-    case ::V1_4::Status::BAD_VALUE:
-        return BAD_VALUE;
-    case ::V1_4::Status::ERROR_DRM_CANNOT_HANDLE:
-        return ERROR_DRM_CANNOT_HANDLE;
-    case ::V1_4::Status::ERROR_DRM_DECRYPT:
-        return ERROR_DRM_DECRYPT;
-    case ::V1_4::Status::ERROR_DRM_DEVICE_REVOKED:
-        return ERROR_DRM_DEVICE_REVOKED;
-    case ::V1_4::Status::ERROR_DRM_FRAME_TOO_LARGE:
-        return ERROR_DRM_FRAME_TOO_LARGE;
-    case ::V1_4::Status::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
-        return ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
-    case ::V1_4::Status::ERROR_DRM_INSUFFICIENT_SECURITY:
-        return ERROR_DRM_INSUFFICIENT_SECURITY;
-    case ::V1_4::Status::ERROR_DRM_INVALID_STATE:
-        return ERROR_DRM_INVALID_STATE;
-    case ::V1_4::Status::ERROR_DRM_LICENSE_EXPIRED:
-        return ERROR_DRM_LICENSE_EXPIRED;
-    case ::V1_4::Status::ERROR_DRM_NO_LICENSE:
-        return ERROR_DRM_NO_LICENSE;
-    case ::V1_4::Status::ERROR_DRM_NOT_PROVISIONED:
-        return ERROR_DRM_NOT_PROVISIONED;
-    case ::V1_4::Status::ERROR_DRM_RESOURCE_BUSY:
-        return ERROR_DRM_RESOURCE_BUSY;
-    case ::V1_4::Status::ERROR_DRM_RESOURCE_CONTENTION:
-        return ERROR_DRM_RESOURCE_CONTENTION;
-    case ::V1_4::Status::ERROR_DRM_SESSION_LOST_STATE:
-        return ERROR_DRM_SESSION_LOST_STATE;
-    case ::V1_4::Status::ERROR_DRM_SESSION_NOT_OPENED:
-        return ERROR_DRM_SESSION_NOT_OPENED;
+        case ::V1_4::Status::OK:
+            return OK;
+        case ::V1_4::Status::BAD_VALUE:
+            return BAD_VALUE;
+        case ::V1_4::Status::ERROR_DRM_CANNOT_HANDLE:
+            return ERROR_DRM_CANNOT_HANDLE;
+        case ::V1_4::Status::ERROR_DRM_DECRYPT:
+            return ERROR_DRM_DECRYPT;
+        case ::V1_4::Status::ERROR_DRM_DEVICE_REVOKED:
+            return ERROR_DRM_DEVICE_REVOKED;
+        case ::V1_4::Status::ERROR_DRM_FRAME_TOO_LARGE:
+            return ERROR_DRM_FRAME_TOO_LARGE;
+        case ::V1_4::Status::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
+            return ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
+        case ::V1_4::Status::ERROR_DRM_INSUFFICIENT_SECURITY:
+            return ERROR_DRM_INSUFFICIENT_SECURITY;
+        case ::V1_4::Status::ERROR_DRM_INVALID_STATE:
+            return ERROR_DRM_INVALID_STATE;
+        case ::V1_4::Status::ERROR_DRM_LICENSE_EXPIRED:
+            return ERROR_DRM_LICENSE_EXPIRED;
+        case ::V1_4::Status::ERROR_DRM_NO_LICENSE:
+            return ERROR_DRM_NO_LICENSE;
+        case ::V1_4::Status::ERROR_DRM_NOT_PROVISIONED:
+            return ERROR_DRM_NOT_PROVISIONED;
+        case ::V1_4::Status::ERROR_DRM_RESOURCE_BUSY:
+            return ERROR_DRM_RESOURCE_BUSY;
+        case ::V1_4::Status::ERROR_DRM_RESOURCE_CONTENTION:
+            return ERROR_DRM_RESOURCE_CONTENTION;
+        case ::V1_4::Status::ERROR_DRM_SESSION_LOST_STATE:
+            return ERROR_DRM_SESSION_LOST_STATE;
+        case ::V1_4::Status::ERROR_DRM_SESSION_NOT_OPENED:
+            return ERROR_DRM_SESSION_NOT_OPENED;
 
-    // New in S / drm@1.4:
-    case ::V1_4::Status::CANNOT_DECRYPT_ZERO_SUBSAMPLES:
-        return ERROR_DRM_ZERO_SUBSAMPLES;
-    case ::V1_4::Status::CRYPTO_LIBRARY_ERROR:
-        return ERROR_DRM_CRYPTO_LIBRARY;
-    case ::V1_4::Status::GENERAL_OEM_ERROR:
-        return ERROR_DRM_GENERIC_OEM;
-    case ::V1_4::Status::GENERAL_PLUGIN_ERROR:
-        return ERROR_DRM_GENERIC_PLUGIN;
-    case ::V1_4::Status::INIT_DATA_INVALID:
-        return ERROR_DRM_INIT_DATA;
-    case ::V1_4::Status::KEY_NOT_LOADED:
-        return ERROR_DRM_KEY_NOT_LOADED;
-    case ::V1_4::Status::LICENSE_PARSE_ERROR:
-        return ERROR_DRM_LICENSE_PARSE;
-    case ::V1_4::Status::LICENSE_POLICY_ERROR:
-        return ERROR_DRM_LICENSE_POLICY;
-    case ::V1_4::Status::LICENSE_RELEASE_ERROR:
-        return ERROR_DRM_LICENSE_RELEASE;
-    case ::V1_4::Status::LICENSE_REQUEST_REJECTED:
-        return ERROR_DRM_LICENSE_REQUEST_REJECTED;
-    case ::V1_4::Status::LICENSE_RESTORE_ERROR:
-        return ERROR_DRM_LICENSE_RESTORE;
-    case ::V1_4::Status::LICENSE_STATE_ERROR:
-        return ERROR_DRM_LICENSE_STATE;
-    case ::V1_4::Status::MALFORMED_CERTIFICATE:
-        return ERROR_DRM_CERTIFICATE_MALFORMED;
-    case ::V1_4::Status::MEDIA_FRAMEWORK_ERROR:
-        return ERROR_DRM_MEDIA_FRAMEWORK;
-    case ::V1_4::Status::MISSING_CERTIFICATE:
-        return ERROR_DRM_CERTIFICATE_MISSING;
-    case ::V1_4::Status::PROVISIONING_CERTIFICATE_ERROR:
-        return ERROR_DRM_PROVISIONING_CERTIFICATE;
-    case ::V1_4::Status::PROVISIONING_CONFIGURATION_ERROR:
-        return ERROR_DRM_PROVISIONING_CONFIG;
-    case ::V1_4::Status::PROVISIONING_PARSE_ERROR:
-        return ERROR_DRM_PROVISIONING_PARSE;
-    case ::V1_4::Status::PROVISIONING_REQUEST_REJECTED:
-        return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
-    case ::V1_4::Status::RETRYABLE_PROVISIONING_ERROR:
-        return ERROR_DRM_PROVISIONING_RETRY;
-    case ::V1_4::Status::SECURE_STOP_RELEASE_ERROR:
-        return ERROR_DRM_SECURE_STOP_RELEASE;
-    case ::V1_4::Status::STORAGE_READ_FAILURE:
-        return ERROR_DRM_STORAGE_READ;
-    case ::V1_4::Status::STORAGE_WRITE_FAILURE:
-        return ERROR_DRM_STORAGE_WRITE;
+        // New in S / drm@1.4:
+        case ::V1_4::Status::CANNOT_DECRYPT_ZERO_SUBSAMPLES:
+            return ERROR_DRM_ZERO_SUBSAMPLES;
+        case ::V1_4::Status::CRYPTO_LIBRARY_ERROR:
+            return ERROR_DRM_CRYPTO_LIBRARY;
+        case ::V1_4::Status::GENERAL_OEM_ERROR:
+            return ERROR_DRM_GENERIC_OEM;
+        case ::V1_4::Status::GENERAL_PLUGIN_ERROR:
+            return ERROR_DRM_GENERIC_PLUGIN;
+        case ::V1_4::Status::INIT_DATA_INVALID:
+            return ERROR_DRM_INIT_DATA;
+        case ::V1_4::Status::KEY_NOT_LOADED:
+            return ERROR_DRM_KEY_NOT_LOADED;
+        case ::V1_4::Status::LICENSE_PARSE_ERROR:
+            return ERROR_DRM_LICENSE_PARSE;
+        case ::V1_4::Status::LICENSE_POLICY_ERROR:
+            return ERROR_DRM_LICENSE_POLICY;
+        case ::V1_4::Status::LICENSE_RELEASE_ERROR:
+            return ERROR_DRM_LICENSE_RELEASE;
+        case ::V1_4::Status::LICENSE_REQUEST_REJECTED:
+            return ERROR_DRM_LICENSE_REQUEST_REJECTED;
+        case ::V1_4::Status::LICENSE_RESTORE_ERROR:
+            return ERROR_DRM_LICENSE_RESTORE;
+        case ::V1_4::Status::LICENSE_STATE_ERROR:
+            return ERROR_DRM_LICENSE_STATE;
+        case ::V1_4::Status::MALFORMED_CERTIFICATE:
+            return ERROR_DRM_CERTIFICATE_MALFORMED;
+        case ::V1_4::Status::MEDIA_FRAMEWORK_ERROR:
+            return ERROR_DRM_MEDIA_FRAMEWORK;
+        case ::V1_4::Status::MISSING_CERTIFICATE:
+            return ERROR_DRM_CERTIFICATE_MISSING;
+        case ::V1_4::Status::PROVISIONING_CERTIFICATE_ERROR:
+            return ERROR_DRM_PROVISIONING_CERTIFICATE;
+        case ::V1_4::Status::PROVISIONING_CONFIGURATION_ERROR:
+            return ERROR_DRM_PROVISIONING_CONFIG;
+        case ::V1_4::Status::PROVISIONING_PARSE_ERROR:
+            return ERROR_DRM_PROVISIONING_PARSE;
+        case ::V1_4::Status::PROVISIONING_REQUEST_REJECTED:
+            return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
+        case ::V1_4::Status::RETRYABLE_PROVISIONING_ERROR:
+            return ERROR_DRM_PROVISIONING_RETRY;
+        case ::V1_4::Status::SECURE_STOP_RELEASE_ERROR:
+            return ERROR_DRM_SECURE_STOP_RELEASE;
+        case ::V1_4::Status::STORAGE_READ_FAILURE:
+            return ERROR_DRM_STORAGE_READ;
+        case ::V1_4::Status::STORAGE_WRITE_FAILURE:
+            return ERROR_DRM_STORAGE_WRITE;
 
-    case ::V1_4::Status::ERROR_DRM_UNKNOWN:
-    default:
-        return ERROR_DRM_UNKNOWN;
+        case ::V1_4::Status::ERROR_DRM_UNKNOWN:
+        default:
+            return ERROR_DRM_UNKNOWN;
     }
     return ERROR_DRM_UNKNOWN;
 }
@@ -312,20 +336,34 @@
 char logPriorityToChar(::V1_4::LogPriority priority) {
     char p = 'U';
     switch (priority) {
-        case ::V1_4::LogPriority::VERBOSE:  p = 'V'; break;
-        case ::V1_4::LogPriority::DEBUG:    p = 'D'; break;
-        case ::V1_4::LogPriority::INFO:     p = 'I'; break;
-        case ::V1_4::LogPriority::WARN:     p = 'W'; break;
-        case ::V1_4::LogPriority::ERROR:    p = 'E'; break;
-        case ::V1_4::LogPriority::FATAL:    p = 'F'; break;
-        default: p = 'U'; break;
+        case ::V1_4::LogPriority::VERBOSE:
+            p = 'V';
+            break;
+        case ::V1_4::LogPriority::DEBUG:
+            p = 'D';
+            break;
+        case ::V1_4::LogPriority::INFO:
+            p = 'I';
+            break;
+        case ::V1_4::LogPriority::WARN:
+            p = 'W';
+            break;
+        case ::V1_4::LogPriority::ERROR:
+            p = 'E';
+            break;
+        case ::V1_4::LogPriority::FATAL:
+            p = 'F';
+            break;
+        default:
+            p = 'U';
+            break;
     }
     return p;
 }
 }  // namespace
 
-std::string GetExceptionMessage(status_t err, const char *msg,
-                                const Vector<::V1_4::LogMessage> &logs) {
+std::string GetExceptionMessage(status_t err, const char* msg,
+                                const Vector<::V1_4::LogMessage>& logs) {
     std::string ruler("==============================");
     std::string header("Beginning of DRM Plugin Log");
     std::string footer("End of DRM Plugin Log");
@@ -355,7 +393,7 @@
     return msg8.c_str();
 }
 
-void LogBuffer::addLog(const ::V1_4::LogMessage &log) {
+void LogBuffer::addLog(const ::V1_4::LogMessage& log) {
     std::unique_lock<std::mutex> lock(mMutex);
     mBuffer.push_back(log);
     while (mBuffer.size() > MAX_CAPACITY) {
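
makeDrmFactoriesAidl() enumerates every declared android.hardware.drm.IDrmFactory AIDL instance through AServiceManager_forEachDeclaredInstance and waits for each service before wrapping it. A usage sketch, assuming the function is exposed through android::DrmUtils like the existing HIDL MakeDrmFactories:

    // Sketch only: enumerate AIDL DRM factories declared on the device.
    auto factories = android::DrmUtils::makeDrmFactoriesAidl();
    ALOGI("found %zu AIDL IDrmFactory instances", factories.size());
    for (const auto& factory : factories) {
        // factory is a std::shared_ptr<IDrmFactoryAidl>; query schemes or
        // create plugins from here.
    }
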
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
index 7281066..a85e3cf 100644
--- a/drm/libmediadrm/fuzzer/Android.bp
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -35,7 +35,8 @@
     static_libs: [
         "libmediadrm",
         "liblog",
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
+        "libaidlcommonsupport",
     ],
     header_libs: [
         "libmedia_headers",
@@ -59,6 +60,7 @@
         "android.hardware.drm@1.4",
         "libhidlallocatorutils",
         "libhidlbase",
+        "android.hardware.drm-V1-ndk",
     ],
     fuzz_config: {
         cc: [
diff --git a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
index 8df0477..597b72d 100644
--- a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
+++ b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
@@ -20,10 +20,13 @@
 
 #include <binder/MemoryDealer.h>
 #include <hidlmemory/FrameworkUtils.h>
+#include <media/stagefright/foundation/AString.h>
 #include <mediadrm/CryptoHal.h>
 #include <mediadrm/DrmHal.h>
 #include <utils/String8.h>
 #include "fuzzer/FuzzedDataProvider.h"
+#include <binder/PersistableBundle.h>
+#include <android/hardware/drm/1.0/types.h>
 
 #define AES_BLOCK_SIZE 16
 #define UNUSED_PARAM __attribute__((unused))
@@ -33,6 +36,7 @@
 using android::hardware::fromHeap;
 using ::android::os::PersistableBundle;
 using drm::V1_0::BufferType;
+using ::android::hardware::drm::V1_0::DestinationBuffer;
 
 enum {
     INVALID_UUID = 0,
@@ -398,7 +402,7 @@
         .secureMemory = nullptr};
 
     const uint64_t offset = 0;
-    AString *errorDetailMsg = nullptr;
+    AString errorDetailMsg;
     CryptoPlugin::Mode mode;
     bool shouldPassRandomCryptoMode = mFuzzedDataProvider->ConsumeBool();
     if (shouldPassRandomCryptoMode) {
@@ -408,7 +412,7 @@
             kCryptoMode[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumCryptoMode - 1)];
     }
     mCrypto->decrypt(keyId, iv, mode, pattern, sourceBuffer, offset, subSamples, numSubSamples,
-                     destBuffer, errorDetailMsg);
+                     destBuffer, &errorDetailMsg);
 
     if (heapSeqNum >= 0) {
         mCrypto->unsetHeap(heapSeqNum);
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHal.h b/drm/libmediadrm/include/mediadrm/CryptoHal.h
index 5fd39e6..32a6741 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHal.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHal.h
@@ -15,25 +15,12 @@
  */
 
 #ifndef CRYPTO_HAL_H_
-
 #define CRYPTO_HAL_H_
 
-#include <android/hardware/drm/1.0/ICryptoFactory.h>
-#include <android/hardware/drm/1.0/ICryptoPlugin.h>
-#include <android/hardware/drm/1.1/ICryptoFactory.h>
-#include <android/hardware/drm/1.2/ICryptoPlugin.h>
-#include <android/hardware/drm/1.4/ICryptoPlugin.h>
-
 #include <mediadrm/ICrypto.h>
 #include <utils/KeyedVector.h>
 #include <utils/threads.h>
 
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::ICryptoFactory;
-using drm::V1_0::ICryptoPlugin;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::DestinationBuffer;
-
 using ::android::hardware::HidlMemory;
 
 class IMemoryHeap;
@@ -43,67 +30,30 @@
 struct CryptoHal : public ICrypto {
     CryptoHal();
     virtual ~CryptoHal();
-
     virtual status_t initCheck() const;
-
     virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]);
-
     virtual status_t createPlugin(
             const uint8_t uuid[16], const void *data, size_t size);
-
     virtual status_t destroyPlugin();
-
     virtual bool requiresSecureDecoderComponent(
             const char *mime) const;
-
     virtual void notifyResolution(uint32_t width, uint32_t height);
-
     virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId);
-
     virtual ssize_t decrypt(const uint8_t key[16], const uint8_t iv[16],
             CryptoPlugin::Mode mode, const CryptoPlugin::Pattern &pattern,
-            const ::SharedBuffer &source, size_t offset,
+            const drm::V1_0::SharedBuffer &source, size_t offset,
             const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
-            const ::DestinationBuffer &destination,
+            const drm::V1_0::DestinationBuffer &destination,
             AString *errorDetailMsg);
-
-    virtual int32_t setHeap(const sp<HidlMemory>& heap) {
-        return setHeapBase(heap);
-    }
-    virtual void unsetHeap(int32_t seqNum) { clearHeapBase(seqNum); }
-
+    virtual int32_t setHeap(const sp<HidlMemory>& heap);
+    virtual void unsetHeap(int32_t seqNum);
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
-
 private:
-    mutable Mutex mLock;
-
-    const Vector<sp<ICryptoFactory>> mFactories;
-    sp<ICryptoPlugin> mPlugin;
-    sp<drm::V1_2::ICryptoPlugin> mPluginV1_2;
-
-    /**
-     * mInitCheck is:
-     *   NO_INIT if a plugin hasn't been created yet
-     *   ERROR_UNSUPPORTED if a plugin can't be created for the uuid
-     *   OK after a plugin has been created and mPlugin is valid
-     */
-    status_t mInitCheck;
-
-    KeyedVector<int32_t, size_t> mHeapSizes;
-    int32_t mHeapSeqNum;
-
-    Vector<sp<ICryptoFactory>> makeCryptoFactories();
-    sp<ICryptoPlugin> makeCryptoPlugin(const sp<ICryptoFactory>& factory,
-            const uint8_t uuid[16], const void *initData, size_t size);
-
-    int32_t setHeapBase(const sp<HidlMemory>& heap);
-    void clearHeapBase(int32_t seqNum);
-
-    status_t checkSharedBuffer(const ::SharedBuffer& buffer);
-
+    sp<ICrypto> mCryptoHalHidl;
+    sp<ICrypto> mCryptoHalAidl;
     DISALLOW_EVIL_CONSTRUCTORS(CryptoHal);
 };
 
 }  // namespace android
 
-#endif  // CRYPTO_HAL_H_
+#endif  // CRYPTO_HAL_H_
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h b/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
new file mode 100644
index 0000000..50878a6
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CRYPTO_HAL_AIDL_H_
+#define CRYPTO_HAL_AIDL_H_
+
+#include <aidl/android/hardware/drm/ICryptoPlugin.h>
+#include <aidl/android/hardware/drm/IDrmFactory.h>
+#include <mediadrm/ICrypto.h>
+#include <utils/KeyedVector.h>
+#include <utils/threads.h>
+
+using IDrmFactoryAidl = ::aidl::android::hardware::drm::IDrmFactory;
+using ICryptoPluginAidl = ::aidl::android::hardware::drm::ICryptoPlugin;
+using ::aidl::android::hardware::drm::Uuid;
+
+// -------Hidl interface related-----------------
+// TODO: replace before removing hidl interface
+using ::android::hardware::drm::V1_0::DestinationBuffer;
+using ::android::hardware::drm::V1_0::SharedBuffer;
+
+using ::android::hardware::HidlMemory;
+
+// -------Hidl interface related end-------------
+
+class IMemoryHeap;
+
+namespace android {
+
+struct CryptoHalAidl : public ICrypto {
+    CryptoHalAidl();
+    virtual ~CryptoHalAidl();
+    virtual status_t initCheck() const;
+    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]);
+    virtual status_t createPlugin(const uint8_t uuid[16], const void* data, size_t size);
+    virtual status_t destroyPlugin();
+    virtual bool requiresSecureDecoderComponent(const char* mime) const;
+    virtual void notifyResolution(uint32_t width, uint32_t height);
+    virtual status_t setMediaDrmSession(const Vector<uint8_t>& sessionId);
+    virtual ssize_t decrypt(const uint8_t key[16], const uint8_t iv[16], CryptoPlugin::Mode mode,
+                            const CryptoPlugin::Pattern& pattern, const ::SharedBuffer& source,
+                            size_t offset, const CryptoPlugin::SubSample* subSamples,
+                            size_t numSubSamples, const ::DestinationBuffer& destination,
+                            AString* errorDetailMsg);
+    virtual int32_t setHeap(const sp<HidlMemory>& heap);
+    virtual void unsetHeap(int32_t seqNum);
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
+
+  private:
+    mutable Mutex mLock;
+
+    const std::vector<std::shared_ptr<IDrmFactoryAidl>> mFactories;
+    std::shared_ptr<ICryptoPluginAidl> mPlugin;
+
+    /**
+     * mInitCheck is:
+     *   NO_INIT if a plugin hasn't been created yet
+     *   ERROR_UNSUPPORTED if a plugin can't be created for the uuid
+     *   OK after a plugin has been created and mPlugin is valid
+     */
+    status_t mInitCheck;
+
+    KeyedVector<int32_t, size_t> mHeapSizes;
+    int32_t mHeapSeqNum;
+
+    std::shared_ptr<ICryptoPluginAidl> makeCryptoPlugin(
+            const std::shared_ptr<IDrmFactoryAidl>& factory, const Uuid& uuidAidl,
+            const std::vector<uint8_t> initData);
+
+    status_t checkSharedBuffer(const ::SharedBuffer& buffer);
+    bool isCryptoSchemeSupportedInternal(const uint8_t uuid[16], int* factoryIdx);
+
+    DISALLOW_EVIL_CONSTRUCTORS(CryptoHalAidl);
+};
+
+}  // namespace android
+
+#endif // CRYPTO_HAL_AIDL_H_
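
CryptoHalAidl keeps the same heap bookkeeping as the HIDL wrapper but talks to the AIDL ICryptoPlugin, so the raw uuid[16] the framework passes around has to be converted to the AIDL Uuid parcelable. A conversion sketch; the field name and vector type of Uuid are assumptions about the generated parcelable:

    // Sketch only: raw uuid[16] -> aidl::android::hardware::drm::Uuid.
    // Assumes the parcelable carries the bytes in a std::vector<uint8_t>
    // field named 'uuid'.
    Uuid toUuidAidl(const uint8_t uuid[16]) {
        Uuid uuidAidl;
        uuidAidl.uuid.assign(uuid, uuid + 16);
        return uuidAidl;
    }
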
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h b/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h
new file mode 100644
index 0000000..6db1e89
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/CryptoHalHidl.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CRYPTO_HAL_HIDL_H_
+#define CRYPTO_HAL_HIDL_H_
+
+#include <android/hardware/drm/1.0/ICryptoFactory.h>
+#include <android/hardware/drm/1.0/ICryptoPlugin.h>
+#include <android/hardware/drm/1.1/ICryptoFactory.h>
+#include <android/hardware/drm/1.2/ICryptoPlugin.h>
+#include <android/hardware/drm/1.4/ICryptoPlugin.h>
+
+#include <mediadrm/ICrypto.h>
+#include <utils/KeyedVector.h>
+#include <utils/threads.h>
+
+namespace drm = ::android::hardware::drm;
+using drm::V1_0::ICryptoFactory;
+using drm::V1_0::ICryptoPlugin;
+using drm::V1_0::SharedBuffer;
+using drm::V1_0::DestinationBuffer;
+
+using ::android::hardware::HidlMemory;
+
+class IMemoryHeap;
+
+namespace android {
+
+struct CryptoHalHidl : public ICrypto {
+    CryptoHalHidl();
+    virtual ~CryptoHalHidl();
+
+    virtual status_t initCheck() const;
+
+    virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]);
+
+    virtual status_t createPlugin(
+            const uint8_t uuid[16], const void *data, size_t size);
+
+    virtual status_t destroyPlugin();
+
+    virtual bool requiresSecureDecoderComponent(
+            const char *mime) const;
+
+    virtual void notifyResolution(uint32_t width, uint32_t height);
+
+    virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId);
+
+    virtual ssize_t decrypt(const uint8_t key[16], const uint8_t iv[16],
+            CryptoPlugin::Mode mode, const CryptoPlugin::Pattern &pattern,
+            const ::SharedBuffer &source, size_t offset,
+            const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
+            const ::DestinationBuffer &destination,
+            AString *errorDetailMsg);
+
+    virtual int32_t setHeap(const sp<HidlMemory>& heap) {
+        return setHeapBase(heap);
+    }
+    virtual void unsetHeap(int32_t seqNum) { clearHeapBase(seqNum); }
+
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+
+private:
+    mutable Mutex mLock;
+
+    const Vector<sp<ICryptoFactory>> mFactories;
+    sp<ICryptoPlugin> mPlugin;
+    sp<drm::V1_2::ICryptoPlugin> mPluginV1_2;
+
+    /**
+     * mInitCheck is:
+     *   NO_INIT if a plugin hasn't been created yet
+     *   ERROR_UNSUPPORTED if a plugin can't be created for the uuid
+     *   OK after a plugin has been created and mPlugin is valid
+     */
+    status_t mInitCheck;
+
+    KeyedVector<int32_t, size_t> mHeapSizes;
+    int32_t mHeapSeqNum;
+
+    Vector<sp<ICryptoFactory>> makeCryptoFactories();
+    sp<ICryptoPlugin> makeCryptoPlugin(const sp<ICryptoFactory>& factory,
+            const uint8_t uuid[16], const void *initData, size_t size);
+
+    int32_t setHeapBase(const sp<HidlMemory>& heap);
+    void clearHeapBase(int32_t seqNum);
+
+    status_t checkSharedBuffer(const ::SharedBuffer& buffer);
+
+    DISALLOW_EVIL_CONSTRUCTORS(CryptoHalHidl);
+};
+
+}  // namespace android
+
+#endif  // CRYPTO_HAL_HIDL_H_
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index 7eb1dec..eab597b 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -14,77 +14,27 @@
  * limitations under the License.
  */
 
-#ifndef DRM_HAL_H_
-
-#define DRM_HAL_H_
-
-#include <android/hardware/drm/1.0/IDrmFactory.h>
-#include <android/hardware/drm/1.0/IDrmPlugin.h>
-#include <android/hardware/drm/1.1/IDrmFactory.h>
-#include <android/hardware/drm/1.1/IDrmPlugin.h>
-#include <android/hardware/drm/1.2/IDrmFactory.h>
-#include <android/hardware/drm/1.2/IDrmPlugin.h>
-#include <android/hardware/drm/1.2/IDrmPluginListener.h>
-#include <android/hardware/drm/1.4/IDrmPlugin.h>
-#include <android/hardware/drm/1.4/types.h>
-
-#include <media/drm/DrmAPI.h>
-#include <mediadrm/DrmMetrics.h>
-#include <mediadrm/DrmSessionManager.h>
 #include <mediadrm/IDrm.h>
-#include <mediadrm/IDrmClient.h>
-#include <mediadrm/IDrmMetricsConsumer.h>
-#include <utils/threads.h>
 
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::EventType;
-using drm::V1_0::IDrmFactory;
-using drm::V1_0::IDrmPlugin;
-using drm::V1_0::IDrmPluginListener;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeyStatus;
-using drm::V1_2::OfflineLicenseState;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-
-typedef drm::V1_2::IDrmPluginListener IDrmPluginListener_V1_2;
-typedef drm::V1_0::KeyStatus KeyStatus_V1_0;
+#ifndef DRM_HAL_H_
+#define DRM_HAL_H_
 
 namespace android {
 
-struct DrmSessionClientInterface;
-
-inline bool operator==(const Vector<uint8_t> &l, const Vector<uint8_t> &r) {
-    if (l.size() != r.size()) return false;
-    return memcmp(l.array(), r.array(), l.size()) == 0;
-}
-
-struct DrmHal : public IDrm,
-                public IDrmPluginListener_V1_2 {
-
-    struct DrmSessionClient;
-
+struct DrmHal : public IDrm {
     DrmHal();
     virtual ~DrmHal();
-
     virtual status_t initCheck() const;
-
     virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16],
-                                             const String8& mimeType,
-                                             DrmPlugin::SecurityLevel level,
-                                             bool *isSupported);
-
+                                             const String8 &mimeType,
+                                             DrmPlugin::SecurityLevel securityLevel,
+                                             bool *result);
     virtual status_t createPlugin(const uint8_t uuid[16],
                                   const String8 &appPackageName);
-
     virtual status_t destroyPlugin();
-
-    virtual status_t openSession(DrmPlugin::SecurityLevel level,
+    virtual status_t openSession(DrmPlugin::SecurityLevel securityLevel,
             Vector<uint8_t> &sessionId);
-
     virtual status_t closeSession(Vector<uint8_t> const &sessionId);
-
     virtual status_t
         getKeyRequest(Vector<uint8_t> const &sessionId,
                       Vector<uint8_t> const &initData,
@@ -92,168 +42,89 @@
                       KeyedVector<String8, String8> const &optionalParameters,
                       Vector<uint8_t> &request, String8 &defaultUrl,
                       DrmPlugin::KeyRequestType *keyRequestType);
-
     virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
                                         Vector<uint8_t> const &response,
                                         Vector<uint8_t> &keySetId);
-
     virtual status_t removeKeys(Vector<uint8_t> const &keySetId);
-
     virtual status_t restoreKeys(Vector<uint8_t> const &sessionId,
                                  Vector<uint8_t> const &keySetId);
-
     virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId,
                                     KeyedVector<String8, String8> &infoMap) const;
-
     virtual status_t getProvisionRequest(String8 const &certType,
                                          String8 const &certAuthority,
                                          Vector<uint8_t> &request,
-                                         String8 &defaulUrl);
-
+                                         String8 &defaultUrl);
     virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
                                               Vector<uint8_t> &certificate,
                                               Vector<uint8_t> &wrappedKey);
-
     virtual status_t getSecureStops(List<Vector<uint8_t>> &secureStops);
     virtual status_t getSecureStopIds(List<Vector<uint8_t>> &secureStopIds);
     virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
-
     virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
     virtual status_t removeSecureStop(Vector<uint8_t> const &ssid);
     virtual status_t removeAllSecureStops();
-
     virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
             DrmPlugin::HdcpLevel *maxLevel) const;
     virtual status_t getNumberOfSessions(uint32_t *currentSessions,
             uint32_t *maxSessions) const;
     virtual status_t getSecurityLevel(Vector<uint8_t> const &sessionId,
             DrmPlugin::SecurityLevel *level) const;
-
     virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const;
     virtual status_t removeOfflineLicense(Vector<uint8_t> const &keySetId);
     virtual status_t getOfflineLicenseState(Vector<uint8_t> const &keySetId,
             DrmPlugin::OfflineLicenseState *licenseState) const;
-
-    virtual status_t getPropertyString(String8 const &name, String8 &value ) const;
+    virtual status_t getPropertyString(String8 const &name, String8 &value) const;
     virtual status_t getPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> &value ) const;
-    virtual status_t setPropertyString(String8 const &name, String8 const &value ) const;
+                                          Vector<uint8_t> &value) const;
+    virtual status_t setPropertyString(String8 const &name,
+                                       String8 const &value) const;
     virtual status_t setPropertyByteArray(String8 const &name,
-                                          Vector<uint8_t> const &value ) const;
+                                          Vector<uint8_t> const &value) const;
     virtual status_t getMetrics(const sp<IDrmMetricsConsumer> &consumer);
-
     virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId,
                                         String8 const &algorithm);
-
     virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId,
                                      String8 const &algorithm);
-
     virtual status_t encrypt(Vector<uint8_t> const &sessionId,
                              Vector<uint8_t> const &keyId,
                              Vector<uint8_t> const &input,
                              Vector<uint8_t> const &iv,
                              Vector<uint8_t> &output);
-
     virtual status_t decrypt(Vector<uint8_t> const &sessionId,
                              Vector<uint8_t> const &keyId,
                              Vector<uint8_t> const &input,
                              Vector<uint8_t> const &iv,
                              Vector<uint8_t> &output);
-
     virtual status_t sign(Vector<uint8_t> const &sessionId,
                           Vector<uint8_t> const &keyId,
                           Vector<uint8_t> const &message,
                           Vector<uint8_t> &signature);
-
     virtual status_t verify(Vector<uint8_t> const &sessionId,
                             Vector<uint8_t> const &keyId,
                             Vector<uint8_t> const &message,
                             Vector<uint8_t> const &signature,
                             bool &match);
-
     virtual status_t signRSA(Vector<uint8_t> const &sessionId,
                              String8 const &algorithm,
                              Vector<uint8_t> const &message,
                              Vector<uint8_t> const &wrappedKey,
                              Vector<uint8_t> &signature);
-
     virtual status_t setListener(const sp<IDrmClient>& listener);
-
     virtual status_t requiresSecureDecoder(const char *mime, bool *required) const;
-
     virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
                                            bool *required) const;
-
     virtual status_t setPlaybackId(
             Vector<uint8_t> const &sessionId,
             const char *playbackId);
-
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
-
-    // Methods of IDrmPluginListener
-    Return<void> sendEvent(EventType eventType,
-            const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& data);
-
-    Return<void> sendExpirationUpdate(const hidl_vec<uint8_t>& sessionId,
-            int64_t expiryTimeInMS);
-
-    Return<void> sendKeysChange(const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<KeyStatus_V1_0>& keyStatusList, bool hasNewUsableKey);
-
-    Return<void> sendKeysChange_1_2(const hidl_vec<uint8_t>& sessionId,
-            const hidl_vec<KeyStatus>& keyStatusList, bool hasNewUsableKey);
-
-    Return<void> sendSessionLostState(const hidl_vec<uint8_t>& sessionId);
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
 
 private:
-    static Mutex mLock;
-
-    sp<IDrmClient> mListener;
-    mutable Mutex mEventLock;
-    mutable Mutex mNotifyLock;
-
-    const std::vector<sp<IDrmFactory>> mFactories;
-    sp<IDrmPlugin> mPlugin;
-    sp<drm::V1_1::IDrmPlugin> mPluginV1_1;
-    sp<drm::V1_2::IDrmPlugin> mPluginV1_2;
-    sp<drm::V1_4::IDrmPlugin> mPluginV1_4;
-    String8 mAppPackageName;
-
-    // Mutable to allow modification within GetPropertyByteArray.
-    mutable MediaDrmMetrics mMetrics;
-
-    std::vector<std::shared_ptr<DrmSessionClient>> mOpenSessions;
-    void closeOpenSessions();
-    void cleanup();
-
-    /**
-     * mInitCheck is:
-     *   NO_INIT if a plugin hasn't been created yet
-     *   ERROR_UNSUPPORTED if a plugin can't be created for the uuid
-     *   OK after a plugin has been created and mPlugin is valid
-     */
-    status_t mInitCheck;
-
-    std::vector<sp<IDrmFactory>> makeDrmFactories();
-    sp<IDrmPlugin> makeDrmPlugin(const sp<IDrmFactory>& factory,
-            const uint8_t uuid[16], const String8& appPackageName);
-
-    void writeByteArray(Parcel &obj, const hidl_vec<uint8_t>& array);
-
-    std::string reportPluginMetrics() const;
-    std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
-    status_t getPropertyStringInternal(String8 const &name, String8 &value) const;
-    status_t getPropertyByteArrayInternal(String8 const &name,
-                                          Vector<uint8_t> &value) const;
-    status_t matchMimeTypeAndSecurityLevel(const sp<IDrmFactory> &factory,
-                                           const uint8_t uuid[16],
-                                           const String8 &mimeType,
-                                           DrmPlugin::SecurityLevel level,
-                                           bool *isSupported);
-
+    sp<IDrm> mDrmHalHidl;
+    sp<IDrm> mDrmHalAidl;
     DISALLOW_EVIL_CONSTRUCTORS(DrmHal);
 };
 
-}  // namespace android
+}  // namespace android
 
-#endif  // DRM_HAL_H_
+#endif  // DRM_HAL_H_
\ No newline at end of file
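
DrmHal keeps its full IDrm surface but is now just a facade over an AIDL-backed and a HIDL-backed implementation (mDrmHalAidl, mDrmHalHidl). The routing policy is not visible in this header; a purely illustrative pass-through, assuming the AIDL backend is preferred when available, might look like:

    // Illustrative only; the real routing lives in DrmHal.cpp and may differ.
    status_t DrmHal::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
        if (mDrmHalAidl->createPlugin(uuid, appPackageName) == OK) return OK;
        return mDrmHalHidl->createPlugin(uuid, appPackageName);
    }
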
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
new file mode 100644
index 0000000..0f51ce9
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_HAL_AIDL_H_
+#define DRM_HAL_AIDL_H_
+
+#include <memory>
+#include <aidl/android/hardware/drm/BnDrmPluginListener.h>
+#include <aidl/android/hardware/drm/IDrmFactory.h>
+#include <aidl/android/hardware/drm/IDrmPlugin.h>
+#include <aidl/android/media/BnResourceManagerClient.h>
+#include <mediadrm/DrmMetrics.h>
+#include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmHalListener.h>
+#include <mediadrm/IDrm.h>
+
+using IDrmPluginAidl = ::aidl::android::hardware::drm::IDrmPlugin;
+using IDrmFactoryAidl = ::aidl::android::hardware::drm::IDrmFactory;
+using EventTypeAidl = ::aidl::android::hardware::drm::EventType;
+using KeyStatusAidl = ::aidl::android::hardware::drm::KeyStatus;
+using ::aidl::android::hardware::drm::Uuid;
+
+namespace android {
+struct DrmHalAidl : public IDrm {
+    struct DrmSessionClient;
+    DrmHalAidl();
+    virtual ~DrmHalAidl();
+    virtual status_t initCheck() const;
+    virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16], const String8& mimeType,
+                                             DrmPlugin::SecurityLevel securityLevel, bool* result);
+    virtual status_t createPlugin(const uint8_t uuid[16], const String8& appPackageName);
+    virtual status_t destroyPlugin();
+    virtual status_t openSession(DrmPlugin::SecurityLevel securityLevel,
+                                 Vector<uint8_t>& sessionId);
+    virtual status_t closeSession(Vector<uint8_t> const& sessionId);
+    virtual status_t getKeyRequest(Vector<uint8_t> const& sessionId,
+                                   Vector<uint8_t> const& initData, String8 const& mimeType,
+                                   DrmPlugin::KeyType keyType,
+                                   KeyedVector<String8, String8> const& optionalParameters,
+                                   Vector<uint8_t>& request, String8& defaultUrl,
+                                   DrmPlugin::KeyRequestType* keyRequestType);
+    virtual status_t provideKeyResponse(Vector<uint8_t> const& sessionId,
+                                        Vector<uint8_t> const& response, Vector<uint8_t>& keySetId);
+    virtual status_t removeKeys(Vector<uint8_t> const& keySetId);
+    virtual status_t restoreKeys(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keySetId);
+    virtual status_t queryKeyStatus(Vector<uint8_t> const& sessionId,
+                                    KeyedVector<String8, String8>& infoMap) const;
+    virtual status_t getProvisionRequest(String8 const& certType, String8 const& certAuthority,
+                                         Vector<uint8_t>& request, String8& defaultUrl);
+    virtual status_t provideProvisionResponse(Vector<uint8_t> const& response,
+                                              Vector<uint8_t>& certificate,
+                                              Vector<uint8_t>& wrappedKey);
+    virtual status_t getSecureStops(List<Vector<uint8_t>>& secureStops);
+    virtual status_t getSecureStopIds(List<Vector<uint8_t>>& secureStopIds);
+    virtual status_t getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop);
+    virtual status_t releaseSecureStops(Vector<uint8_t> const& ssRelease);
+    virtual status_t removeSecureStop(Vector<uint8_t> const& ssid);
+    virtual status_t removeAllSecureStops();
+    virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel* connectedLevel,
+                                   DrmPlugin::HdcpLevel* maxLevel) const;
+    virtual status_t getNumberOfSessions(uint32_t* currentSessions, uint32_t* maxSessions) const;
+    virtual status_t getSecurityLevel(Vector<uint8_t> const& sessionId,
+                                      DrmPlugin::SecurityLevel* level) const;
+    virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const;
+    virtual status_t removeOfflineLicense(Vector<uint8_t> const& keySetId);
+    virtual status_t getOfflineLicenseState(Vector<uint8_t> const& keySetId,
+                                            DrmPlugin::OfflineLicenseState* licenseState) const;
+    virtual status_t getPropertyString(String8 const& name, String8& value) const;
+    virtual status_t getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const;
+    virtual status_t setPropertyString(String8 const& name, String8 const& value) const;
+    virtual status_t setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const;
+    virtual status_t getMetrics(const sp<IDrmMetricsConsumer>& consumer);
+    virtual status_t setCipherAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm);
+    virtual status_t setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm);
+    virtual status_t encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                             Vector<uint8_t>& output);
+    virtual status_t decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                             Vector<uint8_t> const& input, Vector<uint8_t> const& iv,
+                             Vector<uint8_t>& output);
+    virtual status_t sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                          Vector<uint8_t> const& message, Vector<uint8_t>& signature);
+    virtual status_t verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
+                            Vector<uint8_t> const& message, Vector<uint8_t> const& signature,
+                            bool& match);
+    virtual status_t signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
+                             Vector<uint8_t> const& message, Vector<uint8_t> const& wrappedKey,
+                             Vector<uint8_t>& signature);
+    virtual status_t setListener(const sp<IDrmClient>& listener);
+    virtual status_t requiresSecureDecoder(const char* mime, bool* required) const;
+    virtual status_t requiresSecureDecoder(const char* mime, DrmPlugin::SecurityLevel securityLevel,
+                                           bool* required) const;
+    virtual status_t setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
+
+    ::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
+                                 const std::vector<uint8_t>& in_sessionId,
+                                 const std::vector<uint8_t>& in_data);
+    ::ndk::ScopedAStatus onExpirationUpdate(const std::vector<uint8_t>& in_sessionId,
+                                            int64_t in_expiryTimeInMS);
+    ::ndk::ScopedAStatus onKeysChange(const std::vector<uint8_t>& in_sessionId,
+                                      const std::vector<KeyStatusAidl>& in_keyStatusList,
+                                      bool in_hasNewUsableKey);
+    ::ndk::ScopedAStatus onSessionLostState(const std::vector<uint8_t>& in_sessionId);
+  private:
+    static Mutex mLock;
+    mutable MediaDrmMetrics mMetrics;
+    std::shared_ptr<DrmHalListener> mListener;
+    const std::vector<std::shared_ptr<IDrmFactoryAidl>> mFactories;
+    std::shared_ptr<IDrmPluginAidl> mPlugin;
+    status_t mInitCheck;
+    std::vector<std::shared_ptr<DrmSessionClient>> mOpenSessions;
+    void cleanup();
+    void closeOpenSessions();
+    std::string reportPluginMetrics() const;
+    std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
+    status_t getPropertyStringInternal(String8 const& name, String8& value) const;
+    status_t getPropertyByteArrayInternal(String8 const& name, Vector<uint8_t>& value) const;
+    DISALLOW_EVIL_CONSTRUCTORS(DrmHalAidl);
+};
+
+}  // namespace android
+
+#endif // DRM_HAL_AIDL_H_
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
new file mode 100644
index 0000000..11f0608
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
@@ -0,0 +1,255 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_HAL_HIDL_H_
+#define DRM_HAL_HIDL_H_
+
+#include <android/hardware/drm/1.2/IDrmFactory.h>
+#include <android/hardware/drm/1.2/IDrmPlugin.h>
+#include <android/hardware/drm/1.2/IDrmPluginListener.h>
+#include <android/hardware/drm/1.4/IDrmPlugin.h>
+#include <android/hardware/drm/1.4/types.h>
+
+#include <media/drm/DrmAPI.h>
+#include <mediadrm/DrmMetrics.h>
+#include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/IDrm.h>
+#include <mediadrm/IDrmClient.h>
+#include <mediadrm/IDrmMetricsConsumer.h>
+#include <utils/threads.h>
+
+namespace drm = ::android::hardware::drm;
+using drm::V1_0::EventType;
+using drm::V1_0::IDrmFactory;
+using drm::V1_0::IDrmPlugin;
+using drm::V1_0::IDrmPluginListener;
+using drm::V1_1::SecurityLevel;
+using drm::V1_2::KeyStatus;
+using drm::V1_2::OfflineLicenseState;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+typedef drm::V1_2::IDrmPluginListener IDrmPluginListener_V1_2;
+typedef drm::V1_0::KeyStatus KeyStatus_V1_0;
+
+namespace android {
+
+struct DrmSessionClientInterface;
+
+inline bool operator==(const Vector<uint8_t> &l, const Vector<uint8_t> &r) {
+    if (l.size() != r.size()) return false;
+    return memcmp(l.array(), r.array(), l.size()) == 0;
+}
+
+struct DrmHalHidl : public IDrm,
+                public IDrmPluginListener_V1_2 {
+
+    struct DrmSessionClient;
+
+    DrmHalHidl();
+    virtual ~DrmHalHidl();
+
+    virtual status_t initCheck() const;
+
+    virtual status_t isCryptoSchemeSupported(const uint8_t uuid[16],
+                                             const String8& mimeType,
+                                             DrmPlugin::SecurityLevel level,
+                                             bool *isSupported);
+
+    virtual status_t createPlugin(const uint8_t uuid[16],
+                                  const String8 &appPackageName);
+
+    virtual status_t destroyPlugin();
+
+    virtual status_t openSession(DrmPlugin::SecurityLevel level,
+            Vector<uint8_t> &sessionId);
+
+    virtual status_t closeSession(Vector<uint8_t> const &sessionId);
+
+    virtual status_t
+        getKeyRequest(Vector<uint8_t> const &sessionId,
+                      Vector<uint8_t> const &initData,
+                      String8 const &mimeType, DrmPlugin::KeyType keyType,
+                      KeyedVector<String8, String8> const &optionalParameters,
+                      Vector<uint8_t> &request, String8 &defaultUrl,
+                      DrmPlugin::KeyRequestType *keyRequestType);
+
+    virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
+                                        Vector<uint8_t> const &response,
+                                        Vector<uint8_t> &keySetId);
+
+    virtual status_t removeKeys(Vector<uint8_t> const &keySetId);
+
+    virtual status_t restoreKeys(Vector<uint8_t> const &sessionId,
+                                 Vector<uint8_t> const &keySetId);
+
+    virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId,
+                                    KeyedVector<String8, String8> &infoMap) const;
+
+    virtual status_t getProvisionRequest(String8 const &certType,
+                                         String8 const &certAuthority,
+                                         Vector<uint8_t> &request,
+                                         String8 &defaultUrl);
+
+    virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
+                                              Vector<uint8_t> &certificate,
+                                              Vector<uint8_t> &wrappedKey);
+
+    virtual status_t getSecureStops(List<Vector<uint8_t>> &secureStops);
+    virtual status_t getSecureStopIds(List<Vector<uint8_t>> &secureStopIds);
+    virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
+
+    virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
+    virtual status_t removeSecureStop(Vector<uint8_t> const &ssid);
+    virtual status_t removeAllSecureStops();
+
+    virtual status_t getHdcpLevels(DrmPlugin::HdcpLevel *connectedLevel,
+            DrmPlugin::HdcpLevel *maxLevel) const;
+    virtual status_t getNumberOfSessions(uint32_t *currentSessions,
+            uint32_t *maxSessions) const;
+    virtual status_t getSecurityLevel(Vector<uint8_t> const &sessionId,
+            DrmPlugin::SecurityLevel *level) const;
+
+    virtual status_t getOfflineLicenseKeySetIds(List<Vector<uint8_t>> &keySetIds) const;
+    virtual status_t removeOfflineLicense(Vector<uint8_t> const &keySetId);
+    virtual status_t getOfflineLicenseState(Vector<uint8_t> const &keySetId,
+            DrmPlugin::OfflineLicenseState *licenseState) const;
+
+    virtual status_t getPropertyString(String8 const &name, String8 &value ) const;
+    virtual status_t getPropertyByteArray(String8 const &name,
+                                          Vector<uint8_t> &value ) const;
+    virtual status_t setPropertyString(String8 const &name, String8 const &value ) const;
+    virtual status_t setPropertyByteArray(String8 const &name,
+                                          Vector<uint8_t> const &value ) const;
+    virtual status_t getMetrics(const sp<IDrmMetricsConsumer> &consumer);
+
+    virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId,
+                                        String8 const &algorithm);
+
+    virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId,
+                                     String8 const &algorithm);
+
+    virtual status_t encrypt(Vector<uint8_t> const &sessionId,
+                             Vector<uint8_t> const &keyId,
+                             Vector<uint8_t> const &input,
+                             Vector<uint8_t> const &iv,
+                             Vector<uint8_t> &output);
+
+    virtual status_t decrypt(Vector<uint8_t> const &sessionId,
+                             Vector<uint8_t> const &keyId,
+                             Vector<uint8_t> const &input,
+                             Vector<uint8_t> const &iv,
+                             Vector<uint8_t> &output);
+
+    virtual status_t sign(Vector<uint8_t> const &sessionId,
+                          Vector<uint8_t> const &keyId,
+                          Vector<uint8_t> const &message,
+                          Vector<uint8_t> &signature);
+
+    virtual status_t verify(Vector<uint8_t> const &sessionId,
+                            Vector<uint8_t> const &keyId,
+                            Vector<uint8_t> const &message,
+                            Vector<uint8_t> const &signature,
+                            bool &match);
+
+    virtual status_t signRSA(Vector<uint8_t> const &sessionId,
+                             String8 const &algorithm,
+                             Vector<uint8_t> const &message,
+                             Vector<uint8_t> const &wrappedKey,
+                             Vector<uint8_t> &signature);
+
+    virtual status_t setListener(const sp<IDrmClient>& listener);
+
+    virtual status_t requiresSecureDecoder(const char *mime, bool *required) const;
+
+    virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+                                           bool *required) const;
+
+    virtual status_t setPlaybackId(
+            Vector<uint8_t> const &sessionId,
+            const char *playbackId);
+
+    virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
+
+    // Methods of IDrmPluginListener
+    Return<void> sendEvent(EventType eventType,
+            const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& data);
+
+    Return<void> sendExpirationUpdate(const hidl_vec<uint8_t>& sessionId,
+            int64_t expiryTimeInMS);
+
+    Return<void> sendKeysChange(const hidl_vec<uint8_t>& sessionId,
+            const hidl_vec<KeyStatus_V1_0>& keyStatusList, bool hasNewUsableKey);
+
+    Return<void> sendKeysChange_1_2(const hidl_vec<uint8_t>& sessionId,
+            const hidl_vec<KeyStatus>& keyStatusList, bool hasNewUsableKey);
+
+    Return<void> sendSessionLostState(const hidl_vec<uint8_t>& sessionId);
+
+private:
+    static Mutex mLock;
+
+    sp<IDrmClient> mListener;
+    mutable Mutex mEventLock;
+    mutable Mutex mNotifyLock;
+
+    const std::vector<sp<IDrmFactory>> mFactories;
+    sp<IDrmPlugin> mPlugin;
+    sp<drm::V1_1::IDrmPlugin> mPluginV1_1;
+    sp<drm::V1_2::IDrmPlugin> mPluginV1_2;
+    sp<drm::V1_4::IDrmPlugin> mPluginV1_4;
+    String8 mAppPackageName;
+
+    // Mutable to allow modification within GetPropertyByteArray.
+    mutable MediaDrmMetrics mMetrics;
+
+    std::vector<std::shared_ptr<DrmSessionClient>> mOpenSessions;
+    void closeOpenSessions();
+    void cleanup();
+
+    /**
+     * mInitCheck is:
+     *   NO_INIT if a plugin hasn't been created yet
+     *   ERROR_UNSUPPORTED if a plugin can't be created for the uuid
+     *   OK after a plugin has been created and mPlugin is valid
+     */
+    status_t mInitCheck;
+
+    std::vector<sp<IDrmFactory>> makeDrmFactories();
+    sp<IDrmPlugin> makeDrmPlugin(const sp<IDrmFactory>& factory,
+            const uint8_t uuid[16], const String8& appPackageName);
+
+    void writeByteArray(Parcel &obj, const hidl_vec<uint8_t>& array);
+
+    std::string reportPluginMetrics() const;
+    std::string reportFrameworkMetrics(const std::string& pluginMetrics) const;
+    status_t getPropertyStringInternal(String8 const &name, String8 &value) const;
+    status_t getPropertyByteArrayInternal(String8 const &name,
+                                          Vector<uint8_t> &value) const;
+    status_t matchMimeTypeAndSecurityLevel(const sp<IDrmFactory> &factory,
+                                           const uint8_t uuid[16],
+                                           const String8 &mimeType,
+                                           DrmPlugin::SecurityLevel level,
+                                           bool *isSupported);
+
+    DISALLOW_EVIL_CONSTRUCTORS(DrmHalHidl);
+};
+
+}  // namespace android
+
+#endif // DRM_HAL_HIDL_H_
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalListener.h b/drm/libmediadrm/include/mediadrm/DrmHalListener.h
new file mode 100644
index 0000000..22361ad
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/DrmHalListener.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_HAL_LISTENER_H_
+#define DRM_HAL_LISTENER_H_
+
+#include <aidl/android/hardware/drm/BnDrmPluginListener.h>
+#include <mediadrm/DrmMetrics.h>
+#include <mediadrm/IDrmClient.h>
+
+using EventTypeAidl = ::aidl::android::hardware::drm::EventType;
+using KeyStatusAidl = ::aidl::android::hardware::drm::KeyStatus;
+using aidl::android::hardware::drm::BnDrmPluginListener;
+
+namespace android {
+struct DrmHalListener : public BnDrmPluginListener {
+    explicit DrmHalListener(MediaDrmMetrics* mMetrics);
+    ~DrmHalListener();
+    ::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
+                                 const std::vector<uint8_t>& in_sessionId,
+                                 const std::vector<uint8_t>& in_data);
+    ::ndk::ScopedAStatus onExpirationUpdate(const std::vector<uint8_t>& in_sessionId,
+                                            int64_t in_expiryTimeInMS);
+    ::ndk::ScopedAStatus onKeysChange(const std::vector<uint8_t>& in_sessionId,
+                                      const std::vector<KeyStatusAidl>& in_keyStatusList,
+                                      bool in_hasNewUsableKey);
+    ::ndk::ScopedAStatus onSessionLostState(const std::vector<uint8_t>& in_sessionId);
+    void setListener(sp<IDrmClient> listener);
+private:
+    mutable MediaDrmMetrics* mMetrics;
+    sp<IDrmClient> mListener;
+    mutable Mutex mEventLock;
+    mutable Mutex mNotifyLock;
+};
+} // namespace android
+
+#endif  // DRM_HAL_LISTENER_H_
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/DrmMetrics.h b/drm/libmediadrm/include/mediadrm/DrmMetrics.h
index 100b8f7..e1775c7 100644
--- a/drm/libmediadrm/include/mediadrm/DrmMetrics.h
+++ b/drm/libmediadrm/include/mediadrm/DrmMetrics.h
@@ -50,12 +50,10 @@
   CounterMetric<status_t> mGetProvisionRequestCounter;
   // Count of provideProvisionResponse calls.
   CounterMetric<status_t> mProvideProvisionResponseCounter;
-
   // Count of key status events broken out by status type.
-  CounterMetric<::android::hardware::drm::V1_2::KeyStatusType>
-      mKeyStatusChangeCounter;
+  CounterMetric<uint32_t> mKeyStatusChangeCounter;
   // Count of events broken out by event type
-  CounterMetric<::android::hardware::drm::V1_0::EventType> mEventCounter;
+  CounterMetric<uint32_t> mEventCounter;
 
   // Count getPropertyByteArray calls to retrieve the device unique id.
   CounterMetric<status_t> mGetDeviceUniqueIdCounter;
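
The change above widens mKeyStatusChangeCounter and mEventCounter from HIDL-enum keys to plain uint32_t keys, presumably so the same counters can be fed from both the existing drm@1.x callbacks and the new AIDL listener path, whose enums are distinct types. A self-contained sketch of that idea, using a hypothetical UintCounter in place of the real CounterMetric template (enum names and values below are illustrative only):

    #include <cstdint>
    #include <cstdio>
    #include <map>

    // Hypothetical stand-in for CounterMetric<uint32_t>: counts occurrences per key.
    class UintCounter {
      public:
        void Increment(uint32_t key) { ++counts_[key]; }
        uint64_t Value(uint32_t key) const {
            auto it = counts_.find(key);
            return it == counts_.end() ? 0 : it->second;
        }
      private:
        std::map<uint32_t, uint64_t> counts_;
    };

    // Two unrelated enums (illustrative values) can feed one counter once the
    // key type is widened to uint32_t.
    enum class HidlEventType : uint32_t { PROVISION_REQUIRED = 1 };
    enum class AidlEventType : int32_t { PROVISION_REQUIRED = 1 };

    int main() {
        UintCounter eventCounter;
        eventCounter.Increment(static_cast<uint32_t>(HidlEventType::PROVISION_REQUIRED));
        eventCounter.Increment(static_cast<uint32_t>(AidlEventType::PROVISION_REQUIRED));
        std::printf("PROVISION_REQUIRED count: %llu\n",
                    static_cast<unsigned long long>(eventCounter.Value(1)));
        return 0;
    }
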
diff --git a/drm/libmediadrm/include/mediadrm/IDrm.h b/drm/libmediadrm/include/mediadrm/IDrm.h
index a88784d..ee2be6a 100644
--- a/drm/libmediadrm/include/mediadrm/IDrm.h
+++ b/drm/libmediadrm/include/mediadrm/IDrm.h
@@ -165,6 +165,8 @@
 
     virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
 
+    virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const = 0;
+
 protected:
     IDrm() {}
 
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index ec0b878..980ce55 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -31,6 +31,7 @@
 #include <chrono>
 #include <cstddef>
 #include <cstdint>
+#include <cstring>
 #include <ctime>
 #include <deque>
 #include <endian.h>
@@ -38,12 +39,20 @@
 #include <mutex>
 #include <string>
 #include <vector>
-
+#include <aidl/android/hardware/drm/LogMessage.h>
+#include <aidl/android/hardware/drm/Status.h>
+#include <aidl/android/hardware/drm/IDrmFactory.h>
 
 using namespace ::android::hardware::drm;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::Return;
 
+using ::aidl::android::hardware::drm::LogPriority;
+using ::aidl::android::hardware::drm::LogMessage;
+using ::aidl::android::hardware::drm::Uuid;
+using StatusAidl = ::aidl::android::hardware::drm::Status;
+using IDrmFactoryAidl = ::aidl::android::hardware::drm::IDrmFactory;
+
 namespace android {
 
 struct ICrypto;
@@ -92,7 +101,8 @@
 
 template <typename... Args>
 void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
-    const uint64_t* uuid2 = reinterpret_cast<const uint64_t*>(uuid);
+    uint64_t uuid2[2] = {};
+    std::memcpy(uuid2, uuid, sizeof(uuid2));
     std::string uuidFmt("uuid=[%lx %lx] ");
     uuidFmt += fmt;
     LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
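
The hunk above replaces a reinterpret_cast of the caller's uuid bytes with a memcpy into a local uint64_t[2]: reading a uint8_t buffer through a uint64_t* is undefined behavior when the buffer is not 8-byte aligned (and violates strict aliasing), while copying the bytes first is well defined on any platform. A stand-alone sketch of the same pattern (the byte values are arbitrary, and the printed word order is host-endian; the real code applies htobe64 before logging):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Extract two 64-bit words from a 16-byte buffer without alignment or
    // strict-aliasing problems: copy first, then use the local array.
    static void splitUuid(const uint8_t uuid[16], uint64_t out[2]) {
        std::memcpy(out, uuid, 2 * sizeof(uint64_t));
    }

    int main() {
        const uint8_t uuid[16] = {0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef,
                                  0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef};
        uint64_t words[2] = {};
        splitUuid(uuid, words);
        std::printf("%016llx %016llx\n",
                    static_cast<unsigned long long>(words[0]),
                    static_cast<unsigned long long>(words[1]));
        return 0;
    }
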
@@ -155,6 +165,14 @@
     obj.writeInt32(hasNewUsableKey);
 }
 
+inline Uuid toAidlUuid(const uint8_t uuid[16]) {
+    Uuid uuidAidl;
+    for (int i = 0; i < 16; ++i) uuidAidl.uuid[i] = uuid[i];
+    return uuidAidl;
+}
+
+std::vector<std::shared_ptr<IDrmFactoryAidl>> makeDrmFactoriesAidl();
+
 std::vector<sp<::V1_0::IDrmFactory>> MakeDrmFactories(const uint8_t uuid[16] = nullptr);
 
 std::vector<sp<::V1_0::IDrmPlugin>> MakeDrmPlugins(const uint8_t uuid[16],
@@ -180,6 +198,138 @@
     return toStatusT_1_4(err);
 }
 
+inline status_t statusAidlToStatusT(::ndk::ScopedAStatus &statusAidl) {
+    if (statusAidl.isOk()) return OK;
+    if (statusAidl.getExceptionCode() != EX_SERVICE_SPECIFIC) return DEAD_OBJECT;
+    auto status = static_cast<StatusAidl>(statusAidl.getServiceSpecificError());
+    switch (status) {
+    case StatusAidl::OK:
+        return OK;
+    case StatusAidl::BAD_VALUE:
+        return BAD_VALUE;
+    case StatusAidl::ERROR_DRM_CANNOT_HANDLE:
+        return ERROR_DRM_CANNOT_HANDLE;
+    case StatusAidl::ERROR_DRM_DECRYPT:
+        return ERROR_DRM_DECRYPT;
+    case StatusAidl::ERROR_DRM_DEVICE_REVOKED:
+        return ERROR_DRM_DEVICE_REVOKED;
+    case StatusAidl::ERROR_DRM_FRAME_TOO_LARGE:
+        return ERROR_DRM_FRAME_TOO_LARGE;
+    case StatusAidl::ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION:
+        return ERROR_DRM_INSUFFICIENT_OUTPUT_PROTECTION;
+    case StatusAidl::ERROR_DRM_INSUFFICIENT_SECURITY:
+        return ERROR_DRM_INSUFFICIENT_SECURITY;
+    case StatusAidl::ERROR_DRM_INVALID_STATE:
+        return ERROR_DRM_INVALID_STATE;
+    case StatusAidl::ERROR_DRM_LICENSE_EXPIRED:
+        return ERROR_DRM_LICENSE_EXPIRED;
+    case StatusAidl::ERROR_DRM_NO_LICENSE:
+        return ERROR_DRM_NO_LICENSE;
+    case StatusAidl::ERROR_DRM_NOT_PROVISIONED:
+        return ERROR_DRM_NOT_PROVISIONED;
+    case StatusAidl::ERROR_DRM_RESOURCE_BUSY:
+        return ERROR_DRM_RESOURCE_BUSY;
+    case StatusAidl::ERROR_DRM_RESOURCE_CONTENTION:
+        return ERROR_DRM_RESOURCE_CONTENTION;
+    case StatusAidl::ERROR_DRM_SESSION_LOST_STATE:
+        return ERROR_DRM_SESSION_LOST_STATE;
+    case StatusAidl::ERROR_DRM_SESSION_NOT_OPENED:
+        return ERROR_DRM_SESSION_NOT_OPENED;
+
+    // New in S / drm@1.4:
+    case StatusAidl::CANNOT_DECRYPT_ZERO_SUBSAMPLES:
+        return ERROR_DRM_ZERO_SUBSAMPLES;
+    case StatusAidl::CRYPTO_LIBRARY_ERROR:
+        return ERROR_DRM_CRYPTO_LIBRARY;
+    case StatusAidl::GENERAL_OEM_ERROR:
+        return ERROR_DRM_GENERIC_OEM;
+    case StatusAidl::GENERAL_PLUGIN_ERROR:
+        return ERROR_DRM_GENERIC_PLUGIN;
+    case StatusAidl::INIT_DATA_INVALID:
+        return ERROR_DRM_INIT_DATA;
+    case StatusAidl::KEY_NOT_LOADED:
+        return ERROR_DRM_KEY_NOT_LOADED;
+    case StatusAidl::LICENSE_PARSE_ERROR:
+        return ERROR_DRM_LICENSE_PARSE;
+    case StatusAidl::LICENSE_POLICY_ERROR:
+        return ERROR_DRM_LICENSE_POLICY;
+    case StatusAidl::LICENSE_RELEASE_ERROR:
+        return ERROR_DRM_LICENSE_RELEASE;
+    case StatusAidl::LICENSE_REQUEST_REJECTED:
+        return ERROR_DRM_LICENSE_REQUEST_REJECTED;
+    case StatusAidl::LICENSE_RESTORE_ERROR:
+        return ERROR_DRM_LICENSE_RESTORE;
+    case StatusAidl::LICENSE_STATE_ERROR:
+        return ERROR_DRM_LICENSE_STATE;
+    case StatusAidl::MALFORMED_CERTIFICATE:
+        return ERROR_DRM_CERTIFICATE_MALFORMED;
+    case StatusAidl::MEDIA_FRAMEWORK_ERROR:
+        return ERROR_DRM_MEDIA_FRAMEWORK;
+    case StatusAidl::MISSING_CERTIFICATE:
+        return ERROR_DRM_CERTIFICATE_MISSING;
+    case StatusAidl::PROVISIONING_CERTIFICATE_ERROR:
+        return ERROR_DRM_PROVISIONING_CERTIFICATE;
+    case StatusAidl::PROVISIONING_CONFIGURATION_ERROR:
+        return ERROR_DRM_PROVISIONING_CONFIG;
+    case StatusAidl::PROVISIONING_PARSE_ERROR:
+        return ERROR_DRM_PROVISIONING_PARSE;
+    case StatusAidl::PROVISIONING_REQUEST_REJECTED:
+        return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
+    case StatusAidl::RETRYABLE_PROVISIONING_ERROR:
+        return ERROR_DRM_PROVISIONING_RETRY;
+    case StatusAidl::SECURE_STOP_RELEASE_ERROR:
+        return ERROR_DRM_SECURE_STOP_RELEASE;
+    case StatusAidl::STORAGE_READ_FAILURE:
+        return ERROR_DRM_STORAGE_READ;
+    case StatusAidl::STORAGE_WRITE_FAILURE:
+        return ERROR_DRM_STORAGE_WRITE;
+
+    case StatusAidl::ERROR_DRM_UNKNOWN:
+    default:
+        return ERROR_DRM_UNKNOWN;
+    }
+    return ERROR_DRM_UNKNOWN;
+}
+
+template<typename T, typename U>
+status_t GetLogMessagesAidl(const std::shared_ptr<U> &obj, Vector<::V1_4::LogMessage> &logs) {
+    std::shared_ptr<T> plugin = obj;
+    if (obj == NULL) {
+        LOG2BW("%s obj is null", U::descriptor);
+    } else if (plugin == NULL) {
+        LOG2BW("Cannot cast %s obj to %s plugin", U::descriptor, T::descriptor);
+    }
+
+    std::vector<LogMessage> pluginLogsAidl;
+    if (plugin != NULL) {
+        if (!plugin->getLogMessages(&pluginLogsAidl).isOk()) {
+            LOG2BW("%s::getLogMessages remote call failed", T::descriptor);
+        }
+    }
+
+    std::vector<::V1_4::LogMessage> pluginLogs;
+    for (LogMessage log : pluginLogsAidl) {
+        ::V1_4::LogMessage logHidl;
+        logHidl.timeMs = log.timeMs;
+        // Skip the negative-value check on conversion; the enum has only 7 elements.
+        logHidl.priority = static_cast<::V1_4::LogPriority>((int32_t)log.priority);
+        logHidl.message = log.message;
+        pluginLogs.push_back(logHidl);
+    }
+
+    auto allLogs(gLogBuf.getLogs());
+    LOG2BD("framework logs size %zu; plugin logs size %zu",
+           allLogs.size(), pluginLogs.size());
+    std::copy(pluginLogs.begin(), pluginLogs.end(), std::back_inserter(allLogs));
+    std::sort(allLogs.begin(), allLogs.end(),
+              [](const ::V1_4::LogMessage &a, const ::V1_4::LogMessage &b) {
+                  return a.timeMs < b.timeMs;
+              });
+
+    logs.appendVector(allLogs);
+    return OK;
+}
+
 template<typename T, typename U>
 status_t GetLogMessages(const sp<U> &obj, Vector<::V1_4::LogMessage> &logs) {
     sp<T> plugin = T::castFrom(obj);
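
statusAidlToStatusT above follows the usual NDK binder convention: a failed ScopedAStatus whose exception code is EX_SERVICE_SPECIFIC carries the HAL's Status enum in the service-specific error, and every other failure (dead binder, transport error) collapses to DEAD_OBJECT. A minimal, self-contained sketch of that decision tree; the types and numeric codes below are placeholders, not the NDK API or the framework's real error values:

    #include <cstdint>
    #include <cstdio>

    // Placeholder types standing in for ndk::ScopedAStatus and the HAL Status enum.
    enum class HalStatus : int32_t { OK = 0, BAD_VALUE = 1, ERROR_DRM_NOT_PROVISIONED = 2 };
    struct FakeAStatus {
        bool ok;                 // transaction succeeded and the HAL returned OK
        bool serviceSpecific;    // the remote returned a HAL Status value
        int32_t serviceError;    // that Status value, when serviceSpecific is true
    };
    constexpr int32_t kOk = 0, kDeadObject = -32, kBadValue = -22,
                      kNotProvisioned = -2011, kUnknownError = -2000;

    int32_t toFrameworkStatus(const FakeAStatus& s) {
        if (s.ok) return kOk;
        if (!s.serviceSpecific) return kDeadObject;  // binder died / transport error
        switch (static_cast<HalStatus>(s.serviceError)) {
            case HalStatus::OK: return kOk;
            case HalStatus::BAD_VALUE: return kBadValue;
            case HalStatus::ERROR_DRM_NOT_PROVISIONED: return kNotProvisioned;
            default: return kUnknownError;  // anything unrecognized maps to a generic error
        }
    }

    int main() {
        FakeAStatus fromHal{false, true,
                            static_cast<int32_t>(HalStatus::ERROR_DRM_NOT_PROVISIONED)};
        std::printf("mapped status: %d\n", toFrameworkStatus(fromHal));
        return 0;
    }
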
diff --git a/drm/libmediadrm/tests/DrmMetrics_test.cpp b/drm/libmediadrm/tests/DrmMetrics_test.cpp
index f362d60..237a88b 100644
--- a/drm/libmediadrm/tests/DrmMetrics_test.cpp
+++ b/drm/libmediadrm/tests/DrmMetrics_test.cpp
@@ -83,8 +83,8 @@
   metrics.mProvideProvisionResponseCounter.Increment(OK);
   metrics.mGetDeviceUniqueIdCounter.Increment(OK);
 
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
-  metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::USABLE);
+  metrics.mEventCounter.Increment((uint32_t)EventType::PROVISION_REQUIRED);
 
   PersistableBundle bundle;
   DrmMetricsConsumer consumer(&bundle);
@@ -151,16 +151,16 @@
   metrics.mGetDeviceUniqueIdCounter.Increment(OK);
   metrics.mGetDeviceUniqueIdCounter.Increment(UNEXPECTED_NULL);
 
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::EXPIRED);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::OUTPUTNOTALLOWED);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::STATUSPENDING);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::INTERNALERROR);
-  metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
-  metrics.mEventCounter.Increment(EventType::KEY_NEEDED);
-  metrics.mEventCounter.Increment(EventType::KEY_EXPIRED);
-  metrics.mEventCounter.Increment(EventType::VENDOR_DEFINED);
-  metrics.mEventCounter.Increment(EventType::SESSION_RECLAIMED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::USABLE);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::EXPIRED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::OUTPUTNOTALLOWED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::STATUSPENDING);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::INTERNALERROR);
+  metrics.mEventCounter.Increment((uint32_t)EventType::PROVISION_REQUIRED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::KEY_NEEDED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::KEY_EXPIRED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::VENDOR_DEFINED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::SESSION_RECLAIMED);
 
   android::Vector<uint8_t> sessionId1;
   sessionId1.push_back(1);
@@ -284,16 +284,16 @@
   metrics.mGetDeviceUniqueIdCounter.Increment(OK);
   metrics.mGetDeviceUniqueIdCounter.Increment(UNEXPECTED_NULL);
 
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::USABLE);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::EXPIRED);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::OUTPUTNOTALLOWED);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::STATUSPENDING);
-  metrics.mKeyStatusChangeCounter.Increment(KeyStatusType::INTERNALERROR);
-  metrics.mEventCounter.Increment(EventType::PROVISION_REQUIRED);
-  metrics.mEventCounter.Increment(EventType::KEY_NEEDED);
-  metrics.mEventCounter.Increment(EventType::KEY_EXPIRED);
-  metrics.mEventCounter.Increment(EventType::VENDOR_DEFINED);
-  metrics.mEventCounter.Increment(EventType::SESSION_RECLAIMED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::USABLE);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::EXPIRED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::OUTPUTNOTALLOWED);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::STATUSPENDING);
+  metrics.mKeyStatusChangeCounter.Increment((uint32_t)KeyStatusType::INTERNALERROR);
+  metrics.mEventCounter.Increment((uint32_t)EventType::PROVISION_REQUIRED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::KEY_NEEDED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::KEY_EXPIRED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::VENDOR_DEFINED);
+  metrics.mEventCounter.Increment((uint32_t)EventType::SESSION_RECLAIMED);
 
   std::string serializedMetrics;
   ASSERT_EQ(OK, metrics.GetSerializedMetrics(&serializedMetrics));
diff --git a/drm/mediadrm/plugins/TEST_MAPPING b/drm/mediadrm/plugins/TEST_MAPPING
index 7bd1568..9919e90 100644
--- a/drm/mediadrm/plugins/TEST_MAPPING
+++ b/drm/mediadrm/plugins/TEST_MAPPING
@@ -1,16 +1,10 @@
 {
   "presubmit": [
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaDrmFrameworkTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "include-filter": "android.media.cts.MediaDrmClearkeyTest"
-        },
-        {
-          "include-filter": "android.media.cts.MediaDrmMetricsTest"
         }
       ]
     }
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
new file mode 100644
index 0000000..2d1f741
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -0,0 +1,71 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+    name: "aidl_clearkey_service_defaults",
+    vendor: true,
+
+    srcs: [
+        "CreatePluginFactories.cpp",
+        "CryptoPlugin.cpp",
+        "DrmFactory.cpp",
+        "DrmPlugin.cpp",
+    ],
+
+    relative_install_path: "hw",
+
+    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+
+    include_dirs: ["frameworks/av/include"],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcrypto",
+        "liblog",
+        "libprotobuf-cpp-lite",
+        "libutils",
+        "android.hardware.drm-V1-ndk",
+    ],
+
+    static_libs: [
+        "android.hardware.common-V2-ndk",
+        "libclearkeybase",
+    ],
+
+    local_include_dirs: ["include"],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+}
+
+cc_binary {
+    name: "android.hardware.drm-service.clearkey",
+    defaults: ["aidl_clearkey_service_defaults"],
+    srcs: ["Service.cpp"],
+    init_rc: ["android.hardware.drm-service.clearkey.rc"],
+    vintf_fragments: ["android.hardware.drm-service.clearkey.xml"],
+}
+
+cc_binary {
+    name: "android.hardware.drm-service-lazy.clearkey",
+    defaults: ["aidl_clearkey_service_defaults"],
+    overrides: ["android.hardware.drm-service.clearkey"],
+    srcs: ["ServiceLazy.cpp"],
+    init_rc: ["android.hardware.drm-service-lazy.clearkey.rc"],
+    vintf_fragments: ["android.hardware.drm-service.clearkey.xml"],
+}
+
+phony {
+    name: "android.hardware.drm@latest-service.clearkey",
+    required: [
+        "android.hardware.drm-service.clearkey",
+    ],
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/CreatePluginFactories.cpp b/drm/mediadrm/plugins/clearkey/aidl/CreatePluginFactories.cpp
new file mode 100644
index 0000000..4e6fe3a
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/CreatePluginFactories.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "CreatePluginFactories.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+std::shared_ptr<DrmFactory> createDrmFactory() {
+  return ::ndk::SharedRefBase::make<DrmFactory>();
+}
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
new file mode 100644
index 0000000..201cf02
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
@@ -0,0 +1,239 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-CryptoPlugin"
+
+#include <utils/Log.h>
+#include <cerrno>
+#include <cstring>
+
+#include "CryptoPlugin.h"
+#include "SessionLibrary.h"
+#include "AidlUtils.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+using ::aidl::android::hardware::drm::Status;
+
+::ndk::ScopedAStatus CryptoPlugin::decrypt(const DecryptArgs& in_args, int32_t* _aidl_return) {
+    const char* detailedError = "";
+
+    *_aidl_return = 0;
+    if (in_args.secure) {
+        detailedError = "secure decryption is not supported with ClearKey";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
+    if (mSharedBufferMap.find(in_args.source.bufferId) == mSharedBufferMap.end()) {
+        detailedError = "source decrypt buffer base not set";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    const auto NON_SECURE = DestinationBuffer::Tag::nonsecureMemory;
+    if (in_args.destination.getTag() != NON_SECURE) {
+        detailedError = "destination type not supported";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    const SharedBuffer& destBuffer = in_args.destination.get<NON_SECURE>();
+    if (mSharedBufferMap.find(destBuffer.bufferId) == mSharedBufferMap.end()) {
+        detailedError = "destination decrypt buffer base not set";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    auto src = mSharedBufferMap[in_args.source.bufferId];
+    if (src->mBase == nullptr) {
+        detailedError = "source is a nullptr";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    size_t totalSize = 0;
+    if (__builtin_add_overflow(in_args.source.offset, in_args.offset, &totalSize) ||
+        __builtin_add_overflow(totalSize, in_args.source.size, &totalSize) ||
+        totalSize > src->mSize) {
+        android_errorWriteLog(0x534e4554, "176496160");
+        detailedError = "invalid buffer size";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    // destination type must be non-secure shared memory
+    auto dest = mSharedBufferMap[destBuffer.bufferId];
+    if (dest->mBase == nullptr) {
+        detailedError = "destination is a nullptr";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+
+    totalSize = 0;
+    if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
+        totalSize > dest->mSize) {
+        android_errorWriteLog(0x534e4554, "176444622");
+        detailedError = "invalid buffer size";
+        return toNdkScopedAStatus(Status::ERROR_DRM_FRAME_TOO_LARGE, detailedError);
+    }
+
+    // Calculate the output buffer size and determine if any subsamples are
+    // encrypted.
+    uint8_t* srcPtr = src->mBase + in_args.source.offset + in_args.offset;
+    uint8_t* destPtr = dest->mBase + destBuffer.offset;
+    size_t destSize = 0;
+    size_t srcSize = 0;
+    bool haveEncryptedSubsamples = false;
+    for (size_t i = 0; i < in_args.subSamples.size(); i++) {
+        const SubSample& subSample = in_args.subSamples[i];
+        if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize) ||
+            __builtin_add_overflow(srcSize, subSample.numBytesOfClearData, &srcSize)) {
+            detailedError = "subsample clear size overflow";
+            return toNdkScopedAStatus(Status::ERROR_DRM_FRAME_TOO_LARGE, detailedError);
+        }
+        if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize) ||
+            __builtin_add_overflow(srcSize, subSample.numBytesOfEncryptedData, &srcSize)) {
+            detailedError = "subsample encrypted size overflow";
+            return toNdkScopedAStatus(Status::ERROR_DRM_FRAME_TOO_LARGE, detailedError);
+        }
+        if (subSample.numBytesOfEncryptedData > 0) {
+            haveEncryptedSubsamples = true;
+        }
+    }
+
+    if (destSize > destBuffer.size || srcSize > in_args.source.size) {
+        detailedError = "subsample sum too large";
+        return toNdkScopedAStatus(Status::ERROR_DRM_FRAME_TOO_LARGE, detailedError);
+    }
+
+    if (in_args.mode == Mode::UNENCRYPTED) {
+        if (haveEncryptedSubsamples) {
+            detailedError = "Encrypted subsamples found in allegedly unencrypted data.";
+            return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+        }
+
+        size_t offset = 0;
+        for (size_t i = 0; i < in_args.subSamples.size(); ++i) {
+            const SubSample& subSample = in_args.subSamples[i];
+            if (subSample.numBytesOfClearData != 0) {
+                memcpy(reinterpret_cast<uint8_t*>(destPtr) + offset,
+                       reinterpret_cast<const uint8_t*>(srcPtr) + offset,
+                       subSample.numBytesOfClearData);
+                offset += subSample.numBytesOfClearData;
+            }
+        }
+
+        *_aidl_return = static_cast<ssize_t>(offset);
+        return toNdkScopedAStatus(Status::OK);
+    } else if (in_args.mode == Mode::AES_CTR) {
+        size_t bytesDecrypted{};
+        std::vector<int32_t> clearDataLengths;
+        std::vector<int32_t> encryptedDataLengths;
+        for (auto ss : in_args.subSamples) {
+            clearDataLengths.push_back(ss.numBytesOfClearData);
+            encryptedDataLengths.push_back(ss.numBytesOfEncryptedData);
+        }
+        auto res =
+                mSession->decrypt(in_args.keyId.data(), in_args.iv.data(),
+                                  srcPtr, static_cast<uint8_t*>(destPtr),
+                                  clearDataLengths, encryptedDataLengths,
+                                  &bytesDecrypted);
+        if (res == clearkeydrm::OK) {
+            *_aidl_return = static_cast<ssize_t>(bytesDecrypted);
+            return toNdkScopedAStatus(Status::OK);
+        } else {
+            *_aidl_return = 0;
+            detailedError = "Decryption Error";
+            return toNdkScopedAStatus(static_cast<Status>(res), detailedError);
+        }
+    } else {
+        *_aidl_return = 0;
+        detailedError = "selected encryption mode is not supported by the ClearKey DRM Plugin";
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
+    }
+}
+
+::ndk::ScopedAStatus CryptoPlugin::getLogMessages(
+        std::vector<::aidl::android::hardware::drm::LogMessage>* _aidl_return) {
+    using std::chrono::duration_cast;
+    using std::chrono::milliseconds;
+    using std::chrono::system_clock;
+
+    auto timeMillis = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
+
+    std::vector<::aidl::android::hardware::drm::LogMessage> logs = {
+            {timeMillis, ::aidl::android::hardware::drm::LogPriority::ERROR,
+             std::string("Not implemented")}};
+    *_aidl_return = logs;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus CryptoPlugin::notifyResolution(int32_t in_width, int32_t in_height) {
+    UNUSED(in_width);
+    UNUSED(in_height);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus CryptoPlugin::requiresSecureDecoderComponent(const std::string& in_mime,
+                                                                  bool* _aidl_return) {
+    UNUSED(in_mime);
+    *_aidl_return = false;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus CryptoPlugin::setMediaDrmSession(const std::vector<uint8_t>& in_sessionId) {
+    Status status = Status::OK;
+    if (!in_sessionId.size()) {
+        mSession = nullptr;
+    } else {
+        mSession = SessionLibrary::get()->findSession(in_sessionId);
+        if (!mSession.get()) {
+            status = Status::ERROR_DRM_SESSION_NOT_OPENED;
+        }
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus CryptoPlugin::setSharedBufferBase(const SharedBuffer& in_base) {
+    std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
+    mSharedBufferMap[in_base.bufferId] = std::make_shared<SharedBufferBase>(in_base);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+SharedBufferBase::SharedBufferBase(const SharedBuffer& mem)
+        : mBase(nullptr),
+          mSize(mem.size) {
+    if (mem.handle.fds.empty()) {
+        return;
+    }
+    auto fd = mem.handle.fds[0].get();
+    auto addr = mmap(nullptr, mem.size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+    if (addr == MAP_FAILED) {
+        ALOGE("mmap err: fd %d; errno %s", fd, strerror(errno));
+    } else {
+        mBase = static_cast<uint8_t*>(addr);
+    }
+}
+
+SharedBufferBase::~SharedBufferBase() {
+    if (mBase && munmap(mBase, mSize)) {
+        ALOGE("munmap err: base %p; errno %s",
+              mBase, strerror(errno));
+    }
+}
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
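
CryptoPlugin::decrypt above validates every caller-controlled offset and subsample length with __builtin_add_overflow before touching the shared buffers, so a length that wraps around cannot slip past the bounds checks (the android_errorWriteLog calls reference the security bugs that motivated them). A self-contained sketch of the same checked-accumulation pattern, using the GCC/Clang builtin with hypothetical subsample sizes:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Hypothetical per-subsample byte counts, mirroring SubSample's clear/encrypted split.
    struct SubSampleSizes {
        uint32_t clearBytes;
        uint32_t encryptedBytes;
    };

    // Returns false if the running total would overflow or exceed the buffer size.
    static bool totalFitsInBuffer(const std::vector<SubSampleSizes>& subSamples,
                                  size_t bufferSize, size_t* totalOut) {
        size_t total = 0;
        for (const auto& ss : subSamples) {
            if (__builtin_add_overflow(total, ss.clearBytes, &total) ||
                __builtin_add_overflow(total, ss.encryptedBytes, &total)) {
                return false;  // the sum wrapped around
            }
        }
        if (total > bufferSize) return false;
        *totalOut = total;
        return true;
    }

    int main() {
        const std::vector<SubSampleSizes> subSamples = {{16, 4080}, {0, 0xFFFFFFFFu}};
        size_t total = 0;
        const bool ok = totalFitsInBuffer(subSamples, size_t{1} << 20, &total);
        std::printf("fits: %s\n", ok ? "yes" : "no");
        return 0;
    }
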
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
new file mode 100644
index 0000000..30db4c0
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-DrmFactory"
+
+#include <utils/Log.h>
+
+#include "DrmFactory.h"
+
+#include "ClearKeyUUID.h"
+#include "CryptoPlugin.h"
+#include "DrmPlugin.h"
+#include "MimeTypeStdStr.h"
+#include "SessionLibrary.h"
+#include "AidlUtils.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+using std::string;
+using std::vector;
+
+using ::aidl::android::hardware::drm::SecurityLevel;
+using ::aidl::android::hardware::drm::Status;
+using ::aidl::android::hardware::drm::Uuid;
+
+::ndk::ScopedAStatus DrmFactory::createDrmPlugin(
+        const Uuid& in_uuid, const string& in_appPackageName,
+        std::shared_ptr<::aidl::android::hardware::drm::IDrmPlugin>* _aidl_return) {
+    UNUSED(in_appPackageName);
+
+    if (!isClearKeyUUID(in_uuid.uuid.data())) {
+        ALOGE("Clearkey Drm HAL: failed to create drm plugin, "
+              "invalid crypto scheme");
+        *_aidl_return = nullptr;
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::shared_ptr<DrmPlugin> plugin =
+            ::ndk::SharedRefBase::make<DrmPlugin>(SessionLibrary::get());
+    *_aidl_return = plugin;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmFactory::createCryptoPlugin(
+        const Uuid& in_uuid, const std::vector<uint8_t>& in_initData,
+        std::shared_ptr<::aidl::android::hardware::drm::ICryptoPlugin>* _aidl_return) {
+    if (!isClearKeyUUID(in_uuid.uuid.data())) {
+        ALOGE("Clearkey Drm HAL: failed to create crypto plugin, "
+              "invalid crypto scheme");
+        *_aidl_return = nullptr;
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::shared_ptr<CryptoPlugin> plugin = ::ndk::SharedRefBase::make<CryptoPlugin>(in_initData);
+    Status status = plugin->getInitStatus();
+    if (status != Status::OK) {
+        plugin.reset();
+        plugin = nullptr;
+    }
+    *_aidl_return = plugin;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmFactory::getSupportedCryptoSchemes(CryptoSchemes* _aidl_return) {
+    CryptoSchemes schemes{};
+    for (const auto& uuid : ::aidl::android::hardware::drm::clearkey::getSupportedCryptoSchemes()) {
+        schemes.uuids.push_back({uuid});
+    }
+    for (auto mime : {kIsoBmffVideoMimeType, kIsoBmffAudioMimeType, kCencInitDataFormat,
+                      kWebmVideoMimeType, kWebmAudioMimeType, kWebmInitDataFormat}) {
+        const auto minLevel = SecurityLevel::SW_SECURE_CRYPTO;
+        const auto maxLevel = SecurityLevel::SW_SECURE_CRYPTO;
+        schemes.mimeTypes.push_back({mime, minLevel, maxLevel});
+    }
+    *_aidl_return = schemes;
+    return ndk::ScopedAStatus::ok();
+}
+
+binder_status_t DrmFactory::dump(int fd, const char** args, uint32_t numArgs) {
+    UNUSED(args);
+    UNUSED(numArgs);
+
+    if (fd < 0) {
+        ALOGE("%s: negative fd", __FUNCTION__);
+        return STATUS_BAD_VALUE;
+    }
+
+    uint32_t currentSessions = SessionLibrary::get()->numOpenSessions();
+    dprintf(fd, "current open sessions: %u\n", currentSessions);
+
+    return STATUS_OK;
+}
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+} // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
new file mode 100644
index 0000000..ea51e9d
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -0,0 +1,1029 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-DrmPlugin"
+
+#include <aidl/android/hardware/drm/DrmMetric.h>
+#include <utils/Log.h>
+
+#include <inttypes.h>
+#include <stdio.h>
+#include <chrono>
+#include <set>
+
+#include "AidlUtils.h"
+#include "ClearKeyDrmProperties.h"
+#include "DrmPlugin.h"
+#include "Session.h"
+#include "Utils.h"
+#include "AidlClearKeryProperties.h"
+
+namespace {
+const std::string kKeySetIdPrefix("ckid");
+const int kKeySetIdLength = 16;
+const int kSecureStopIdStart = 100;
+const std::string kOfflineLicense("\"type\":\"persistent-license\"");
+const std::string kStreaming("Streaming");
+const std::string kTemporaryLicense("\"type\":\"temporary\"");
+const std::string kTrue("True");
+
+const std::string kQueryKeyLicenseType("LicenseType");
+// Value: "Streaming" or "Offline"
+const std::string kQueryKeyPlayAllowed("PlayAllowed");
+// Value: "True" or "False"
+const std::string kQueryKeyRenewAllowed("RenewAllowed");
+// Value: "True" or "False"
+
+const int kSecureStopIdSize = 10;
+
+std::vector<uint8_t> uint32ToVector(uint32_t value) {
+    // kSecureStopIdSize bytes hold the decimal id; the largest uint32_t value
+    // (4294967295) has 10 digits, so snprintf would truncate its last digit to
+    // fit the null terminator.
+    char buffer[kSecureStopIdSize];
+    memset(buffer, 0, kSecureStopIdSize);
+    snprintf(buffer, kSecureStopIdSize, "%" PRIu32, value);
+    return std::vector<uint8_t>(buffer, buffer + sizeof(buffer));
+}
+
+};  // unnamed namespace
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+using ::android::Mutex;
+
+DrmPlugin::DrmPlugin(SessionLibrary* sessionLibrary)
+    : mSessionLibrary(sessionLibrary),
+      mOpenSessionOkCount(0),
+      mCloseSessionOkCount(0),
+      mCloseSessionNotOpenedCount(0),
+      mNextSecureStopId(kSecureStopIdStart),
+      mMockError(Status::OK) {
+    mPlayPolicy.clear();
+    initProperties();
+    mSecureStops.clear();
+    mReleaseKeysMap.clear();
+    std::srand(std::time(nullptr));
+}
+
+void DrmPlugin::initProperties() {
+    mStringProperties.clear();
+    mStringProperties[kVendorKey] = kAidlVendorValue;
+    mStringProperties[kVersionKey] = kAidlVersionValue;
+    mStringProperties[kPluginDescriptionKey] = kAidlPluginDescriptionValue;
+    mStringProperties[kAlgorithmsKey] = kAidlAlgorithmsValue;
+    mStringProperties[kListenerTestSupportKey] = kAidlListenerTestSupportValue;
+    mStringProperties[kDrmErrorTestKey] = kAidlDrmErrorTestValue;
+    mStringProperties[kAidlVersionKey] = kAidlVersionValue;
+
+    std::vector<uint8_t> valueVector;
+    valueVector.clear();
+    valueVector.insert(valueVector.end(), kTestDeviceIdData,
+                       kTestDeviceIdData + sizeof(kTestDeviceIdData) / sizeof(uint8_t));
+    mByteArrayProperties[kDeviceIdKey] = valueVector;
+
+    valueVector.clear();
+    valueVector.insert(valueVector.end(), kMetricsData,
+                       kMetricsData + sizeof(kMetricsData) / sizeof(uint8_t));
+    mByteArrayProperties[kMetricsKey] = valueVector;
+}
+
+// The secure stop in the ClearKey implementation is not installed securely.
+// This function merely creates a test environment for exercising the secure
+// stop APIs. The content of this secure stop is implementation dependent; the
+// ClearKey secure stop does not serve as a reference implementation.
+void DrmPlugin::installSecureStop(const std::vector<uint8_t>& sessionId) {
+    ::android::Mutex::Autolock lock(mSecureStopLock);
+
+    ClearkeySecureStop clearkeySecureStop;
+    clearkeySecureStop.id = uint32ToVector(++mNextSecureStopId);
+    clearkeySecureStop.data.assign(sessionId.begin(), sessionId.end());
+
+    mSecureStops.insert(std::pair<std::vector<uint8_t>, ClearkeySecureStop>(clearkeySecureStop.id,
+                                                                            clearkeySecureStop));
+}
+
+::ndk::ScopedAStatus DrmPlugin::closeSession(const std::vector<uint8_t>& in_sessionId) {
+    if (in_sessionId.size() == 0) {
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    ::android::sp<Session> session = mSessionLibrary->findSession(in_sessionId);
+    if (session.get()) {
+        mSessionLibrary->destroySession(session);
+        if (session->getMockError() != clearkeydrm::OK) {
+            sendSessionLostState(in_sessionId);
+            return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE);
+        }
+        mCloseSessionOkCount++;
+        return toNdkScopedAStatus(Status::OK);
+    }
+    mCloseSessionNotOpenedCount++;
+    return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
+}
+
+::ndk::ScopedAStatus DrmPlugin::decrypt(const std::vector<uint8_t>& in_sessionId,
+                                        const std::vector<uint8_t>& in_keyId,
+                                        const std::vector<uint8_t>& in_input,
+                                        const std::vector<uint8_t>& in_iv,
+                                        std::vector<uint8_t>* _aidl_return) {
+    *_aidl_return = {};
+    if (in_sessionId.size() == 0 || in_keyId.size() == 0 || in_input.size() == 0 ||
+        in_iv.size() == 0) {
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::encrypt(const std::vector<uint8_t>& in_sessionId,
+                                        const std::vector<uint8_t>& in_keyId,
+                                        const std::vector<uint8_t>& in_input,
+                                        const std::vector<uint8_t>& in_iv,
+                                        std::vector<uint8_t>* _aidl_return) {
+    *_aidl_return = {};
+    if (in_sessionId.size() == 0 || in_keyId.size() == 0 || in_input.size() == 0 ||
+        in_iv.size() == 0) {
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getHdcpLevels(
+        ::aidl::android::hardware::drm::HdcpLevels* _aidl_return) {
+    _aidl_return->connectedLevel = HdcpLevel::HDCP_NONE;
+    _aidl_return->maxLevel = HdcpLevel::HDCP_NO_OUTPUT;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getKeyRequest(
+        const std::vector<uint8_t>& in_scope, const std::vector<uint8_t>& in_initData,
+        const std::string& in_mimeType, ::aidl::android::hardware::drm::KeyType in_keyType,
+        const std::vector<::aidl::android::hardware::drm::KeyValue>& in_optionalParameters,
+        ::aidl::android::hardware::drm::KeyRequest* _aidl_return) {
+    UNUSED(in_optionalParameters);
+
+    KeyRequestType keyRequestType = KeyRequestType::UNKNOWN;
+    std::string defaultUrl("https://default.url");
+
+    _aidl_return->request = {};
+    _aidl_return->requestType = keyRequestType;
+    _aidl_return->defaultUrl = defaultUrl;
+
+    if (in_scope.size() == 0 ||
+        (in_keyType != KeyType::STREAMING && in_keyType != KeyType::OFFLINE &&
+         in_keyType != KeyType::RELEASE)) {
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    const std::vector<uint8_t> scopeId = in_scope;
+    ::android::sp<Session> session;
+    std::set<KeyType> init_types{KeyType::STREAMING, KeyType::OFFLINE};
+    if (init_types.count(in_keyType)) {
+        std::vector<uint8_t> sessionId(scopeId.begin(), scopeId.end());
+        session = mSessionLibrary->findSession(sessionId);
+        if (!session.get()) {
+            return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
+        } else if (session->getMockError() != clearkeydrm::OK) {
+            return toNdkScopedAStatus(session->getMockError());
+        }
+        keyRequestType = KeyRequestType::INITIAL;
+    }
+
+    std::vector<uint8_t> request = {};
+    auto keyType = static_cast<CdmKeyType>(in_keyType);
+    auto status = session->getKeyRequest(in_initData, in_mimeType, keyType, &request);
+
+    if (in_keyType == KeyType::RELEASE) {
+        std::vector<uint8_t> keySetId(scopeId.begin(), scopeId.end());
+        std::string requestString(request.begin(), request.end());
+        if (requestString.find(kOfflineLicense) != std::string::npos) {
+            std::string emptyResponse;
+            std::string keySetIdString(keySetId.begin(), keySetId.end());
+            if (!mFileHandle.StoreLicense(keySetIdString, DeviceFiles::kLicenseStateReleasing,
+                                          emptyResponse)) {
+                ALOGE("Problem releasing offline license");
+                return toNdkScopedAStatus(Status::ERROR_DRM_UNKNOWN);
+            }
+            if (mReleaseKeysMap.find(keySetIdString) == mReleaseKeysMap.end()) {
+                ::android::sp<Session> session = mSessionLibrary->createSession();
+                mReleaseKeysMap[keySetIdString] = session->sessionId();
+            } else {
+                ALOGI("key is in use, ignore release request");
+            }
+        } else {
+            ALOGE("Offline license not found, nothing to release");
+        }
+        keyRequestType = KeyRequestType::RELEASE;
+    }
+    _aidl_return->request = request;
+    _aidl_return->requestType = keyRequestType;
+    _aidl_return->defaultUrl = defaultUrl;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getLogMessages(
+        std::vector<::aidl::android::hardware::drm::LogMessage>* _aidl_return) {
+    using std::chrono::duration_cast;
+    using std::chrono::milliseconds;
+    using std::chrono::system_clock;
+
+    auto timeMillis = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
+
+    std::vector<::aidl::android::hardware::drm::LogMessage> logs = {
+            {timeMillis, ::aidl::android::hardware::drm::LogPriority::ERROR,
+             std::string("Not implemented")}};
+    *_aidl_return = logs;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getMetrics(
+        std::vector<::aidl::android::hardware::drm::DrmMetricGroup>* _aidl_return) {
+    // Set the open session count metric.
+    DrmMetricNamedValue openSessionOkAttribute = {"status", static_cast<int64_t>(Status::OK)};
+    DrmMetricNamedValue openSessionMetricValue = {"count", mOpenSessionOkCount};
+    DrmMetric openSessionMetric = {
+            "open_session", {openSessionOkAttribute}, {openSessionMetricValue}};
+
+    // Set the close session count metric.
+    DrmMetricNamedValue closeSessionOkAttribute = {"status", static_cast<int64_t>(Status::OK)};
+    DrmMetricNamedValue closeSessionMetricValue = {"count", mCloseSessionOkCount};
+    DrmMetric closeSessionMetric = {
+            "close_session", {closeSessionOkAttribute}, {closeSessionMetricValue}};
+
+    // Set the close session, not opened metric.
+    DrmMetricNamedValue closeSessionNotOpenedAttribute = {"status",
+            static_cast<int64_t>(Status::ERROR_DRM_SESSION_NOT_OPENED)};
+    DrmMetricNamedValue closeSessionNotOpenedMetricValue = {"count", mCloseSessionNotOpenedCount};
+    DrmMetric closeSessionNotOpenedMetric = {
+            "close_session", {closeSessionNotOpenedAttribute}, {closeSessionNotOpenedMetricValue}};
+
+    // Set the setPlaybackId metric.
+    std::vector<DrmMetricNamedValue> sids = {};
+    std::vector<DrmMetricNamedValue> playbackIds = {};
+    for (const auto& [key, value] : mPlaybackId) {
+        std::string sid(key.begin(), key.end());
+        DrmMetricNamedValue sessionIdAttribute = {"sid", sid};
+        sids.push_back(sessionIdAttribute);
+
+        DrmMetricNamedValue playbackIdMetricValue = {"playbackId", value};
+        playbackIds.push_back(playbackIdMetricValue);
+    }
+    DrmMetric setPlaybackIdMetric = {"set_playback_id", sids, playbackIds};
+
+    DrmMetricGroup metrics = {{openSessionMetric, closeSessionMetric, closeSessionNotOpenedMetric,
+            setPlaybackIdMetric}};
+
+    *_aidl_return = {metrics};
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getNumberOfSessions(
+        ::aidl::android::hardware::drm::NumberOfSessions* _aidl_return) {
+    _aidl_return->currentSessions = mSessionLibrary->numOpenSessions();
+    _aidl_return->maxSessions = 10;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getOfflineLicenseKeySetIds(
+        std::vector<::aidl::android::hardware::drm::KeySetId>* _aidl_return) {
+    std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
+    std::vector<KeySetId> keySetIds = {};
+    if (mMockError != Status::OK) {
+        *_aidl_return = keySetIds;
+        return toNdkScopedAStatus(toMockStatus(mMockError));
+    }
+    for (const auto& name : licenseNames) {
+        std::vector<uint8_t> keySetId(name.begin(), name.end());
+        KeySetId id = {};
+        id.keySetId = keySetId;
+        keySetIds.push_back(id);
+    }
+    *_aidl_return = keySetIds;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getOfflineLicenseState(
+        const ::aidl::android::hardware::drm::KeySetId& in_keySetId,
+        ::aidl::android::hardware::drm::OfflineLicenseState* _aidl_return) {
+    std::string licenseName(in_keySetId.keySetId.begin(), in_keySetId.keySetId.end());
+    DeviceFiles::LicenseState state;
+    std::string license;
+    OfflineLicenseState licenseState = OfflineLicenseState::UNKNOWN;
+    Status status = Status::OK;
+    if (mMockError != Status::OK) {
+        *_aidl_return = licenseState;
+        return toNdkScopedAStatus(toMockStatus(mMockError));
+    } else if (mFileHandle.RetrieveLicense(licenseName, &state, &license)) {
+        switch (state) {
+            case DeviceFiles::kLicenseStateActive:
+                licenseState = OfflineLicenseState::USABLE;
+                break;
+            case DeviceFiles::kLicenseStateReleasing:
+                licenseState = OfflineLicenseState::INACTIVE;
+                break;
+            case DeviceFiles::kLicenseStateUnknown:
+                licenseState = OfflineLicenseState::UNKNOWN;
+                break;
+        }
+    } else {
+        status = Status::BAD_VALUE;
+    }
+    *_aidl_return = licenseState;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getPropertyByteArray(const std::string& in_propertyName,
+                                                     std::vector<uint8_t>* _aidl_return) {
+    std::map<std::string, std::vector<uint8_t>>::iterator itr =
+            mByteArrayProperties.find(std::string(in_propertyName.c_str()));
+    Status status = Status::OK;
+    if (itr != mByteArrayProperties.end()) {
+        *_aidl_return = itr->second;
+    } else {
+        ALOGE("App requested unknown property: %s", in_propertyName.c_str());
+        status = Status::BAD_VALUE;
+        *_aidl_return = {};
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getPropertyString(const std::string& in_propertyName,
+                                                  std::string* _aidl_return) {
+    std::string name(in_propertyName.c_str());
+    std::string value;
+    Status status = Status::OK;
+
+    if (name == kVendorKey) {
+        value = mStringProperties[kVendorKey];
+    } else if (name == kVersionKey) {
+        value = mStringProperties[kVersionKey];
+    } else if (name == kPluginDescriptionKey) {
+        value = mStringProperties[kPluginDescriptionKey];
+    } else if (name == kAlgorithmsKey) {
+        value = mStringProperties[kAlgorithmsKey];
+    } else if (name == kListenerTestSupportKey) {
+        value = mStringProperties[kListenerTestSupportKey];
+    } else if (name == kDrmErrorTestKey) {
+        value = mStringProperties[kDrmErrorTestKey];
+    } else if (name == kAidlVersionKey) {
+        value = mStringProperties[kAidlVersionKey];
+    } else {
+        ALOGE("App requested unknown string property %s", name.c_str());
+        status = Status::ERROR_DRM_CANNOT_HANDLE;
+    }
+    *_aidl_return = value;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getProvisionRequest(
+        const std::string& in_certificateType, const std::string& in_certificateAuthority,
+        ::aidl::android::hardware::drm::ProvisionRequest* _aidl_return) {
+    UNUSED(in_certificateType);
+    UNUSED(in_certificateAuthority);
+    _aidl_return->request = {};
+    _aidl_return->defaultUrl = {};
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getSecureStop(
+        const ::aidl::android::hardware::drm::SecureStopId& in_secureStopId,
+        ::aidl::android::hardware::drm::SecureStop* _aidl_return) {
+    std::vector<uint8_t> stop = {};
+
+    mSecureStopLock.lock();
+    auto itr = mSecureStops.find(in_secureStopId.secureStopId);
+    if (itr != mSecureStops.end()) {
+        ClearkeySecureStop clearkeyStop = itr->second;
+        stop.insert(stop.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
+        stop.insert(stop.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
+    }
+    mSecureStopLock.unlock();
+
+    SecureStop secureStop = {};
+    Status status = Status::OK;
+    if (!stop.empty()) {
+        secureStop.opaqueData = stop;
+    } else {
+        status = Status::BAD_VALUE;
+    }
+    *_aidl_return = secureStop;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getSecureStopIds(
+        std::vector<::aidl::android::hardware::drm::SecureStopId>* _aidl_return) {
+    mSecureStopLock.lock();
+    std::vector<::aidl::android::hardware::drm::SecureStopId> ids;
+    for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
+        SecureStopId id;
+        id.secureStopId = itr->first;
+        ids.push_back(id);
+    }
+    mSecureStopLock.unlock();
+
+    *_aidl_return = ids;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getSecureStops(
+        std::vector<::aidl::android::hardware::drm::SecureStop>* _aidl_return) {
+    mSecureStopLock.lock();
+    std::vector<::aidl::android::hardware::drm::SecureStop> stops;
+    for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
+        ClearkeySecureStop clearkeyStop = itr->second;
+        std::vector<uint8_t> stop{};
+        stop.insert(stop.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
+        stop.insert(stop.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
+
+        SecureStop secureStop;
+        secureStop.opaqueData = stop;
+        stops.push_back(secureStop);
+    }
+    mSecureStopLock.unlock();
+
+    *_aidl_return = stops;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::getSecurityLevel(
+        const std::vector<uint8_t>& in_sessionId,
+        ::aidl::android::hardware::drm::SecurityLevel* _aidl_return) {
+    if (in_sessionId.size() == 0) {
+        *_aidl_return = ::aidl::android::hardware::drm::SecurityLevel::UNKNOWN;
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::vector<uint8_t> sid = in_sessionId;
+    ::android::sp<Session> session = mSessionLibrary->findSession(sid);
+    if (!session.get()) {
+        *_aidl_return = SecurityLevel::UNKNOWN;
+        return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
+    }
+
+    std::map<std::vector<uint8_t>, ::aidl::android::hardware::drm::SecurityLevel>::iterator itr =
+            mSecurityLevel.find(sid);
+    if (itr == mSecurityLevel.end()) {
+        ALOGE("Session id not found");
+        *_aidl_return = SecurityLevel::UNKNOWN;
+        return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE);
+    }
+
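+    // Clearkey supports at most SW_SECURE_CRYPTO (see setSecurityLevel below), so
+    // that level is reported regardless of the value stored in mSecurityLevel.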
+    *_aidl_return = SecurityLevel::SW_SECURE_CRYPTO;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::openSession(
+        ::aidl::android::hardware::drm::SecurityLevel in_securityLevel,
+        std::vector<uint8_t>* _aidl_return) {
+    ::android::sp<Session> session = mSessionLibrary->createSession();
+    processMockError(session);
+    std::vector<uint8_t> sessionId = session->sessionId();
+
+    Status status = setSecurityLevel(sessionId, in_securityLevel);
+    if (status == Status::OK) {
+        mOpenSessionOkCount++;
+    } else {
+        mSessionLibrary->destroySession(session);
+        sessionId.clear();
+    }
+    *_aidl_return = sessionId;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::provideKeyResponse(
+        const std::vector<uint8_t>& in_scope, const std::vector<uint8_t>& in_response,
+        ::aidl::android::hardware::drm::KeySetId* _aidl_return) {
+    if (in_scope.size() == 0 || in_response.size() == 0) {
+        // Returns empty keySetId
+        *_aidl_return = {};
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::string responseString(reinterpret_cast<const char*>(in_response.data()),
+                               in_response.size());
+    const std::vector<uint8_t> scopeId = in_scope;
+    std::vector<uint8_t> sessionId = {};
+    std::string keySetId;
+
+    bool isOfflineLicense = responseString.find(kOfflineLicense) != std::string::npos;
+    if (scopeId.size() < kKeySetIdPrefix.size()) {
+        android_errorWriteLog(0x534e4554, "144507096");
+        *_aidl_return = {};
+        return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+    }
+    bool isRelease = (memcmp(scopeId.data(), kKeySetIdPrefix.data(), kKeySetIdPrefix.size()) == 0);
+    if (isRelease) {
+        keySetId.assign(scopeId.begin(), scopeId.end());
+
+        auto iter = mReleaseKeysMap.find(std::string(keySetId.begin(), keySetId.end()));
+        if (iter != mReleaseKeysMap.end()) {
+            sessionId.assign(iter->second.begin(), iter->second.end());
+        }
+    } else {
+        sessionId.assign(scopeId.begin(), scopeId.end());
+        // A non-offline license returns an empty keySetId
+        keySetId.clear();
+    }
+
+    ::android::sp<Session> session = mSessionLibrary->findSession(sessionId);
+    if (!session.get()) {
+        *_aidl_return = {};
+        return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
+    }
+    setPlayPolicy();
+
+    auto res = session->provideKeyResponse(in_response);
+    if (res == clearkeydrm::OK) {
+        if (isOfflineLicense) {
+            if (isRelease) {
+                mFileHandle.DeleteLicense(keySetId);
+                mSessionLibrary->destroySession(session);
+            } else {
+                if (!makeKeySetId(&keySetId)) {
+                    *_aidl_return = {};
+                    return toNdkScopedAStatus(Status::ERROR_DRM_UNKNOWN);
+                }
+
+                bool ok = mFileHandle.StoreLicense(
+                        keySetId, DeviceFiles::kLicenseStateActive,
+                        std::string(in_response.begin(), in_response.end()));
+                if (!ok) {
+                    ALOGE("Failed to store offline license");
+                }
+            }
+        }
+
+        // Test calling AMediaDrm listeners.
+        sendEvent(EventType::VENDOR_DEFINED, sessionId, sessionId);
+
+        sendExpirationUpdate(sessionId, 100);
+
+        std::vector<KeyStatus> keysStatus = {};
+        KeyStatus keyStatus;
+
+        std::vector<uint8_t> keyId1 = {0xA, 0xB, 0xC};
+        keyStatus.keyId = keyId1;
+        keyStatus.type = KeyStatusType::USABLE;
+        keysStatus.push_back(keyStatus);
+
+        std::vector<uint8_t> keyId2 = {0xD, 0xE, 0xF};
+        keyStatus.keyId = keyId2;
+        keyStatus.type = KeyStatusType::EXPIRED;
+        keysStatus.push_back(keyStatus);
+
+        std::vector<uint8_t> keyId3 = {0x0, 0x1, 0x2};
+        keyStatus.keyId = keyId3;
+        keyStatus.type = KeyStatusType::USABLE_IN_FUTURE;
+        keysStatus.push_back(keyStatus);
+
+        sendKeysChange(sessionId, keysStatus, true);
+
+        installSecureStop(sessionId);
+    } else {
+        ALOGE("provideKeyResponse returns error=%d", res);
+    }
+
+    std::vector<uint8_t> keySetIdVec(keySetId.begin(), keySetId.end());
+    _aidl_return->keySetId = keySetIdVec;
+    return toNdkScopedAStatus(res);
+}
+
+::ndk::ScopedAStatus DrmPlugin::provideProvisionResponse(
+        const std::vector<uint8_t>& in_response,
+        ::aidl::android::hardware::drm::ProvideProvisionResponseResult* _aidl_return) {
+    Status status = Status::ERROR_DRM_CANNOT_HANDLE;
+    _aidl_return->certificate = {};
+    _aidl_return->wrappedKey = {};
+    if (in_response.size() == 0) {
+        status = Status::BAD_VALUE;
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::queryKeyStatus(
+        const std::vector<uint8_t>& in_sessionId,
+        std::vector<::aidl::android::hardware::drm::KeyValue>* _aidl_return) {
+    if (in_sessionId.size() == 0) {
+        // Return an empty list of key status KeyValue pairs
+        *_aidl_return = {};
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::vector<KeyValue> infoMap = {};
+    mPlayPolicyLock.lock();
+    KeyValue keyValuePair;
+    for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
+        keyValuePair.key = mPlayPolicy[i].key;
+        keyValuePair.value = mPlayPolicy[i].value;
+        infoMap.push_back(keyValuePair);
+    }
+    mPlayPolicyLock.unlock();
+    *_aidl_return = infoMap;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::releaseAllSecureStops() {
+    Status status = Status::OK;
+    const auto res = removeAllSecureStops();
+    if (!res.isOk() && res.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+        status = static_cast<Status>(res.getServiceSpecificError());
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::releaseSecureStop(
+        const ::aidl::android::hardware::drm::SecureStopId& in_secureStopId) {
+    Status status = Status::OK;
+    const auto res = removeSecureStop(in_secureStopId);
+    if (!res.isOk() && res.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+        status = static_cast<Status>(res.getServiceSpecificError());
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::releaseSecureStops(
+        const ::aidl::android::hardware::drm::OpaqueData& in_ssRelease) {
+    // OpaqueData starts with a 4-byte decimal integer string
+    const size_t kFourBytesOffset = 4;
+    if (in_ssRelease.opaqueData.size() < kFourBytesOffset) {
+        ALOGE("Invalid secureStopRelease length");
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    Status status = Status::OK;
+    std::vector<uint8_t> input = in_ssRelease.opaqueData;
+
+    if (input.size() < kSecureStopIdSize + kFourBytesOffset) {
+        // The minimum secure stop payload must contain
+        // a 4-byte count and one secureStop id
+        ALOGE("Total size of secureStops is too short");
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    // The format of opaqueData is shared between the server
+    // and the drm service. The clearkey implementation consists of:
+    //    count - number of secure stops
+    //    list of fixed length secure stops
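+    // For example (illustrative only): a blob covering two secure stops is the
+    // 4-character decimal count "0002" followed by two equal-length secure stops,
+    // each laid out as id followed by data.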
+    size_t countBufferSize = sizeof(uint32_t);
+    if (input.size() < countBufferSize) {
+        // SafetyNet logging
+        android_errorWriteLog(0x534e4554, "144766455");
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+    uint32_t count = 0;
+    sscanf(reinterpret_cast<char*>(input.data()), "%04" PRIu32, &count);
+
+    // Avoid divide by 0 below.
+    if (count == 0) {
+        ALOGE("Invalid 0 secureStop count");
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    // Computes the fixed length secureStop size
+    size_t secureStopSize = (input.size() - kFourBytesOffset) / count;
+    if (secureStopSize < kSecureStopIdSize) {
+        // A valid secureStop contains the id plus data
+        ALOGE("Invalid secureStop size");
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+    uint8_t* buffer = new uint8_t[secureStopSize];
+    size_t offset = kFourBytesOffset;  // skip the count
+    for (size_t i = 0; i < count; ++i, offset += secureStopSize) {
+        memcpy(buffer, input.data() + offset, secureStopSize);
+
+        // A secureStop contains id + data; only the id is used for removal
+        std::vector<uint8_t> id(buffer, buffer + kSecureStopIdSize);
+        ::aidl::android::hardware::drm::SecureStopId secureStopId{id};
+        const auto res = removeSecureStop(secureStopId);
+        if (!res.isOk() && res.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            status = static_cast<Status>(res.getServiceSpecificError());
+        }
+        if (Status::OK != status) break;
+    }
+
+    delete[] buffer;
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::removeAllSecureStops() {
+    Mutex::Autolock lock(mSecureStopLock);
+
+    mSecureStops.clear();
+    mNextSecureStopId = kSecureStopIdStart;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::removeKeys(const std::vector<uint8_t>& in_sessionId) {
+    Status status = Status::ERROR_DRM_CANNOT_HANDLE;
+    if (in_sessionId.size() == 0) {
+        status = Status::BAD_VALUE;
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::removeOfflineLicense(
+        const ::aidl::android::hardware::drm::KeySetId& in_keySetId) {
+    if (mMockError != Status::OK) {
+        return toNdkScopedAStatus(toMockStatus(mMockError));
+    }
+    Status status = Status::BAD_VALUE;
+    std::string licenseName(in_keySetId.keySetId.begin(), in_keySetId.keySetId.end());
+    if (mFileHandle.DeleteLicense(licenseName)) {
+        status = Status::OK;
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::removeSecureStop(
+        const ::aidl::android::hardware::drm::SecureStopId& in_secureStopId) {
+    Mutex::Autolock lock(mSecureStopLock);
+
+    Status status = Status::OK;
+    if (1 != mSecureStops.erase(in_secureStopId.secureStopId)) {
+        status = Status::BAD_VALUE;
+    }
+    return toNdkScopedAStatus(status);
+}
+
+::ndk::ScopedAStatus DrmPlugin::requiresSecureDecoder(
+        const std::string& in_mime, ::aidl::android::hardware::drm::SecurityLevel in_level,
+        bool* _aidl_return) {
+    UNUSED(in_mime);
+    UNUSED(in_level);
+    *_aidl_return = false;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmPlugin::restoreKeys(
+        const std::vector<uint8_t>& in_sessionId,
+        const ::aidl::android::hardware::drm::KeySetId& in_keySetId) {
+    if (in_sessionId.size() == 0 || in_keySetId.keySetId.size() == 0) {
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    DeviceFiles::LicenseState licenseState;
+    std::string offlineLicense;
+    if (!mFileHandle.RetrieveLicense(
+                std::string(in_keySetId.keySetId.begin(), in_keySetId.keySetId.end()),
+                &licenseState, &offlineLicense)) {
+        ALOGE("Failed to restore offline license");
+        return toNdkScopedAStatus(Status::ERROR_DRM_NO_LICENSE);
+    }
+
+    if (DeviceFiles::kLicenseStateUnknown == licenseState ||
+        DeviceFiles::kLicenseStateReleasing == licenseState) {
+        ALOGE("Invalid license state=%d", licenseState);
+        return toNdkScopedAStatus(Status::ERROR_DRM_NO_LICENSE);
+    }
+
+    ::android::sp<Session> session = mSessionLibrary->findSession(in_sessionId);
+    if (!session.get()) {
+        return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
+    }
+    auto res = session->provideKeyResponse(
+            std::vector<uint8_t>(offlineLicense.begin(), offlineLicense.end()));
+    if (res != clearkeydrm::OK) {
+        ALOGE("Failed to restore keys");
+    }
+    return toNdkScopedAStatus(res);
+}
+
+void DrmPlugin::sendEvent(::aidl::android::hardware::drm::EventType in_eventType,
+                          const std::vector<uint8_t>& in_sessionId,
+                          const std::vector<uint8_t>& in_data) {
+    if (mListener != nullptr) {
+        mListener->onEvent(in_eventType, in_sessionId, in_data);
+    } else {
+        ALOGE("Null event listener, event not sent");
+    }
+    return;
+}
+
+void DrmPlugin::sendExpirationUpdate(const std::vector<uint8_t>& in_sessionId,
+                                     int64_t in_expiryTimeInMS) {
+    if (mListener != nullptr) {
+        mListener->onExpirationUpdate(in_sessionId, in_expiryTimeInMS);
+    } else {
+        ALOGE("Null event listener, event not sent");
+    }
+    return;
+}
+
+void DrmPlugin::sendKeysChange(
+        const std::vector<uint8_t>& in_sessionId,
+        const std::vector<::aidl::android::hardware::drm::KeyStatus>& in_keyStatusList,
+        bool in_hasNewUsableKey) {
+    if (mListener != nullptr) {
+        mListener->onKeysChange(in_sessionId, in_keyStatusList, in_hasNewUsableKey);
+    } else {
+        ALOGE("Null event listener, event not sent");
+    }
+    return;
+}
+
+void DrmPlugin::sendSessionLostState(const std::vector<uint8_t>& in_sessionId) {
+    if (mListener != nullptr) {
+        mListener->onSessionLostState(in_sessionId);
+    }
+    return;
+}
+
+::ndk::ScopedAStatus DrmPlugin::setCipherAlgorithm(const std::vector<uint8_t>& /*in_sessionId*/,
+                                                   const std::string& /*in_algorithm*/) {
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::setListener(
+        const std::shared_ptr<
+                ::aidl::android::hardware::drm::IDrmPluginListener>& in_listener) {
+    mListener = in_listener;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::setMacAlgorithm(const std::vector<uint8_t>& /*in_sessionId*/,
+                                                const std::string& /*in_algorithm*/) {
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::setPlaybackId(const std::vector<uint8_t>& in_sessionId,
+                                              const std::string& in_playbackId) {
+    if (in_sessionId.size() == 0) {
+        ALOGE("Invalid empty session id");
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::vector<uint8_t> sid = in_sessionId;
+    mPlaybackId[sid] = in_playbackId;
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::setPropertyByteArray(const std::string& in_propertyName,
+                                                     const std::vector<uint8_t>& in_value) {
+    if (in_propertyName == kDeviceIdKey) {
+        ALOGD("Cannot set immutable property: %s", in_propertyName.c_str());
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    } else if (in_propertyName == kClientIdKey) {
+        mByteArrayProperties[kClientIdKey] = in_value;
+        return toNdkScopedAStatus(Status::OK);
+    }
+
+    // Setting of undefined properties is not supported
+    ALOGE("Failed to set property byte array, key=%s", in_propertyName.c_str());
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::setPropertyString(const std::string& in_propertyName,
+                                                  const std::string& in_value) {
+    std::string immutableKeys;
+    immutableKeys.append(kAlgorithmsKey + ",");
+    immutableKeys.append(kPluginDescriptionKey + ",");
+    immutableKeys.append(kVendorKey + ",");
+    immutableKeys.append(kVersionKey + ",");
+
+    std::string key = std::string(in_propertyName.c_str());
+    if (immutableKeys.find(key) != std::string::npos) {
+        ALOGD("Cannot set immutable property: %s", key.c_str());
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    std::map<std::string, std::string>::iterator itr = mStringProperties.find(key);
+    if (itr == mStringProperties.end()) {
+        ALOGE("Cannot set undefined property string, key=%s", key.c_str());
+        return toNdkScopedAStatus(Status::BAD_VALUE);
+    }
+
+    if (in_propertyName == kDrmErrorTestKey) {
+        if (in_value == kResourceContentionValue) {
+            mMockError = Status::ERROR_DRM_RESOURCE_CONTENTION;
+        } else if (in_value == kLostStateValue) {
+            mMockError = Status::ERROR_DRM_SESSION_LOST_STATE;
+        } else if (in_value == kFrameTooLargeValue) {
+            mMockError = Status::ERROR_DRM_FRAME_TOO_LARGE;
+        } else if (in_value == kInvalidStateValue) {
+            mMockError = Status::ERROR_DRM_INVALID_STATE;
+        } else {
+            mMockError = Status::ERROR_DRM_UNKNOWN;
+        }
+    }
+
+    mStringProperties[key] = std::string(in_value.c_str());
+    return toNdkScopedAStatus(Status::OK);
+}
+
+::ndk::ScopedAStatus DrmPlugin::sign(const std::vector<uint8_t>& /*in_sessionId*/,
+                                     const std::vector<uint8_t>& /*in_keyId*/,
+                                     const std::vector<uint8_t>& /*in_message*/,
+                                     std::vector<uint8_t>* _aidl_return) {
+    *_aidl_return = {};
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::signRSA(const std::vector<uint8_t>& /*in_sessionId*/,
+                                        const std::string& /*in_algorithm*/,
+                                        const std::vector<uint8_t>& /*in_message*/,
+                                        const std::vector<uint8_t>& /*in_wrappedkey*/,
+                                        std::vector<uint8_t>* _aidl_return) {
+    *_aidl_return = {};
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+::ndk::ScopedAStatus DrmPlugin::verify(const std::vector<uint8_t>& /*in_sessionId*/,
+                                       const std::vector<uint8_t>& /*in_keyId*/,
+                                       const std::vector<uint8_t>& /*in_message*/,
+                                       const std::vector<uint8_t>& /*in_signature*/,
+                                       bool* _aidl_return) {
+    *_aidl_return = false;
+    return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE);
+}
+
+// Private methods below.
+void DrmPlugin::setPlayPolicy() {
+    ::android::Mutex::Autolock lock(mPlayPolicyLock);
+    mPlayPolicy.clear();
+
+    KeyValue policy;
+    policy.key = kQueryKeyLicenseType;
+    policy.value = kStreaming;
+    mPlayPolicy.push_back(policy);
+
+    policy.key = kQueryKeyPlayAllowed;
+    policy.value = kTrue;
+    mPlayPolicy.push_back(policy);
+
+    policy.key = kQueryKeyRenewAllowed;
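+    // policy.value still holds kTrue from the assignment above.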
+    mPlayPolicy.push_back(policy);
+}
+
+bool DrmPlugin::makeKeySetId(std::string* keySetId) {
+    if (!keySetId) {
+        ALOGE("keySetId destination not provided");
+        return false;
+    }
+    std::vector<uint8_t> ksid(kKeySetIdPrefix.begin(), kKeySetIdPrefix.end());
+    ksid.resize(kKeySetIdLength);
+    std::vector<uint8_t> randomData((kKeySetIdLength - kKeySetIdPrefix.size()) / 2, 0);
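+    // The generated id is kKeySetIdPrefix plus the hex encoding of the random
+    // bytes, for a total of kKeySetIdLength characters.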
+
+    while (keySetId->empty()) {
+        for (auto itr = randomData.begin(); itr != randomData.end(); ++itr) {
+            *itr = std::rand() % 0xff;
+        }
+        auto id = reinterpret_cast<const uint8_t*>(randomData.data());
+        *keySetId = kKeySetIdPrefix + ::android::ByteArrayToHexString(id, randomData.size());
+        if (mFileHandle.LicenseExists(*keySetId)) {
+            // collision, regenerate
+            ALOGV("Retry generating KeySetId");
+            keySetId->clear();
+        }
+    }
+    return true;
+}
+
+Status DrmPlugin::setSecurityLevel(const std::vector<uint8_t>& sessionId, SecurityLevel level) {
+    if (sessionId.size() == 0) {
+        ALOGE("Invalid empty session id");
+        return Status::BAD_VALUE;
+    }
+
+    if (level != SecurityLevel::DEFAULT && level != SecurityLevel::SW_SECURE_CRYPTO) {
+        ALOGE("Cannot set security level > max");
+        return Status::ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    std::vector<uint8_t> sid = sessionId;
+    ::android::sp<Session> session = mSessionLibrary->findSession(sid);
+    if (!session.get()) {
+        return Status::ERROR_DRM_SESSION_NOT_OPENED;
+    }
+
+    std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr = mSecurityLevel.find(sid);
+    if (itr != mSecurityLevel.end()) {
+        mSecurityLevel[sid] = level;
+    } else {
+        if (!mSecurityLevel.insert(std::pair<std::vector<uint8_t>, SecurityLevel>(sid, level))
+                     .second) {
+            ALOGE("Failed to set security level");
+            return Status::ERROR_DRM_INVALID_STATE;
+        }
+    }
+    return Status::OK;
+}
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Service.cpp b/drm/mediadrm/plugins/clearkey/aidl/Service.cpp
new file mode 100644
index 0000000..0b07864
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/Service.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_NDEBUG 1
+#define LOG_TAG "clearkey-main"
+
+#include "CreatePluginFactories.h"
+
+#include <android-base/logging.h>
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+using ::android::base::InitLogging;
+using ::android::base::LogdLogger;
+
+using ::aidl::android::hardware::drm::clearkey::createDrmFactory;
+using ::aidl::android::hardware::drm::clearkey::DrmFactory;
+
+int main(int /*argc*/, char* argv[]) {
+    InitLogging(argv, LogdLogger());
+    ::android::base::SetMinimumLogSeverity(::android::base::VERBOSE);
+    ABinderProcess_setThreadPoolMaxThreadCount(8);
+
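+    // The instance is published as "<IDrmFactory descriptor>/clearkey", i.e.
+    // android.hardware.drm.IDrmFactory/clearkey, matching the fqname in the VINTF
+    // manifest and the "interface aidl" line in the .rc files added by this change.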
+    std::shared_ptr<DrmFactory> drmFactory = createDrmFactory();
+    const std::string drmInstance = std::string() + DrmFactory::descriptor + "/clearkey";
+    binder_status_t status =
+            AServiceManager_addService(drmFactory->asBinder().get(), drmInstance.c_str());
+    CHECK(status == STATUS_OK);
+
+    ABinderProcess_joinThreadPool();
+    return EXIT_FAILURE;  // should not be reached
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/ServiceLazy.cpp b/drm/mediadrm/plugins/clearkey/aidl/ServiceLazy.cpp
new file mode 100644
index 0000000..c3a33bb
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/ServiceLazy.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_NDEBUG 1
+#define LOG_TAG "clearkey-main"
+
+#include "CreatePluginFactories.h"
+
+#include <android-base/logging.h>
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+using ::android::base::InitLogging;
+using ::android::base::LogdLogger;
+
+using ::aidl::android::hardware::drm::clearkey::createDrmFactory;
+using ::aidl::android::hardware::drm::clearkey::DrmFactory;
+
+int main(int /*argc*/, char* argv[]) {
+    InitLogging(argv, LogdLogger());
+    ::android::base::SetMinimumLogSeverity(::android::base::VERBOSE);
+    ABinderProcess_setThreadPoolMaxThreadCount(8);
+
+    std::shared_ptr<DrmFactory> drmFactory = createDrmFactory();
+    const std::string drmInstance = std::string() + DrmFactory::descriptor + "/clearkey";
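+    // Unlike Service.cpp, this variant uses AServiceManager_registerLazyService so
+    // the HAL can be started on demand; accordingly, the lazy .rc entry below marks
+    // the service "disabled" and "oneshot".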
+    binder_status_t status =
+            AServiceManager_registerLazyService(drmFactory->asBinder().get(), drmInstance.c_str());
+    CHECK(status == STATUS_OK);
+
+    ABinderProcess_joinThreadPool();
+    return EXIT_FAILURE;  // should not be reached
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
new file mode 100644
index 0000000..c87aabc
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
@@ -0,0 +1,9 @@
+service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service-lazy.clearkey
+    oneshot
+    disabled
+    class hal
+    user media
+    group mediadrm drmrpc
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh
+    interface aidl android.hardware.drm.IDrmFactory/clearkey
\ No newline at end of file
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.rc
new file mode 100644
index 0000000..e9252cd
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.rc
@@ -0,0 +1,7 @@
+service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service.clearkey
+    class hal
+    user media
+    group mediadrm drmrpc
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh
+    interface aidl android.hardware.drm.IDrmFactory/clearkey
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.xml
new file mode 100644
index 0000000..ac3e922
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.xml
@@ -0,0 +1,7 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.drm</name>
+        <version>1</version>
+        <fqname>IDrmFactory/clearkey</fqname>
+    </hal>
+</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h b/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
new file mode 100644
index 0000000..fb2cceb
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef AIDL_CLEARKEY_PROPERTIES_H
+#define AIDL_CLEARKEY_PROPERTIES_H
+#include <string>
+
+namespace clearkeydrm {
+static const std::string kAidlVendorValue("Google");
+static const std::string kAidlVersionValue("aidl-1");
+static const std::string kAidlPluginDescriptionValue("ClearKey CDM");
+static const std::string kAidlAlgorithmsValue("");
+static const std::string kAidlListenerTestSupportValue("true");
+
+static const std::string kAidlDrmErrorTestValue("");
+static const std::string kAidlResourceContentionValue("resourceContention");
+static const std::string kAidlLostStateValue("lostState");
+static const std::string kAidlFrameTooLargeValue("frameTooLarge");
+static const std::string kAidlInvalidStateValue("invalidState");
+}  // namespace clearkeydrm
+
+#endif
\ No newline at end of file
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h b/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
new file mode 100644
index 0000000..9257b17
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include <android/binder_auto_utils.h>
+#include "aidl/android/hardware/drm/Status.h"
+#include "ClearKeyTypes.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+inline ::aidl::android::hardware::drm::Status toMockStatus(
+        ::aidl::android::hardware::drm::Status status) {
+    switch (status) {
+        case ::aidl::android::hardware::drm::Status::ERROR_DRM_INSUFFICIENT_SECURITY:
+        case ::aidl::android::hardware::drm::Status::ERROR_DRM_FRAME_TOO_LARGE:
+        case ::aidl::android::hardware::drm::Status::ERROR_DRM_SESSION_LOST_STATE:
+            return ::aidl::android::hardware::drm::Status::ERROR_DRM_UNKNOWN;
+        default:
+            return status;
+    }
+}
+
+inline ::ndk::ScopedAStatus toNdkScopedAStatus(::aidl::android::hardware::drm::Status status,
+                                               const char* msg = nullptr) {
+    if (Status::OK == status) {
+        return ::ndk::ScopedAStatus::ok();
+    } else {
+        auto err = static_cast<int32_t>(status);
+        if (msg) {
+            return ::ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(err, msg);
+        } else {
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+        }
+    }
+}
+
+inline ::ndk::ScopedAStatus toNdkScopedAStatus(clearkeydrm::CdmResponseType res) {
+    return toNdkScopedAStatus(static_cast<::aidl::android::hardware::drm::Status>(res));
+}
+
+#define UNUSED(x) (void)(x);
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl b/drm/mediadrm/plugins/clearkey/aidl/include/CreatePluginFactories.h
similarity index 67%
rename from media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
rename to drm/mediadrm/plugins/clearkey/aidl/include/CreatePluginFactories.h
index b08a604..e2afcf5 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/CreatePluginFactories.h
@@ -13,17 +13,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#pragma once
 
-package android.media;
+#include "DrmFactory.h"
 
-/**
- * Audio encapsulation type is used to describe if the audio data should be sent with a particular
- * encapsulation type or not.
- *
- * {@hide}
- */
-@Backing(type="int")
-enum AudioEncapsulationType {
-    NONE     = 0,
-    IEC61937 = 1,
-}
\ No newline at end of file
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+std::shared_ptr<DrmFactory> createDrmFactory();
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/aidl/include/CryptoPlugin.h
new file mode 100644
index 0000000..60dbf77
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/CryptoPlugin.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <aidl/android/hardware/drm/BnCryptoPlugin.h>
+#include <aidl/android/hardware/drm/Status.h>
+
+#include <aidl/android/hardware/common/Ashmem.h>
+
+#include <android/binder_auto_utils.h>
+
+#include <memory>
+#include <mutex>
+
+#include "ClearKeyTypes.h"
+#include "Session.h"
+
+namespace {
+static const size_t KEY_ID_SIZE = 16;
+static const size_t KEY_IV_SIZE = 16;
+}  // namespace
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+using namespace clearkeydrm;
+using ::aidl::android::hardware::drm::DecryptArgs;
+using ::aidl::android::hardware::drm::Status;
+
+struct SharedBufferBase {
+    uint8_t* mBase;
+    int64_t mSize;
+    SharedBufferBase(const ::aidl::android::hardware::drm::SharedBuffer& mem);
+    ~SharedBufferBase();
+};
+
+struct CryptoPlugin : public BnCryptoPlugin {
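+    // The constructor cannot return a status, so any error from setMediaDrmSession()
+    // is recorded in mInitStatus and surfaced through getInitStatus().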
+    explicit CryptoPlugin(const std::vector<uint8_t>& sessionId) {
+        const auto res = setMediaDrmSession(sessionId);
+        mInitStatus = Status::OK;
+        if (!res.isOk() && res.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+            mInitStatus = static_cast<Status>(res.getServiceSpecificError());
+        }
+    }
+    virtual ~CryptoPlugin() {}
+
+    ::ndk::ScopedAStatus decrypt(const DecryptArgs& in_args, int32_t* _aidl_return) override;
+
+    ::ndk::ScopedAStatus getLogMessages(
+            std::vector<::aidl::android::hardware::drm::LogMessage>* _aidl_return) override;
+
+    ::ndk::ScopedAStatus notifyResolution(int32_t in_width, int32_t in_height) override;
+
+    ::ndk::ScopedAStatus requiresSecureDecoderComponent(const std::string& in_mime,
+                                                        bool* _aidl_return) override;
+
+    ::ndk::ScopedAStatus setMediaDrmSession(const std::vector<uint8_t>& in_sessionId) override;
+
+    ::ndk::ScopedAStatus setSharedBufferBase(
+            const ::aidl::android::hardware::drm::SharedBuffer& in_base) override;
+
+    ::aidl::android::hardware::drm::Status getInitStatus() const { return mInitStatus; }
+
+  private:
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
+
+    std::mutex mSharedBufferLock;
+    std::map<uint32_t, std::shared_ptr<SharedBufferBase>> mSharedBufferMap
+            GUARDED_BY(mSharedBufferLock);
+    ::android::sp<Session> mSession;
+    ::aidl::android::hardware::drm::Status mInitStatus;
+};
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/DrmFactory.h b/drm/mediadrm/plugins/clearkey/aidl/include/DrmFactory.h
new file mode 100644
index 0000000..1239aa4
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/DrmFactory.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <aidl/android/hardware/drm/BnDrmFactory.h>
+#include <aidl/android/hardware/drm/IDrmFactory.h>
+#include <aidl/android/hardware/drm/IDrmPlugin.h>
+#include <aidl/android/hardware/drm/ICryptoPlugin.h>
+
+#include <string>
+#include <vector>
+
+#include "ClearKeyTypes.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+struct DrmFactory : public BnDrmFactory {
+    DrmFactory() {}
+    virtual ~DrmFactory() {}
+
+    ::ndk::ScopedAStatus createDrmPlugin(
+            const ::aidl::android::hardware::drm::Uuid& in_uuid,
+            const std::string& in_appPackageName,
+            std::shared_ptr<::aidl::android::hardware::drm::IDrmPlugin>* _aidl_return) override;
+
+    ::ndk::ScopedAStatus createCryptoPlugin(
+            const ::aidl::android::hardware::drm::Uuid& in_uuid,
+            const std::vector<uint8_t>& in_initData,
+            std::shared_ptr<::aidl::android::hardware::drm::ICryptoPlugin>* _aidl_return) override;
+
+    ::ndk::ScopedAStatus getSupportedCryptoSchemes(
+            ::aidl::android::hardware::drm::CryptoSchemes* _aidl_return) override;
+
+    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
+
+
+  private:
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(DrmFactory);
+};
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
new file mode 100644
index 0000000..25c05f0
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <aidl/android/hardware/drm/BnDrmPlugin.h>
+#include <aidl/android/hardware/drm/IDrmPluginListener.h>
+#include <aidl/android/hardware/drm/Status.h>
+
+#include <stdio.h>
+#include <map>
+
+#include <utils/List.h>
+
+#include "DeviceFiles.h"
+#include "SessionLibrary.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace drm {
+namespace clearkey {
+
+using namespace clearkeydrm;
+using ::aidl::android::hardware::drm::KeyType;
+using ::aidl::android::hardware::drm::Status;
+
+struct DrmPlugin : public BnDrmPlugin {
+  public:
+    explicit DrmPlugin(SessionLibrary* sessionLibrary);
+    virtual ~DrmPlugin() { mFileHandle.DeleteAllLicenses(); }
+
+    ::ndk::ScopedAStatus closeSession(const std::vector<uint8_t>& in_sessionId) override;
+    ::ndk::ScopedAStatus decrypt(const std::vector<uint8_t>& in_sessionId,
+                                 const std::vector<uint8_t>& in_keyId,
+                                 const std::vector<uint8_t>& in_input,
+                                 const std::vector<uint8_t>& in_iv,
+                                 std::vector<uint8_t>* _aidl_return) override;
+    ::ndk::ScopedAStatus encrypt(const std::vector<uint8_t>& in_sessionId,
+                                 const std::vector<uint8_t>& in_keyId,
+                                 const std::vector<uint8_t>& in_input,
+                                 const std::vector<uint8_t>& in_iv,
+                                 std::vector<uint8_t>* _aidl_return) override;
+    ::ndk::ScopedAStatus getHdcpLevels(
+            ::aidl::android::hardware::drm::HdcpLevels* _aidl_return) override;
+    ::ndk::ScopedAStatus getKeyRequest(
+            const std::vector<uint8_t>& in_scope, const std::vector<uint8_t>& in_initData,
+            const std::string& in_mimeType, ::aidl::android::hardware::drm::KeyType in_keyType,
+            const std::vector<::aidl::android::hardware::drm::KeyValue>& in_optionalParameters,
+            ::aidl::android::hardware::drm::KeyRequest* _aidl_return) override;
+    ::ndk::ScopedAStatus getLogMessages(
+            std::vector<::aidl::android::hardware::drm::LogMessage>* _aidl_return) override;
+
+    ::ndk::ScopedAStatus getMetrics(
+            std::vector<::aidl::android::hardware::drm::DrmMetricGroup>* _aidl_return) override;
+    ::ndk::ScopedAStatus getNumberOfSessions(
+            ::aidl::android::hardware::drm::NumberOfSessions* _aidl_return) override;
+    ::ndk::ScopedAStatus getOfflineLicenseKeySetIds(
+            std::vector<::aidl::android::hardware::drm::KeySetId>* _aidl_return) override;
+    ::ndk::ScopedAStatus getOfflineLicenseState(
+            const ::aidl::android::hardware::drm::KeySetId& in_keySetId,
+            ::aidl::android::hardware::drm::OfflineLicenseState* _aidl_return) override;
+    ::ndk::ScopedAStatus getPropertyByteArray(const std::string& in_propertyName,
+                                              std::vector<uint8_t>* _aidl_return) override;
+    ::ndk::ScopedAStatus getPropertyString(const std::string& in_propertyName,
+                                           std::string* _aidl_return) override;
+    ::ndk::ScopedAStatus getProvisionRequest(
+            const std::string& in_certificateType, const std::string& in_certificateAuthority,
+            ::aidl::android::hardware::drm::ProvisionRequest* _aidl_return) override;
+    ::ndk::ScopedAStatus getSecureStop(
+            const ::aidl::android::hardware::drm::SecureStopId& in_secureStopId,
+            ::aidl::android::hardware::drm::SecureStop* _aidl_return) override;
+    ::ndk::ScopedAStatus getSecureStopIds(
+            std::vector<::aidl::android::hardware::drm::SecureStopId>* _aidl_return) override;
+    ::ndk::ScopedAStatus getSecureStops(
+            std::vector<::aidl::android::hardware::drm::SecureStop>* _aidl_return) override;
+    ::ndk::ScopedAStatus getSecurityLevel(
+            const std::vector<uint8_t>& in_sessionId,
+            ::aidl::android::hardware::drm::SecurityLevel* _aidl_return) override;
+    ::ndk::ScopedAStatus openSession(::aidl::android::hardware::drm::SecurityLevel in_securityLevel,
+                                     std::vector<uint8_t>* _aidl_return) override;
+    ::ndk::ScopedAStatus provideKeyResponse(
+            const std::vector<uint8_t>& in_scope, const std::vector<uint8_t>& in_response,
+            ::aidl::android::hardware::drm::KeySetId* _aidl_return) override;
+    ::ndk::ScopedAStatus provideProvisionResponse(
+            const std::vector<uint8_t>& in_response,
+            ::aidl::android::hardware::drm::ProvideProvisionResponseResult* _aidl_return) override;
+    ::ndk::ScopedAStatus queryKeyStatus(
+            const std::vector<uint8_t>& in_sessionId,
+            std::vector<::aidl::android::hardware::drm::KeyValue>* _aidl_return) override;
+    ::ndk::ScopedAStatus releaseAllSecureStops() override;
+    ::ndk::ScopedAStatus releaseSecureStop(
+            const ::aidl::android::hardware::drm::SecureStopId& in_secureStopId) override;
+    ::ndk::ScopedAStatus releaseSecureStops(
+            const ::aidl::android::hardware::drm::OpaqueData& in_ssRelease) override;
+    ::ndk::ScopedAStatus removeAllSecureStops() override;
+    ::ndk::ScopedAStatus removeKeys(const std::vector<uint8_t>& in_sessionId) override;
+    ::ndk::ScopedAStatus removeOfflineLicense(
+            const ::aidl::android::hardware::drm::KeySetId& in_keySetId) override;
+    ::ndk::ScopedAStatus removeSecureStop(
+            const ::aidl::android::hardware::drm::SecureStopId& in_secureStopId) override;
+    ::ndk::ScopedAStatus requiresSecureDecoder(
+            const std::string& in_mime, ::aidl::android::hardware::drm::SecurityLevel in_level,
+            bool* _aidl_return) override;
+    ::ndk::ScopedAStatus restoreKeys(
+            const std::vector<uint8_t>& in_sessionId,
+            const ::aidl::android::hardware::drm::KeySetId& in_keySetId) override;
+    ::ndk::ScopedAStatus setCipherAlgorithm(const std::vector<uint8_t>& in_sessionId,
+                                            const std::string& in_algorithm) override;
+    ::ndk::ScopedAStatus setListener(
+            const std::shared_ptr<IDrmPluginListener>& in_listener) override;
+    ::ndk::ScopedAStatus setMacAlgorithm(const std::vector<uint8_t>& in_sessionId,
+                                         const std::string& in_algorithm) override;
+    ::ndk::ScopedAStatus setPlaybackId(const std::vector<uint8_t>& in_sessionId,
+                                       const std::string& in_playbackId) override;
+    ::ndk::ScopedAStatus setPropertyByteArray(const std::string& in_propertyName,
+                                              const std::vector<uint8_t>& in_value) override;
+    ::ndk::ScopedAStatus setPropertyString(const std::string& in_propertyName,
+                                           const std::string& in_value) override;
+    ::ndk::ScopedAStatus sign(const std::vector<uint8_t>& in_sessionId,
+                              const std::vector<uint8_t>& in_keyId,
+                              const std::vector<uint8_t>& in_message,
+                              std::vector<uint8_t>* _aidl_return) override;
+    ::ndk::ScopedAStatus signRSA(const std::vector<uint8_t>& in_sessionId,
+                                 const std::string& in_algorithm,
+                                 const std::vector<uint8_t>& in_message,
+                                 const std::vector<uint8_t>& in_wrappedkey,
+                                 std::vector<uint8_t>* _aidl_return) override;
+    ::ndk::ScopedAStatus verify(const std::vector<uint8_t>& in_sessionId,
+                                const std::vector<uint8_t>& in_keyId,
+                                const std::vector<uint8_t>& in_message,
+                                const std::vector<uint8_t>& in_signature,
+                                bool* _aidl_return) override;
+
+  private:
+    void initProperties();
+    void installSecureStop(const std::vector<uint8_t>& sessionId);
+    bool makeKeySetId(std::string* keySetId);
+    void setPlayPolicy();
+
+    void sendEvent(::aidl::android::hardware::drm::EventType in_eventType,
+                   const std::vector<uint8_t>& in_sessionId,
+                   const std::vector<uint8_t>& in_data);
+    void sendExpirationUpdate(const std::vector<uint8_t>& in_sessionId,
+                              int64_t in_expiryTimeInMS);
+    void sendKeysChange(
+            const std::vector<uint8_t>& in_sessionId,
+            const std::vector<::aidl::android::hardware::drm::KeyStatus>& in_keyStatusList,
+            bool in_hasNewUsableKey);
+    void sendSessionLostState(const std::vector<uint8_t>& in_sessionId);
+
+    Status setSecurityLevel(const std::vector<uint8_t>& sessionId, SecurityLevel level);
+
+    Status getKeyRequestCommon(const std::vector<uint8_t>& scope,
+                               const std::vector<uint8_t>& initData, const std::string& mimeType,
+                               KeyType keyType, const std::vector<KeyValue>& optionalParameters,
+                               std::vector<uint8_t>* request, KeyRequestType* getKeyRequestType,
+                               std::string* defaultUrl);
+
+    struct ClearkeySecureStop {
+        std::vector<uint8_t> id;
+        std::vector<uint8_t> data;
+    };
+
+    std::map<std::vector<uint8_t>, ClearkeySecureStop> mSecureStops;
+    std::vector<KeyValue> mPlayPolicy;
+    std::map<std::string, std::string> mStringProperties;
+    std::map<std::string, std::vector<uint8_t>> mByteArrayProperties;
+    std::map<std::string, std::vector<uint8_t>> mReleaseKeysMap;
+    std::map<std::vector<uint8_t>, std::string> mPlaybackId;
+    std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel;
+    ::std::shared_ptr<IDrmPluginListener> mListener;
+    SessionLibrary* mSessionLibrary;
+    int64_t mOpenSessionOkCount;
+    int64_t mCloseSessionOkCount;
+    int64_t mCloseSessionNotOpenedCount;
+    uint32_t mNextSecureStopId;
+    ::android::Mutex mPlayPolicyLock;
+
+    // set by property to mock error scenarios
+    Status mMockError;
+
+    void processMockError(const ::android::sp<Session>& session) {
+        session->setMockError(static_cast<CdmResponseType>(mMockError));
+        mMockError = Status::OK;
+    }
+
+    DeviceFiles mFileHandle;
+    ::android::Mutex mSecureStopLock;
+
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
+};
+
+}  // namespace clearkey
+}  // namespace drm
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/drm/mediadrm/plugins/clearkey/common/AesCtrDecryptor.cpp b/drm/mediadrm/plugins/clearkey/common/AesCtrDecryptor.cpp
new file mode 100644
index 0000000..0b97820
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/AesCtrDecryptor.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-AesDecryptor"
+
+#include <utils/Log.h>
+
+#include <openssl/aes.h>
+
+#include "AesCtrDecryptor.h"
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+
+static const size_t kBlockBitCount = kBlockSize * 8;
+
+CdmResponseType AesCtrDecryptor::decrypt(const std::vector<uint8_t>& key, const Iv iv,
+                                         const uint8_t* source, uint8_t* destination,
+                                         const std::vector<int32_t>& clearDataLengths,
+                                         const std::vector<int32_t>& encryptedDataLengths,
+                                         size_t* bytesDecryptedOut) {
+
+    if (key.size() != kBlockSize || clearDataLengths.size() != encryptedDataLengths.size()) {
+        android_errorWriteLog(0x534e4554, "63982768");
+        return clearkeydrm::ERROR_DECRYPT;
+    }
+
+    uint32_t blockOffset = 0;
+    uint8_t previousEncryptedCounter[kBlockSize];
+    memset(previousEncryptedCounter, 0, kBlockSize);
+
+    size_t offset = 0;
+    AES_KEY opensslKey;
+    AES_set_encrypt_key(key.data(), kBlockBitCount, &opensslKey);
+    Iv opensslIv;
+    memcpy(opensslIv, iv, sizeof(opensslIv));
+
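+    // Each subsample is a run of clear bytes followed by a run of AES-CTR
+    // encrypted bytes. Clear bytes are copied through unchanged; the counter
+    // state (opensslIv, previousEncryptedCounter, blockOffset) is carried
+    // across subsamples so the key stream stays continuous over the sample.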
+    for (size_t i = 0; i < clearDataLengths.size(); ++i) {
+        int32_t numBytesOfClearData = clearDataLengths[i];
+        if (numBytesOfClearData > 0) {
+            memcpy(destination + offset, source + offset, numBytesOfClearData);
+            offset += numBytesOfClearData;
+        }
+
+        int32_t numBytesOfEncryptedData = encryptedDataLengths[i];
+        if (numBytesOfEncryptedData > 0) {
+            AES_ctr128_encrypt(source + offset, destination + offset,
+                               numBytesOfEncryptedData, &opensslKey, opensslIv,
+                               previousEncryptedCounter, &blockOffset);
+            offset += numBytesOfEncryptedData;
+        }
+    }
+
+    *bytesDecryptedOut = offset;
+    return clearkeydrm::OK;
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/Android.bp b/drm/mediadrm/plugins/clearkey/common/Android.bp
index 7ed8b88..a6a5b28 100644
--- a/drm/mediadrm/plugins/clearkey/common/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/common/Android.bp
@@ -44,3 +44,56 @@
         integer_overflow: true,
     },
 }
+
+cc_library_static {
+    name: "libclearkeydevicefiles-protos.common",
+    vendor: true,
+
+    proto: {
+        export_proto_headers: true,
+        type: "lite",
+    },
+    srcs: ["protos/DeviceFiles.proto"],
+}
+
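+// Core ClearKey CDM logic (base64, init-data parsing, JSON web keys, sessions,
+// AES-CTR decryption and the in-memory license file store) shared by the
+// clearkey plugin implementations.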
+cc_library_static {
+    name: "libclearkeybase",
+    vendor: true,
+
+    srcs: [
+        "AesCtrDecryptor.cpp",
+        "Base64.cpp",
+        "Buffer.cpp",
+        "ClearKeyUUID.cpp",
+        "DeviceFiles.cpp",
+        "InitDataParser.cpp",
+        "JsonWebKey.cpp",
+        "MemoryFileSystem.cpp",
+        "Session.cpp",
+        "SessionLibrary.cpp",
+        "Utils.cpp",
+    ],
+
+    cflags: ["-Wall", "-Werror"],
+
+    include_dirs: ["frameworks/av/include"],
+
+    shared_libs: [
+        "libutils",
+        "libcrypto",
+    ],
+
+    whole_static_libs: [
+        "libjsmn",
+        "libclearkeydevicefiles-protos.common",
+    ],
+
+    export_include_dirs: [
+        "include",
+        "include/clearkeydrm",
+    ],
+
+    sanitize: {
+        integer_overflow: true,
+    },
+}
diff --git a/drm/mediadrm/plugins/clearkey/common/Base64.cpp b/drm/mediadrm/plugins/clearkey/common/Base64.cpp
new file mode 100644
index 0000000..6499793
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/Base64.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-Base64"
+
+#include "Base64.h"
+
+#include <string>
+
+namespace clearkeydrm {
+
+::android::sp<Buffer> decodeBase64(const std::string& s) {
+    size_t n = s.size();
+
+    if ((n % 4) != 0) {
+        return nullptr;
+    }
+
+    size_t padding = 0;
+    if (n >= 1 && s.c_str()[n - 1] == '=') {
+        padding = 1;
+
+        if (n >= 2 && s.c_str()[n - 2] == '=') {
+            padding = 2;
+
+            if (n >= 3 && s.c_str()[n - 3] == '=') {
+                padding = 3;
+            }
+        }
+    }
+
+    // We divide first to avoid overflow. It's OK to do this because we
+    // already made sure that n % 4 == 0.
+    size_t outLen = (n / 4) * 3 - padding;
+
+    ::android::sp<Buffer> buffer = new Buffer(outLen);
+    uint8_t* out = buffer->data();
+    if (out == nullptr || buffer->size() < outLen) {
+        return nullptr;
+    }
+
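+    // Standard 4-characters-to-3-bytes decode: each character contributes 6
+    // bits to 'accum', and every fourth character flushes up to 3 output
+    // bytes. The value table below also accepts the base64url alphabet
+    // ('-' maps to 62 and '_' maps to 63).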
+    size_t j = 0;
+    uint32_t accum = 0;
+    for (size_t i = 0; i < n; ++i) {
+        char c = s.c_str()[i];
+        unsigned value;
+        if (c >= 'A' && c <= 'Z') {
+            value = c - 'A';
+        } else if (c >= 'a' && c <= 'z') {
+            value = 26 + c - 'a';
+        } else if (c >= '0' && c <= '9') {
+            value = 52 + c - '0';
+        } else if (c == '+' || c == '-') {
+            value = 62;
+        } else if (c == '/' || c == '_') {
+            value = 63;
+        } else if (c != '=') {
+            return nullptr;
+        } else {
+            if (i < n - padding) {
+                return nullptr;
+            }
+
+            value = 0;
+        }
+
+        accum = (accum << 6) | value;
+
+        if (((i + 1) % 4) == 0) {
+            if (j < outLen) {
+                out[j++] = (accum >> 16);
+            }
+            if (j < outLen) {
+                out[j++] = (accum >> 8) & 0xff;
+            }
+            if (j < outLen) {
+                out[j++] = accum & 0xff;
+            }
+
+            accum = 0;
+        }
+    }
+
+    return buffer;
+}
+
+static char encode6Bit(unsigned x) {
+    if (x <= 25) {
+        return 'A' + x;
+    } else if (x <= 51) {
+        return 'a' + x - 26;
+    } else if (x <= 61) {
+        return '0' + x - 52;
+    } else if (x == 62) {
+        return '+';
+    } else {
+        return '/';
+    }
+}
+
+void encodeBase64(const void* _data, size_t size, std::string* out) {
+    out->clear();
+
+    const uint8_t* data = (const uint8_t*)_data;
+
+    size_t i;
+    for (i = 0; i < (size / 3) * 3; i += 3) {
+        uint8_t x1 = data[i];
+        uint8_t x2 = data[i + 1];
+        uint8_t x3 = data[i + 2];
+
+        out->push_back(encode6Bit(x1 >> 2));
+        out->push_back(encode6Bit((x1 << 4 | x2 >> 4) & 0x3f));
+        out->push_back(encode6Bit((x2 << 2 | x3 >> 6) & 0x3f));
+        out->push_back(encode6Bit(x3 & 0x3f));
+    }
+    switch (size % 3) {
+        case 0:
+            break;
+        case 2: {
+            uint8_t x1 = data[i];
+            uint8_t x2 = data[i + 1];
+            out->push_back(encode6Bit(x1 >> 2));
+            out->push_back(encode6Bit((x1 << 4 | x2 >> 4) & 0x3f));
+            out->push_back(encode6Bit((x2 << 2) & 0x3f));
+            out->push_back('=');
+            break;
+        }
+        default: {
+            uint8_t x1 = data[i];
+            out->push_back(encode6Bit(x1 >> 2));
+            out->push_back(encode6Bit((x1 << 4) & 0x3f));
+            out->append("==");
+            break;
+        }
+    }
+}
+
+void encodeBase64Url(const void* _data, size_t size, std::string* out) {
+    encodeBase64(_data, size, out);
+
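+    // Convert the standard alphabet to the base64url alphabet: '+' becomes
+    // '-' and '/' becomes '_'. Padding characters are left as-is.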
+    if ((std::string::npos != out->find("+")) || (std::string::npos != out->find("/"))) {
+        size_t outLen = out->size();
+        char* base64url = new char[outLen];
+        for (size_t i = 0; i < outLen; ++i) {
+            if (out->c_str()[i] == '+')
+                base64url[i] = '-';
+            else if (out->c_str()[i] == '/')
+                base64url[i] = '_';
+            else
+                base64url[i] = out->c_str()[i];
+        }
+
+        out->assign(base64url, outLen);
+        delete[] base64url;
+    }
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/Buffer.cpp b/drm/mediadrm/plugins/clearkey/common/Buffer.cpp
new file mode 100644
index 0000000..1671598
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/Buffer.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "Buffer.h"
+
+namespace clearkeydrm {
+
+Buffer::Buffer(size_t capacity) : mRangeOffset(0), mOwnsData(true) {
+    mData = malloc(capacity);
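+    // If allocation fails, fall back to an empty zero-capacity buffer rather
+    // than aborting.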
+    if (mData == nullptr) {
+        mCapacity = 0;
+        mRangeLength = 0;
+    } else {
+        mCapacity = capacity;
+        mRangeLength = capacity;
+    }
+}
+
+Buffer::~Buffer() {
+    if (mOwnsData) {
+        if (mData != nullptr) {
+            free(mData);
+            mData = nullptr;
+        }
+    }
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/DeviceFiles.cpp b/drm/mediadrm/plugins/clearkey/common/DeviceFiles.cpp
new file mode 100644
index 0000000..2299249
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/DeviceFiles.cpp
@@ -0,0 +1,251 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <utils/Log.h>
+
+#include <sys/stat.h>
+#include <string>
+
+#include "DeviceFiles.h"
+#include "protos/DeviceFiles.pb.h"
+
+#include <openssl/sha.h>
+
+// Protobuf generated classes.
+using clearkeydrm::HashedFile;
+using clearkeydrm::License;
+using clearkeydrm::License_LicenseState_ACTIVE;
+using clearkeydrm::License_LicenseState_RELEASING;
+using clearkeydrm::OfflineFile;
+
+namespace {
+const char kLicenseFileNameExt[] = ".lic";
+
+bool Hash(const std::string& data, std::string* hash) {
+    if (!hash) return false;
+
+    hash->resize(SHA256_DIGEST_LENGTH);
+
+    const unsigned char* input = reinterpret_cast<const unsigned char*>(data.data());
+    unsigned char* output = reinterpret_cast<unsigned char*>(&(*hash)[0]);
+    SHA256(input, data.size(), output);
+    return true;
+}
+
+}  // namespace
+
+namespace clearkeydrm {
+
+bool DeviceFiles::StoreLicense(const std::string& keySetId, LicenseState state,
+                               const std::string& licenseResponse) {
+    OfflineFile file;
+    file.set_type(OfflineFile::LICENSE);
+    file.set_version(OfflineFile::VERSION_1);
+
+    License* license = file.mutable_license();
+    switch (state) {
+        case kLicenseStateActive:
+            license->set_state(License_LicenseState_ACTIVE);
+            license->set_license(licenseResponse);
+            break;
+        case kLicenseStateReleasing:
+            license->set_state(License_LicenseState_RELEASING);
+            license->set_license(licenseResponse);
+            break;
+        default:
+            ALOGW("StoreLicense: Unknown license state: %u", state);
+            return false;
+    }
+
+    std::string serializedFile;
+    file.SerializeToString(&serializedFile);
+
+    return StoreFileWithHash(keySetId + kLicenseFileNameExt, serializedFile);
+}
+
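+// Wraps the serialized OfflineFile in a HashedFile proto together with a
+// SHA-256 hash of its bytes, so corruption can be detected when the file is
+// read back (see RetrieveHashedFile).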
+bool DeviceFiles::StoreFileWithHash(const std::string& fileName,
+                                    const std::string& serializedFile) {
+    std::string hash;
+    if (!Hash(serializedFile, &hash)) {
+        ALOGE("StoreFileWithHash: Failed to compute hash");
+        return false;
+    }
+
+    HashedFile hashFile;
+    hashFile.set_file(serializedFile);
+    hashFile.set_hash(hash);
+
+    std::string serializedHashFile;
+    hashFile.SerializeToString(&serializedHashFile);
+
+    return StoreFileRaw(fileName, serializedHashFile);
+}
+
+bool DeviceFiles::StoreFileRaw(const std::string& fileName, const std::string& serializedHashFile) {
+    MemoryFileSystem::MemoryFile memFile;
+    memFile.setFileName(fileName);
+    memFile.setContent(serializedHashFile);
+    memFile.setFileSize(serializedHashFile.size());
+    size_t len = mFileHandle.Write(fileName, memFile);
+
+    if (len != static_cast<size_t>(serializedHashFile.size())) {
+        ALOGE("StoreFileRaw: Failed to write %s", fileName.c_str());
+        ALOGD("StoreFileRaw: expected=%zd, actual=%zu", serializedHashFile.size(), len);
+        return false;
+    }
+
+    ALOGD("StoreFileRaw: wrote %zu bytes to %s", serializedHashFile.size(), fileName.c_str());
+    return true;
+}
+
+bool DeviceFiles::RetrieveLicense(const std::string& keySetId, LicenseState* state,
+                                  std::string* offlineLicense) {
+    OfflineFile file;
+    if (!RetrieveHashedFile(keySetId + kLicenseFileNameExt, &file)) {
+        return false;
+    }
+
+    if (file.type() != OfflineFile::LICENSE) {
+        ALOGE("RetrieveLicense: Invalid file type");
+        return false;
+    }
+
+    if (file.version() != OfflineFile::VERSION_1) {
+        ALOGE("RetrieveLicense: Invalid file version");
+        return false;
+    }
+
+    if (!file.has_license()) {
+        ALOGE("RetrieveLicense: License not present");
+        return false;
+    }
+
+    License license = file.license();
+    switch (license.state()) {
+        case License_LicenseState_ACTIVE:
+            *state = kLicenseStateActive;
+            break;
+        case License_LicenseState_RELEASING:
+            *state = kLicenseStateReleasing;
+            break;
+        default:
+            ALOGW("RetrieveLicense: Unrecognized license state: %u", kLicenseStateUnknown);
+            *state = kLicenseStateUnknown;
+            break;
+    }
+    *offlineLicense = license.license();
+    return true;
+}
+
+bool DeviceFiles::DeleteLicense(const std::string& keySetId) {
+    return mFileHandle.RemoveFile(keySetId + kLicenseFileNameExt);
+}
+
+bool DeviceFiles::DeleteAllLicenses() {
+    return mFileHandle.RemoveAllFiles();
+}
+
+bool DeviceFiles::LicenseExists(const std::string& keySetId) {
+    return mFileHandle.FileExists(keySetId + kLicenseFileNameExt);
+}
+
+std::vector<std::string> DeviceFiles::ListLicenses() const {
+    std::vector<std::string> licenses = mFileHandle.ListFiles();
+    for (size_t i = 0; i < licenses.size(); i++) {
+        std::string& license = licenses[i];
+        license = license.substr(0, license.size() - strlen(kLicenseFileNameExt));
+    }
+    return licenses;
+}
+
+bool DeviceFiles::RetrieveHashedFile(const std::string& fileName, OfflineFile* deSerializedFile) {
+    if (!deSerializedFile) {
+        ALOGE("RetrieveHashedFile: invalid file parameter");
+        return false;
+    }
+
+    if (!FileExists(fileName)) {
+        ALOGE("RetrieveHashedFile: %s does not exist", fileName.c_str());
+        return false;
+    }
+
+    ssize_t bytes = GetFileSize(fileName);
+    if (bytes <= 0) {
+        ALOGE("RetrieveHashedFile: invalid file size: %s", fileName.c_str());
+        // Remove the corrupted file so the caller will not get the same error
+        // when trying to access the file repeatedly, causing the system to stall.
+        RemoveFile(fileName);
+        return false;
+    }
+
+    std::string serializedHashFile;
+    serializedHashFile.resize(bytes);
+    bytes = mFileHandle.Read(fileName, &serializedHashFile);
+
+    if (bytes != static_cast<ssize_t>(serializedHashFile.size())) {
+        ALOGE("RetrieveHashedFile: Failed to read from %s", fileName.c_str());
+        ALOGV("RetrieveHashedFile: expected: %zd, actual: %zd", serializedHashFile.size(), bytes);
+        // Remove the corrupted file so the caller will not get the same error
+        // when trying to access the file repeatedly, causing the system to stall.
+        RemoveFile(fileName);
+        return false;
+    }
+
+    ALOGV("RetrieveHashedFile: read %zd from %s", bytes, fileName.c_str());
+
+    HashedFile hashFile;
+    if (!hashFile.ParseFromString(serializedHashFile)) {
+        ALOGE("RetrieveHashedFile: Unable to parse hash file");
+        // Remove corrupt file.
+        RemoveFile(fileName);
+        return false;
+    }
+
+    std::string hash;
+    if (!Hash(hashFile.file(), &hash)) {
+        ALOGE("RetrieveHashedFile: Hash computation failed");
+        return false;
+    }
+
+    if (hash != hashFile.hash()) {
+        ALOGE("RetrieveHashedFile: Hash mismatch");
+        // Remove corrupt file.
+        RemoveFile(fileName);
+        return false;
+    }
+
+    if (!deSerializedFile->ParseFromString(hashFile.file())) {
+        ALOGE("RetrieveHashedFile: Unable to parse file");
+        // Remove corrupt file.
+        RemoveFile(fileName);
+        return false;
+    }
+
+    return true;
+}
+
+bool DeviceFiles::FileExists(const std::string& fileName) const {
+    return mFileHandle.FileExists(fileName);
+}
+
+bool DeviceFiles::RemoveFile(const std::string& fileName) {
+    return mFileHandle.RemoveFile(fileName);
+}
+
+ssize_t DeviceFiles::GetFileSize(const std::string& fileName) const {
+    return mFileHandle.GetFileSize(fileName);
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/InitDataParser.cpp b/drm/mediadrm/plugins/clearkey/common/InitDataParser.cpp
new file mode 100644
index 0000000..fc839e9
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/InitDataParser.cpp
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "clearkey-InitDataParser"
+
+#include <algorithm>
+#include <arpa/inet.h>
+#include <utils/Log.h>
+
+#include "InitDataParser.h"
+
+#include "Base64.h"
+
+#include "ClearKeyUUID.h"
+#include "MimeType.h"
+
+namespace {
+const size_t kKeyIdSize = 16;
+const size_t kSystemIdSize = 16;
+}  // namespace
+
+namespace clearkeydrm {
+
+std::vector<uint8_t> StrToVector(const std::string& str) {
+    std::vector<uint8_t> vec(str.begin(), str.end());
+    return vec;
+}
+
+CdmResponseType InitDataParser::parse(const std::vector<uint8_t>& initData,
+                                      const std::string& mimeType,
+                                      CdmKeyType keyType,
+                                      std::vector<uint8_t>* licenseRequest) {
+    // Build a list of the key IDs
+    std::vector<const uint8_t*> keyIds;
+
+    if (mimeType == kIsoBmffVideoMimeType.c_str() || mimeType == kIsoBmffAudioMimeType.c_str() ||
+        mimeType == kCencInitDataFormat.c_str()) {
+        auto res = parsePssh(initData, &keyIds);
+        if (res != clearkeydrm::OK) {
+            return res;
+        }
+    } else if (mimeType == kWebmVideoMimeType.c_str() || mimeType == kWebmAudioMimeType.c_str() ||
+               mimeType == kWebmInitDataFormat.c_str()) {
+        // WebM "init data" is just a single key ID
+        if (initData.size() != kKeyIdSize) {
+            return clearkeydrm::ERROR_CANNOT_HANDLE;
+        }
+        keyIds.push_back(initData.data());
+    } else {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+
+    if (keyType == clearkeydrm::KEY_TYPE_RELEASE) {
+        // restore key
+    }
+
+    // Build the request
+    std::string requestJson = generateRequest(keyType, keyIds);
+    std::vector<uint8_t> requestJsonVec = StrToVector(requestJson);
+
+    licenseRequest->clear();
+    licenseRequest->insert(licenseRequest->end(), requestJsonVec.begin(), requestJsonVec.end());
+    return clearkeydrm::OK;
+}
+
+CdmResponseType InitDataParser::parsePssh(const std::vector<uint8_t>& initData,
+                                          std::vector<const uint8_t*>* keyIds) {
+    // Description of PSSH format:
+    // https://w3c.github.io/encrypted-media/format-registry/initdata/cenc.html
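+    //
+    // Version 1 PSSH box layout parsed here:
+    //   4 bytes   box size (must equal the init data size)
+    //   4 bytes   'pssh'
+    //   4 bytes   version (1) and flags
+    //   16 bytes  system ID (ClearKey UUID)
+    //   4 bytes   key ID count
+    //   16 bytes  per key ID
+    //   4 bytes   data size (expected to be 0)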
+    size_t readPosition = 0;
+
+    uint32_t expectedSize = initData.size();
+    const char psshIdentifier[4] = {'p', 's', 's', 'h'};
+    const uint8_t psshVersion1[4] = {1, 0, 0, 0};
+    uint32_t keyIdCount = 0;
+    size_t headerSize = sizeof(expectedSize) + sizeof(psshIdentifier) + sizeof(psshVersion1) +
+                        kSystemIdSize + sizeof(keyIdCount);
+    if (initData.size() < headerSize) {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+
+    // Validate size field
+    expectedSize = htonl(expectedSize);
+    if (memcmp(&initData[readPosition], &expectedSize, sizeof(expectedSize)) != 0) {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+    readPosition += sizeof(expectedSize);
+
+    // Validate PSSH box identifier
+    if (memcmp(&initData[readPosition], psshIdentifier, sizeof(psshIdentifier)) != 0) {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+    readPosition += sizeof(psshIdentifier);
+
+    // Validate EME version number
+    if (memcmp(&initData[readPosition], psshVersion1, sizeof(psshVersion1)) != 0) {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+    readPosition += sizeof(psshVersion1);
+
+    // Validate system ID
+    if (!isClearKeyUUID(&initData[readPosition])) {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+    readPosition += kSystemIdSize;
+
+    // Read key ID count
+    memcpy(&keyIdCount, &initData[readPosition], sizeof(keyIdCount));
+    keyIdCount = ntohl(keyIdCount);
+    readPosition += sizeof(keyIdCount);
+
+    uint64_t psshSize = 0;
+    if (__builtin_mul_overflow(keyIdCount, kKeyIdSize, &psshSize) ||
+        __builtin_add_overflow(readPosition, psshSize, &psshSize) ||
+        psshSize != initData.size() - sizeof(uint32_t) /* DataSize(0) */) {
+        return clearkeydrm::ERROR_CANNOT_HANDLE;
+    }
+
+    // Calculate the key ID offsets
+    for (uint32_t i = 0; i < keyIdCount; ++i) {
+        size_t keyIdPosition = readPosition + (i * kKeyIdSize);
+        keyIds->push_back(&initData[keyIdPosition]);
+    }
+    return clearkeydrm::OK;
+}
+
+std::string InitDataParser::generateRequest(CdmKeyType keyType,
+                                            const std::vector<const uint8_t*>& keyIds) {
+    const std::string kRequestPrefix("{\"kids\":[");
+    const std::string kTemporarySession("],\"type\":\"temporary\"}");
+    const std::string kPersistentSession("],\"type\":\"persistent-license\"}");
+
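+    // Produces the EME ClearKey license request JSON, e.g. (illustrative
+    // base64url key IDs): {"kids":["abc123","xyz789"],"type":"temporary"}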
+    std::string request(kRequestPrefix);
+    std::string encodedId;
+    for (size_t i = 0; i < keyIds.size(); ++i) {
+        encodedId.clear();
+        encodeBase64Url(keyIds[i], kKeyIdSize, &encodedId);
+        if (i != 0) {
+            request.append(",");
+        }
+        request.push_back('\"');
+        request.append(encodedId);
+        request.push_back('\"');
+    }
+    if (keyType == clearkeydrm::KEY_TYPE_STREAMING) {
+        request.append(kTemporarySession);
+    } else if (keyType == clearkeydrm::KEY_TYPE_OFFLINE ||
+               keyType == clearkeydrm::KEY_TYPE_RELEASE) {
+        request.append(kPersistentSession);
+    }
+
+    // Android's Base64 encoder produces padding. EME forbids padding.
+    const char kBase64Padding = '=';
+    request.erase(std::remove(request.begin(), request.end(), kBase64Padding), request.end());
+
+    return request;
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
new file mode 100644
index 0000000..ddbc594
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-JsonWebKey"
+
+#include <utils/Log.h>
+
+#include "JsonWebKey.h"
+
+#include "Base64.h"
+
+namespace {
+const std::string kBase64Padding("=");
+const std::string kKeysTag("keys");
+const std::string kKeyTypeTag("kty");
+const std::string kKeyTag("k");
+const std::string kKeyIdTag("kid");
+const std::string kMediaSessionType("type");
+const std::string kPersistentLicenseSession("persistent-license");
+const std::string kSymmetricKeyValue("oct");
+const std::string kTemporaryLicenseSession("temporary");
+}  // namespace
+
+namespace clearkeydrm {
+
+JsonWebKey::JsonWebKey() {}
+
+JsonWebKey::~JsonWebKey() {}
+
+/*
+ * Parses a JSON Web Key Set string, initializes a KeyMap with key id:key
+ * pairs from the JSON Web Key Set. Both key ids and keys are base64url
+ * encoded. The KeyMap contains base64url decoded key id:key pairs.
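+ *
+ * Example input (illustrative values):
+ *   {"keys":[{"kty":"oct","kid":"<base64url key id>","k":"<base64url key>"}]}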
+ *
+ * @return Returns false for errors, true for success.
+ */
+bool JsonWebKey::extractKeysFromJsonWebKeySet(const std::string& jsonWebKeySet, KeyMap* keys) {
+    keys->clear();
+
+    if (!parseJsonWebKeySet(jsonWebKeySet, &mJsonObjects)) {
+        return false;
+    }
+
+    // mJsonObjects[0] contains the entire JSON Web Key Set, including
+    // all the base64 encoded keys. Each key is also stored separately as
+    // a JSON object in mJsonObjects[1..n] where n is the total
+    // number of keys in the set.
+    if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
+        return false;
+    }
+
+    std::string encodedKey, encodedKeyId;
+    std::vector<uint8_t> decodedKey, decodedKeyId;
+
+    // mJsonObjects[1] contains the first JSON Web Key in the set
+    for (size_t i = 1; i < mJsonObjects.size(); ++i) {
+        encodedKeyId.clear();
+        encodedKey.clear();
+
+        if (!parseJsonObject(mJsonObjects[i], &mTokens)) return false;
+
+        if (findKey(mJsonObjects[i], &encodedKeyId, &encodedKey)) {
+            if (encodedKeyId.empty() || encodedKey.empty()) {
+                ALOGE("Must have both key id and key in the JsonWebKey set.");
+                continue;
+            }
+
+            if (!decodeBase64String(encodedKeyId, &decodedKeyId)) {
+                ALOGE("Failed to decode key id(%s)", encodedKeyId.c_str());
+                continue;
+            }
+
+            if (!decodeBase64String(encodedKey, &decodedKey)) {
+                ALOGE("Failed to decode key(%s)", encodedKey.c_str());
+                continue;
+            }
+
+            keys->insert(std::pair<std::vector<uint8_t>, std::vector<uint8_t>>(decodedKeyId,
+                                                                               decodedKey));
+        }
+    }
+    return true;
+}
+
+bool JsonWebKey::decodeBase64String(const std::string& encodedText,
+                                    std::vector<uint8_t>* decodedText) {
+    decodedText->clear();
+
+    // encodedText should not contain padding characters as per EME spec.
+    if (encodedText.find(kBase64Padding) != std::string::npos) {
+        return false;
+    }
+
+    // Since decodeBase64() requires padding characters,
+    // add them so length of encodedText is exactly a multiple of 4.
+    int remainder = encodedText.length() % 4;
+    std::string paddedText(encodedText);
+    if (remainder > 0) {
+        for (int i = 0; i < 4 - remainder; ++i) {
+            paddedText.append(kBase64Padding);
+        }
+    }
+
+    ::android::sp<Buffer> buffer = decodeBase64(paddedText);
+    if (buffer == nullptr) {
+        ALOGE("Malformed base64 encoded content found.");
+        return false;
+    }
+
+    decodedText->insert(decodedText->end(), buffer->base(), buffer->base() + buffer->size());
+    return true;
+}
+
+bool JsonWebKey::findKey(const std::string& jsonObject, std::string* keyId,
+                         std::string* encodedKey) {
+    std::string key, value;
+
+    // Only allow symmetric key, i.e. "kty":"oct" pair.
+    if (jsonObject.find(kKeyTypeTag) != std::string::npos) {
+        findValue(kKeyTypeTag, &value);
+        if (0 != value.compare(kSymmetricKeyValue)) return false;
+    }
+
+    if (jsonObject.find(kKeyIdTag) != std::string::npos) {
+        findValue(kKeyIdTag, keyId);
+    }
+
+    if (jsonObject.find(kKeyTag) != std::string::npos) {
+        findValue(kKeyTag, encodedKey);
+    }
+    return true;
+}
+
+void JsonWebKey::findValue(const std::string& key, std::string* value) {
+    value->clear();
+    const char* valueToken;
+    for (std::vector<std::string>::const_iterator nextToken = mTokens.begin();
+         nextToken != mTokens.end(); ++nextToken) {
+        if (0 == (*nextToken).compare(key)) {
+            if (nextToken + 1 == mTokens.end()) break;
+            valueToken = (*(nextToken + 1)).c_str();
+            value->assign(valueToken);
+            nextToken++;
+            break;
+        }
+    }
+}
+
+bool JsonWebKey::isJsonWebKeySet(const std::string& jsonObject) const {
+    if (jsonObject.find(kKeysTag) == std::string::npos) {
+        ALOGE("JSON Web Key does not contain keys.");
+        return false;
+    }
+    return true;
+}
+
+/*
+ * Parses a JSON objects string and initializes a vector of tokens.
+ *
+ * @return Returns false for errors, true for success.
+ */
+bool JsonWebKey::parseJsonObject(const std::string& jsonObject, std::vector<std::string>* tokens) {
+    jsmn_parser parser;
+
+    jsmn_init(&parser);
+    int numTokens = jsmn_parse(&parser, jsonObject.c_str(), jsonObject.size(), nullptr, 0);
+    if (numTokens < 0) {
+        ALOGE("Parser returns error code=%d", numTokens);
+        return false;
+    }
+
+    mJsmnTokens.clear();
+    mJsmnTokens.resize(numTokens);
+
+    jsmn_init(&parser);
+    int status = jsmn_parse(&parser, jsonObject.c_str(), jsonObject.size(), mJsmnTokens.data(),
+                            numTokens);
+    if (status < 0) {
+        ALOGE("Parser returns error code=%d", status);
+        return false;
+    }
+
+    tokens->clear();
+    std::string token;
+    const char* pjs;
+    for (int j = 0; j < numTokens; ++j) {
+        pjs = jsonObject.c_str() + mJsmnTokens[j].start;
+        if (mJsmnTokens[j].type == JSMN_STRING || mJsmnTokens[j].type == JSMN_PRIMITIVE) {
+            token.assign(pjs, mJsmnTokens[j].end - mJsmnTokens[j].start);
+            tokens->push_back(token);
+        }
+    }
+    return true;
+}
+
+/*
+ * Parses JSON Web Key Set string and initializes a vector of JSON objects.
+ *
+ * @return Returns false for errors, true for success.
+ */
+bool JsonWebKey::parseJsonWebKeySet(const std::string& jsonWebKeySet,
+                                    std::vector<std::string>* jsonObjects) {
+    if (jsonWebKeySet.empty()) {
+        ALOGE("Empty JSON Web Key");
+        return false;
+    }
+
+    // The jsmn parser only supports unicode encoding.
+    jsmn_parser parser;
+
+    // First pass computes the number of tokens. A token records its type and
+    // its start/end offsets in the original string.
+    jsmn_init(&parser);
+    int numTokens = jsmn_parse(&parser, jsonWebKeySet.c_str(), jsonWebKeySet.size(), nullptr, 0);
+    if (numTokens < 0) {
+        ALOGE("Parser returns error code=%d", numTokens);
+        return false;
+    }
+
+    mJsmnTokens.resize(numTokens);
+
+    jsmn_init(&parser);
+    int status = jsmn_parse(&parser, jsonWebKeySet.c_str(), jsonWebKeySet.size(),
+                            mJsmnTokens.data(), numTokens);
+    if (status < 0) {
+        ALOGE("Parser returns error code=%d", status);
+        return false;
+    }
+
+    std::string token;
+    const char* pjs;
+    for (int i = 0; i < numTokens; ++i) {
+        pjs = jsonWebKeySet.c_str() + mJsmnTokens[i].start;
+        if (mJsmnTokens[i].type == JSMN_OBJECT) {
+            token.assign(pjs, mJsmnTokens[i].end - mJsmnTokens[i].start);
+            jsonObjects->push_back(token);
+        }
+    }
+    return true;
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/common/MemoryFileSystem.cpp
new file mode 100644
index 0000000..1045458
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/MemoryFileSystem.cpp
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <utils/Log.h>
+#include <string>
+#include <vector>
+
+#include "MemoryFileSystem.h"
+
+namespace clearkeydrm {
+
+std::string MemoryFileSystem::GetFileName(const std::string& path) {
+    size_t index = path.find_last_of('/');
+    if (index != std::string::npos) {
+        return path.substr(index + 1);
+    } else {
+        return path;
+    }
+}
+
+bool MemoryFileSystem::FileExists(const std::string& fileName) const {
+    auto result = mMemoryFileSystem.find(fileName);
+    return result != mMemoryFileSystem.end();
+}
+
+ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
+    auto result = mMemoryFileSystem.find(fileName);
+    if (result != mMemoryFileSystem.end()) {
+        return static_cast<ssize_t>(result->second.getFileSize());
+    } else {
+        ALOGE("Failed to get size for %s", fileName.c_str());
+        return -1;
+    }
+}
+
+std::vector<std::string> MemoryFileSystem::ListFiles() const {
+    std::vector<std::string> list;
+    for (const auto& filename : mMemoryFileSystem) {
+        list.push_back(filename.first);
+    }
+    return list;
+}
+
+size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
+    std::string key = GetFileName(path);
+    auto result = mMemoryFileSystem.find(key);
+    if (result != mMemoryFileSystem.end()) {
+        std::string serializedHashFile = result->second.getContent();
+        buffer->assign(serializedHashFile);
+        return buffer->size();
+    } else {
+        ALOGE("Failed to read from %s", path.c_str());
+        return -1;
+    }
+}
+
+size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
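+    // Overwrites any existing entry with the same (basename) key and returns
+    // the number of bytes accepted.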
+    std::string key = GetFileName(path);
+    auto result = mMemoryFileSystem.find(key);
+    if (result != mMemoryFileSystem.end()) {
+        mMemoryFileSystem.erase(key);
+    }
+    mMemoryFileSystem.insert(std::pair<std::string, MemoryFile>(key, memoryFile));
+    return memoryFile.getFileSize();
+}
+
+bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
+    auto result = mMemoryFileSystem.find(fileName);
+    if (result != mMemoryFileSystem.end()) {
+        mMemoryFileSystem.erase(result);
+        return true;
+    } else {
+        ALOGE("Cannot find license to remove: %s", fileName.c_str());
+        return false;
+    }
+}
+
+bool MemoryFileSystem::RemoveAllFiles() {
+    mMemoryFileSystem.clear();
+    return mMemoryFileSystem.empty();
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/Session.cpp b/drm/mediadrm/plugins/clearkey/common/Session.cpp
new file mode 100644
index 0000000..d7fd13a
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/Session.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-Session"
+
+#include <utils/Log.h>
+
+#include "Session.h"
+
+#include "AesCtrDecryptor.h"
+#include "InitDataParser.h"
+#include "JsonWebKey.h"
+
+namespace clearkeydrm {
+
+using ::android::Mutex;
+using ::android::sp;
+
+CdmResponseType Session::getKeyRequest(const std::vector<uint8_t>& initData,
+                                       const std::string& mimeType,
+                                       CdmKeyType keyType,
+                                       std::vector<uint8_t>* keyRequest) const {
+    InitDataParser parser;
+    return parser.parse(initData, mimeType, keyType, keyRequest);
+}
+
+CdmResponseType Session::provideKeyResponse(const std::vector<uint8_t>& response) {
+    std::string responseString(reinterpret_cast<const char*>(response.data()), response.size());
+    KeyMap keys;
+
+    Mutex::Autolock lock(mMapLock);
+    JsonWebKey parser;
+    if (parser.extractKeysFromJsonWebKeySet(responseString, &keys)) {
+        for (auto& key : keys) {
+            std::string first(key.first.begin(), key.first.end());
+            std::string second(key.second.begin(), key.second.end());
+            mKeyMap.insert(
+                    std::pair<std::vector<uint8_t>, std::vector<uint8_t>>(key.first, key.second));
+        }
+        return clearkeydrm::OK;
+    } else {
+        return clearkeydrm::ERROR_UNKNOWN;
+    }
+}
+
+CdmResponseType Session::decrypt(const KeyId keyId, const Iv iv,
+                                 const uint8_t* srcPtr, uint8_t* destPtr,
+                                 const std::vector<int32_t>& clearDataLengths,
+                                 const std::vector<int32_t>& encryptedDataLengths,
+                                 size_t* bytesDecryptedOut) {
+    Mutex::Autolock lock(mMapLock);
+
+    if (getMockError() != clearkeydrm::OK) {
+        return getMockError();
+    }
+
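+    // Look up the content key for this key ID (loaded by provideKeyResponse)
+    // and delegate the subsample decryption to AesCtrDecryptor.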
+    std::vector<uint8_t> keyIdVector;
+    keyIdVector.clear();
+    keyIdVector.insert(keyIdVector.end(), keyId, keyId + kBlockSize);
+    std::map<std::vector<uint8_t>, std::vector<uint8_t>>::iterator itr;
+    itr = mKeyMap.find(keyIdVector);
+    if (itr == mKeyMap.end()) {
+        return clearkeydrm::ERROR_NO_LICENSE;
+    }
+
+    clearkeydrm::AesCtrDecryptor decryptor;
+    auto status = decryptor.decrypt(itr->second /*key*/, iv, srcPtr, destPtr,
+                                    clearDataLengths,
+                                    encryptedDataLengths,
+                                    bytesDecryptedOut);
+    return status;
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/SessionLibrary.cpp b/drm/mediadrm/plugins/clearkey/common/SessionLibrary.cpp
new file mode 100644
index 0000000..6b2ff38
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/SessionLibrary.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "clearkey-SessionLibrary"
+
+#include <utils/Log.h>
+
+#include "SessionLibrary.h"
+
+namespace clearkeydrm {
+
+using ::android::Mutex;
+using ::android::sp;
+
+Mutex SessionLibrary::sSingletonLock;
+SessionLibrary* SessionLibrary::sSingleton = NULL;
+
+SessionLibrary* SessionLibrary::get() {
+    Mutex::Autolock lock(sSingletonLock);
+
+    if (sSingleton == NULL) {
+        ALOGD("Instantiating Session Library Singleton.");
+        sSingleton = new SessionLibrary();
+    }
+
+    return sSingleton;
+}
+
+sp<Session> SessionLibrary::createSession() {
+    Mutex::Autolock lock(mSessionsLock);
+
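+    // Session IDs are the decimal string form of a monotonically increasing
+    // counter, copied as the full 16-byte buffer (zero-initialized so the
+    // unused tail bytes are deterministic).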
+    char sessionIdRaw[16] = {0};
+    snprintf(sessionIdRaw, sizeof(sessionIdRaw), "%u", mNextSessionId);
+
+    mNextSessionId += 1;
+
+    std::vector<uint8_t> sessionId;
+    sessionId.insert(sessionId.end(), sessionIdRaw,
+                     sessionIdRaw + sizeof(sessionIdRaw) / sizeof(uint8_t));
+
+    mSessions.insert(
+            std::pair<std::vector<uint8_t>, sp<Session>>(sessionId, new Session(sessionId)));
+    std::map<std::vector<uint8_t>, sp<Session>>::iterator itr = mSessions.find(sessionId);
+    if (itr != mSessions.end()) {
+        return itr->second;
+    } else {
+        return nullptr;
+    }
+}
+
+sp<Session> SessionLibrary::findSession(const std::vector<uint8_t>& sessionId) {
+    Mutex::Autolock lock(mSessionsLock);
+    std::map<std::vector<uint8_t>, sp<Session>>::iterator itr = mSessions.find(sessionId);
+    if (itr != mSessions.end()) {
+        return itr->second;
+    } else {
+        return nullptr;
+    }
+}
+
+void SessionLibrary::destroySession(const sp<Session>& session) {
+    Mutex::Autolock lock(mSessionsLock);
+    mSessions.erase(session->sessionId());
+}
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/ClearKeyUUID.h b/drm/mediadrm/plugins/clearkey/common/include/ClearKeyUUID.h
index fe10fba..8911024 100644
--- a/drm/mediadrm/plugins/clearkey/common/include/ClearKeyUUID.h
+++ b/drm/mediadrm/plugins/clearkey/common/include/ClearKeyUUID.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2014 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,9 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-#ifndef CLEARKEY_UUID_H_
-#define CLEARKEY_UUID_H_
+#pragma once
 
 #include <array>
 #include <cstdint>
@@ -27,6 +25,4 @@
 
 std::vector<std::array<uint8_t, 16>> getSupportedCryptoSchemes();
 
-} // namespace clearkeydrm
-
-#endif // CLEARKEY_UUID_H_
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/AesCtrDecryptor.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/AesCtrDecryptor.h
new file mode 100644
index 0000000..dbf3098
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/AesCtrDecryptor.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <cstdint>
+
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+
+class AesCtrDecryptor {
+  public:
+    AesCtrDecryptor() {}
+
+    CdmResponseType decrypt(const std::vector<uint8_t>& key, const Iv iv, const uint8_t* source,
+                            uint8_t* destination,
+                            const std::vector<int32_t>& clearDataLengths,
+                            const std::vector<int32_t>& encryptedDataLengths,
+                            size_t* bytesDecryptedOut);
+
+  private:
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(AesCtrDecryptor);
+};
+
+}  // namespace clearkeydrm
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Base64.h
similarity index 66%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
copy to drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Base64.h
index b08a604..075d247 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Base64.h
@@ -13,17 +13,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#pragma once
 
-package android.media;
+#include "Buffer.h"
 
-/**
- * Audio encapsulation type is used to describe if the audio data should be sent with a particular
- * encapsulation type or not.
- *
- * {@hide}
- */
-@Backing(type="int")
-enum AudioEncapsulationType {
-    NONE     = 0,
-    IEC61937 = 1,
-}
\ No newline at end of file
+namespace clearkeydrm {
+
+struct Buffer;
+
+::android::sp<Buffer> decodeBase64(const std::string& s);
+
+void encodeBase64(const void* data, size_t size, std::string* out);
+
+void encodeBase64Url(const void* data, size_t size, std::string* out);
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Buffer.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Buffer.h
new file mode 100644
index 0000000..d41c4f3
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Buffer.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <utils/RefBase.h>
+
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+struct Buffer : public ::android::RefBase {
+    explicit Buffer(size_t capacity);
+
+    uint8_t* base() { return reinterpret_cast<uint8_t*>(mData); }
+    uint8_t* data() { return reinterpret_cast<uint8_t*>(mData) + mRangeOffset; }
+    size_t capacity() const { return mCapacity; }
+    size_t size() const { return mRangeLength; }
+    size_t offset() const { return mRangeOffset; }
+
+  protected:
+    virtual ~Buffer();
+
+  private:
+    void* mData;
+    size_t mCapacity;
+    size_t mRangeOffset;
+    size_t mRangeLength;
+
+    bool mOwnsData;
+
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Buffer);
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
new file mode 100644
index 0000000..bfda388
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+namespace clearkeydrm {
+static const std::string kVendorKey("vendor");
+static const std::string kVendorValue("Google");
+static const std::string kVersionKey("version");
+static const std::string kVersionValue("1.2");
+static const std::string kPluginDescriptionKey("description");
+static const std::string kPluginDescriptionValue("ClearKey CDM");
+static const std::string kAlgorithmsKey("algorithms");
+static const std::string kAlgorithmsValue("");
+static const std::string kListenerTestSupportKey("listenerTestSupport");
+static const std::string kListenerTestSupportValue("true");
+static const std::string kDrmErrorTestKey("drmErrorTest");
+static const std::string kDrmErrorTestValue("");
+static const std::string kResourceContentionValue("resourceContention");
+static const std::string kLostStateValue("lostState");
+static const std::string kFrameTooLargeValue("frameTooLarge");
+static const std::string kInvalidStateValue("invalidState");
+static const std::string kAidlVersionKey("aidlVersion");
+
+static const std::string kDeviceIdKey("deviceId");
+static const uint8_t kTestDeviceIdData[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
+                                            0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf};
+
+// settable byte array property
+static const std::string kClientIdKey("clientId");
+
+// TODO stub out metrics for nw
+static const std::string kMetricsKey("metrics");
+static const uint8_t kMetricsData[] = {0};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyTypes.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyTypes.h
new file mode 100644
index 0000000..0cc9511
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyTypes.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+#include <map>
+#include <vector>
+
+namespace clearkeydrm {
+
+const uint8_t kBlockSize = 16;  // AES_BLOCK_SIZE;
+typedef uint8_t KeyId[kBlockSize];
+typedef uint8_t Iv[kBlockSize];
+
+typedef std::map<std::vector<uint8_t>, std::vector<uint8_t>> KeyMap;
+
+#define CLEARKEY_DISALLOW_COPY_AND_ASSIGN(TypeName) \
+    TypeName(const TypeName&) = delete;             \
+    void operator=(const TypeName&) = delete;
+
+#define CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(TypeName) \
+    TypeName() = delete;                                    \
+    TypeName(const TypeName&) = delete;                     \
+    void operator=(const TypeName&) = delete;
+
+enum CdmResponseType : int32_t {
+    OK = 0,
+    ERROR_NO_LICENSE = 1,
+    ERROR_SESSION_NOT_OPENED = 3,
+    ERROR_CANNOT_HANDLE = 4,
+    ERROR_INVALID_STATE = 5,
+    BAD_VALUE = 6,
+    ERROR_DECRYPT = 11,
+    ERROR_UNKNOWN = 12,
+    ERROR_INSUFFICIENT_SECURITY = 13,
+    ERROR_FRAME_TOO_LARGE = 14,
+    ERROR_SESSION_LOST_STATE = 15,
+    ERROR_RESOURCE_CONTENTION = 16,
+};
+
+enum CdmKeyType : int32_t {
+    KEY_TYPE_OFFLINE = 0,
+    KEY_TYPE_STREAMING = 1,
+    KEY_TYPE_RELEASE = 2,
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/DeviceFiles.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/DeviceFiles.h
new file mode 100644
index 0000000..5698441
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/DeviceFiles.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <errno.h>
+#include <stdio.h>
+#include <unistd.h>
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "ClearKeyTypes.h"
+#include "MemoryFileSystem.h"
+
+namespace clearkeydrm {
+class OfflineFile;
+class DeviceFiles {
+  public:
+    typedef enum {
+        kLicenseStateUnknown,
+        kLicenseStateActive,
+        kLicenseStateReleasing,
+    } LicenseState;
+
+    DeviceFiles() {}
+    virtual ~DeviceFiles() {}
+
+    virtual bool StoreLicense(const std::string& keySetId, LicenseState state,
+                              const std::string& keyResponse);
+
+    virtual bool RetrieveLicense(const std::string& key_set_id, LicenseState* state,
+                                 std::string* offlineLicense);
+
+    virtual bool LicenseExists(const std::string& keySetId);
+
+    virtual std::vector<std::string> ListLicenses() const;
+
+    virtual bool DeleteLicense(const std::string& keySetId);
+
+    virtual bool DeleteAllLicenses();
+
+  private:
+    bool FileExists(const std::string& path) const;
+    ssize_t GetFileSize(const std::string& fileName) const;
+    bool RemoveFile(const std::string& fileName);
+
+    bool RetrieveHashedFile(const std::string& fileName, OfflineFile* deSerializedFile);
+    bool StoreFileRaw(const std::string& fileName, const std::string& serializedFile);
+    bool StoreFileWithHash(const std::string& fileName, const std::string& serializedFile);
+
+    MemoryFileSystem mFileHandle;
+
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(DeviceFiles);
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/InitDataParser.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/InitDataParser.h
new file mode 100644
index 0000000..8ecc8e3
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/InitDataParser.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+
+class InitDataParser {
+  public:
+    InitDataParser() {}
+
+    CdmResponseType parse(const std::vector<uint8_t>& initData, const std::string& mimeType,
+                          CdmKeyType keyType, std::vector<uint8_t>* licenseRequest);
+
+  private:
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(InitDataParser);
+
+    CdmResponseType parsePssh(const std::vector<uint8_t>& initData,
+                              std::vector<const uint8_t*>* keyIds);
+
+    std::string generateRequest(CdmKeyType keyType, const std::vector<const uint8_t*>& keyIds);
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/JsonWebKey.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/JsonWebKey.h
new file mode 100644
index 0000000..6681553
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/JsonWebKey.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+#include <vector>
+
+#include "jsmn.h"
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+
+class JsonWebKey {
+  public:
+    JsonWebKey();
+    virtual ~JsonWebKey();
+
+    bool extractKeysFromJsonWebKeySet(const std::string& jsonWebKeySet, KeyMap* keys);
+
+  private:
+    std::vector<jsmntok_t> mJsmnTokens;
+    std::vector<std::string> mJsonObjects;
+    std::vector<std::string> mTokens;
+
+    bool decodeBase64String(const std::string& encodedText, std::vector<uint8_t>* decodedText);
+    bool findKey(const std::string& jsonObject, std::string* keyId, std::string* encodedKey);
+    void findValue(const std::string& key, std::string* value);
+    bool isJsonWebKeySet(const std::string& jsonObject) const;
+    bool parseJsonObject(const std::string& jsonObject, std::vector<std::string>* tokens);
+    bool parseJsonWebKeySet(const std::string& jsonWebKeySet,
+                            std::vector<std::string>* jsonObjects);
+
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(JsonWebKey);
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/MemoryFileSystem.h
new file mode 100644
index 0000000..5642a0f
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/MemoryFileSystem.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+
+// Using android file system requires clearkey plugin to update
+// its sepolicy. However, we are unable to update sepolicy for
+// older vendor partitions. To provide backward compatibility,
+// clearkey plugin implements a very simple file system in memory.
+// This memory file system does not support directory structure.
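+//
+// A minimal usage sketch (hypothetical call sequence; `blob` is assumed to be
+// a serialized license string and the path below is only illustrative):
+//   MemoryFileSystem fs;
+//   MemoryFileSystem::MemoryFile file;
+//   file.setFileName("ksid.0123.lic");
+//   file.setContent(blob);
+//   file.setFileSize(blob.size());
+//   fs.Write("/vendor/mediadrm/ksid.0123.lic", file);  // stored under the base name
+//   std::string out;
+//   fs.Read("/vendor/mediadrm/ksid.0123.lic", &out);   // retrieves the stored content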
+class MemoryFileSystem {
+  public:
+    struct MemoryFile {
+        std::string fileName;  // excludes path
+        std::string content;
+        size_t fileSize;
+
+        std::string getContent() const { return content; }
+        size_t getFileSize() const { return fileSize; }
+        void setContent(const std::string& file) { content = file; }
+        void setFileName(const std::string& name) { fileName = name; }
+        void setFileSize(size_t size) {
+            content.resize(size);
+            fileSize = size;
+        }
+    };
+
+    MemoryFileSystem() {}
+    virtual ~MemoryFileSystem() {}
+
+    bool FileExists(const std::string& fileName) const;
+    ssize_t GetFileSize(const std::string& fileName) const;
+    std::vector<std::string> ListFiles() const;
+    size_t Read(const std::string& pathName, std::string* buffer);
+    bool RemoveAllFiles();
+    bool RemoveFile(const std::string& fileName);
+    size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
+
+  private:
+    // License file name is made up of a unique keySetId, therefore,
+    // the filename can be used as the key to locate licenses in the
+    // memory file system.
+    std::map<std::string, MemoryFile> mMemoryFileSystem;
+
+    std::string GetFileName(const std::string& path);
+
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(MemoryFileSystem);
+};
+
+}  // namespace clearkeydrm
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/MimeTypeStdStr.h
similarity index 62%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
copy to drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/MimeTypeStdStr.h
index b08a604..dea2974 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/MimeTypeStdStr.h
@@ -13,17 +13,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#pragma once
 
-package android.media;
+#include <string>
 
-/**
- * Audio encapsulation type is used to describe if the audio data should be sent with a particular
- * encapsulation type or not.
- *
- * {@hide}
- */
-@Backing(type="int")
-enum AudioEncapsulationType {
-    NONE     = 0,
-    IEC61937 = 1,
-}
\ No newline at end of file
+namespace {
+const std::string kCencInitDataFormat("cenc");
+const std::string kIsoBmffAudioMimeType("audio/mp4");
+const std::string kIsoBmffVideoMimeType("video/mp4");
+const std::string kWebmInitDataFormat("webm");
+const std::string kWebmAudioMimeType("audio/webm");
+const std::string kWebmVideoMimeType("video/webm");
+}  // namespace
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Session.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Session.h
new file mode 100644
index 0000000..e2d4e32
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/Session.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <utils/Mutex.h>
+#include <utils/RefBase.h>
+
+#include <cstdint>
+#include <string>
+#include <vector>
+
+#include "ClearKeyTypes.h"
+
+namespace clearkeydrm {
+
+class Session : public ::android::RefBase {
+  public:
+    explicit Session(const std::vector<uint8_t>& sessionId)
+        : mSessionId(sessionId), mMockError(clearkeydrm::OK) {}
+    virtual ~Session() {}
+
+    const std::vector<uint8_t>& sessionId() const { return mSessionId; }
+
+    CdmResponseType getKeyRequest(const std::vector<uint8_t>& initDataType,
+                                  const std::string& mimeType,
+                                  CdmKeyType keyType,
+                                  std::vector<uint8_t>* keyRequest) const;
+
+    CdmResponseType provideKeyResponse(const std::vector<uint8_t>& response);
+
+    CdmResponseType decrypt(const KeyId keyId, const Iv iv, const uint8_t* srcPtr, uint8_t* dstPtr,
+                            const std::vector<int32_t>& clearDataLengths,
+                            const std::vector<int32_t>& encryptedDataLengths,
+                            size_t* bytesDecryptedOut);
+
+    void setMockError(CdmResponseType error) { mMockError = error; }
+    CdmResponseType getMockError() const { return mMockError; }
+
+  private:
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Session);
+
+    const std::vector<uint8_t> mSessionId;
+    KeyMap mKeyMap;
+    ::android::Mutex mMapLock;
+
+    // For mocking error return scenarios
+    CdmResponseType mMockError;
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
new file mode 100644
index 0000000..17d4a22
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <utils/Mutex.h>
+#include <utils/RefBase.h>
+
+#include "ClearKeyTypes.h"
+#include "Session.h"
+
+namespace clearkeydrm {
+
+class SessionLibrary {
+  public:
+    static SessionLibrary* get();
+
+    ::android::sp<Session> createSession();
+
+    ::android::sp<Session> findSession(const std::vector<uint8_t>& sessionId);
+
+    void destroySession(const ::android::sp<Session>& session);
+
+    size_t numOpenSessions() const { return mSessions.size(); }
+
+  private:
+    CLEARKEY_DISALLOW_COPY_AND_ASSIGN(SessionLibrary);
+
+    SessionLibrary() : mNextSessionId(1) {}
+
+    static ::android::Mutex sSingletonLock;
+    static SessionLibrary* sSingleton;
+
+    ::android::Mutex mSessionsLock;
+    uint32_t mNextSessionId;
+    std::map<std::vector<uint8_t>, ::android::sp<Session>> mSessions;
+};
+
+}  // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/common/protos/DeviceFiles.proto b/drm/mediadrm/plugins/clearkey/common/protos/DeviceFiles.proto
new file mode 100644
index 0000000..2d98656
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/common/protos/DeviceFiles.proto
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+syntax = "proto2";
+
+package clearkeydrm;
+
+// need this if we are using libprotobuf-cpp-2.3.0-lite
+option optimize_for = LITE_RUNTIME;
+
+message License {
+  enum LicenseState {
+    ACTIVE = 1;
+    RELEASING = 2;
+  }
+
+  optional LicenseState state = 1;
+  optional bytes license = 2;
+}
+
+message OfflineFile {
+  enum FileType {
+    LICENSE = 1;
+  }
+
+  enum FileVersion {
+    VERSION_1 = 1;
+  }
+
+  optional FileType type = 1;
+  optional FileVersion version = 2 [default = VERSION_1];
+  optional License license = 3;
+
+}
+
+message HashedFile {
+  optional bytes file = 1;
+  // A raw (not hex-encoded) SHA256, taken over the bytes of 'file'.
+  optional bytes hash = 2;
+}
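+
+// Illustrative (hypothetical) C++ check of the layout above, assuming the
+// generated protobuf-lite classes and SHA256 from OpenSSL's <openssl/sha.h>:
+//   bool VerifyHashedFile(const clearkeydrm::HashedFile& hf) {
+//       uint8_t digest[SHA256_DIGEST_LENGTH];
+//       SHA256(reinterpret_cast<const uint8_t*>(hf.file().data()),
+//              hf.file().size(), digest);
+//       return hf.hash().size() == SHA256_DIGEST_LENGTH &&
+//              memcmp(digest, hf.hash().data(), SHA256_DIGEST_LENGTH) == 0;
+//   }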
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index 6c68532..b82d996 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -93,6 +93,11 @@
     srcs: ["protos/DeviceFiles.proto"],
 }
 
+cc_library {
+    name: "libclearkeyhidl",
+    defaults: ["clearkey_service_defaults"],
+}
+
 cc_binary {
     name: "android.hardware.drm@1.2-service.clearkey",
     defaults: ["clearkey_service_defaults"],
@@ -126,3 +131,37 @@
     init_rc: ["android.hardware.drm@1.4-service-lazy.clearkey.rc"],
     vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
 }
+
+cc_fuzz {
+    name: "clearkeyV1.4_fuzzer",
+    vendor: true,
+    srcs: [
+        "fuzzer/clearkeyV1.4_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libclearkeyhidl",
+        "libclearkeycommon",
+        "libclearkeydevicefiles-protos",
+        "libjsmn",
+        "libprotobuf-cpp-lite",
+    ],
+    shared_libs: [
+        "android.hidl.allocator@1.0",
+        "android.hardware.drm@1.0",
+        "android.hardware.drm@1.1",
+        "android.hardware.drm@1.2",
+        "android.hardware.drm@1.3",
+        "android.hardware.drm@1.4",
+        "libcrypto",
+        "libhidlbase",
+        "libhidlmemory",
+        "liblog",
+        "libutils",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index 6a374f9..32d7723 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -187,7 +187,7 @@
         return Status_V1_2::ERROR_DRM_CANNOT_HANDLE;
     }
 
-    *defaultUrl = "";
+    *defaultUrl = "https://default.url";
     *keyRequestType = KeyRequestType_V1_1::UNKNOWN;
     *request = std::vector<uint8_t>();
 
@@ -221,7 +221,6 @@
         if (requestString.find(kOfflineLicense) != std::string::npos) {
             std::string emptyResponse;
             std::string keySetIdString(keySetId.begin(), keySetId.end());
-            Mutex::Autolock lock(mFileHandleLock);
             if (!mFileHandle.StoreLicense(keySetIdString,
                     DeviceFiles::kLicenseStateReleasing,
                     emptyResponse)) {
@@ -337,7 +336,6 @@
         }
         *keySetId = kKeySetIdPrefix + ByteArrayToHexString(
                 reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
-        Mutex::Autolock lock(mFileHandleLock);
         if (mFileHandle.LicenseExists(*keySetId)) {
             // collision, regenerate
             ALOGV("Retry generating KeySetId");
@@ -395,7 +393,6 @@
     if (status == Status::OK) {
         if (isOfflineLicense) {
             if (isRelease) {
-                Mutex::Autolock lock(mFileHandleLock);
                 mFileHandle.DeleteLicense(keySetId);
                 mSessionLibrary->destroySession(session);
             } else {
@@ -404,7 +401,6 @@
                     return Void();
                 }
 
-                Mutex::Autolock lock(mFileHandleLock);
                 bool ok = mFileHandle.StoreLicense(
                         keySetId,
                         DeviceFiles::kLicenseStateActive,
@@ -459,7 +455,6 @@
         DeviceFiles::LicenseState licenseState;
         std::string offlineLicense;
         Status status = Status::OK;
-        Mutex::Autolock lock(mFileHandleLock);
         if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
                 &licenseState, &offlineLicense)) {
             ALOGE("Failed to restore offline license");
@@ -769,8 +764,6 @@
 }
 
 Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
-    Mutex::Autolock lock(mFileHandleLock);
-
     std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
     std::vector<KeySetId> keySetIds;
     if (mMockError != Status_V1_2::OK) {
@@ -791,7 +784,6 @@
         return toStatus_1_0(mMockError);
     }
     std::string licenseName(keySetId.begin(), keySetId.end());
-    Mutex::Autolock lock(mFileHandleLock);
     if (mFileHandle.DeleteLicense(licenseName)) {
         return Status::OK;
     }
@@ -800,8 +792,6 @@
 
 Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
         getOfflineLicenseState_cb _hidl_cb) {
-    Mutex::Autolock lock(mFileHandleLock);
-
     std::string licenseName(keySetId.begin(), keySetId.end());
     DeviceFiles::LicenseState state;
     std::string license;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index e61db3f..56910be 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -24,13 +24,11 @@
 }
 
 bool MemoryFileSystem::FileExists(const std::string& fileName) const {
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     return result != mMemoryFileSystem.end();
 }
 
 ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         return static_cast<ssize_t>(result->second.getFileSize());
@@ -42,7 +40,6 @@
 
 std::vector<std::string> MemoryFileSystem::ListFiles() const {
     std::vector<std::string> list;
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     for (const auto& filename : mMemoryFileSystem) {
         list.push_back(filename.first);
     }
@@ -51,7 +48,6 @@
 
 size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
     std::string key = GetFileName(path);
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         std::string serializedHashFile = result->second.getContent();
@@ -65,7 +61,6 @@
 
 size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
     std::string key = GetFileName(path);
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(key);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(key);
@@ -75,7 +70,6 @@
 }
 
 bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     auto result = mMemoryFileSystem.find(fileName);
     if (result != mMemoryFileSystem.end()) {
         mMemoryFileSystem.erase(result);
@@ -87,7 +81,6 @@
 }
 
 bool MemoryFileSystem::RemoveAllFiles() {
-    std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
     mMemoryFileSystem.clear();
     return mMemoryFileSystem.empty();
 }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
index 9afd3d7..ec4517d 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
@@ -11,4 +11,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
index c1abe7f..3b48cf2 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
@@ -10,4 +10,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
index 1e0d431..6e64978 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
@@ -13,4 +13,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
index 8130511..e302e1b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
@@ -11,4 +11,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
index 46aba88..84a63a1 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
@@ -15,4 +15,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
index 8186933..649599e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
@@ -13,4 +13,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md
new file mode 100644
index 0000000..cb45460
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md
@@ -0,0 +1,52 @@
+# Fuzzer for android.hardware.drm@1.4-service.clearkey
+
+## Plugin Design Considerations
+The fuzzer plugin for android.hardware.drm@1.4-service.clearkey is designed based on an understanding of the
+source code and aims to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+android.hardware.drm@1.4-service.clearkey supports the following parameters:
+1. Security Level (parameter name: `securityLevel`)
+2. Mime Type (parameter name: `mimeType`)
+3. Key Type (parameter name: `keyType`)
+4. Crypto Mode (parameter name: `cryptoMode`)
+
+| Parameter | Valid Values | Configured Value |
+|------------- |-------------| ----- |
+| `securityLevel` | 0.`SecurityLevel::UNKNOWN` 1.`SecurityLevel::SW_SECURE_CRYPTO` 2.`SecurityLevel::SW_SECURE_DECODE` 3.`SecurityLevel::HW_SECURE_CRYPTO`  4.`SecurityLevel::HW_SECURE_DECODE` 5.`SecurityLevel::HW_SECURE_ALL`| Value obtained from FuzzedDataProvider in the range 0 to 5|
+| `mimeType` | 0.`video/mp4` 1.`video/mpeg` 2.`video/x-flv` 3.`video/mj2` 4.`video/3gp2` 5.`video/3gpp` 6.`video/3gpp2` 7.`audio/mp4` 8.`audio/mpeg` 9.`audio/aac` 10.`audio/3gp2` 11.`audio/3gpp` 12.`audio/3gpp2` 13.`audio/webm` 14.`video/webm` 15.`webm` 16.`cenc` 17.`video/unknown` 18.`audio/unknown`| Value obtained from FuzzedDataProvider in the range 0 to 18|
+| `keyType` | 0.`KeyType::OFFLINE` 1.`KeyType::STREAMING` 2.`KeyType::RELEASE` | Value obtained from FuzzedDataProvider in the range 0 to 2|
+| `cryptoMode` | 0.`Mode::UNENCRYPTED` 1.`Mode::AES_CTR` 2.`Mode::AES_CBC_CTS` 3.`Mode::AES_CBC` | Value obtained from FuzzedDataProvider in the range 0 to 3|
+
+This also ensures that the plugin is always deterministic for any given input.
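+
+As a condensed sketch, parameter selection amounts to indexing a fixed table with
+a value drawn from `FuzzedDataProvider` (this mirrors the `getValueFromArray`
+helper in `clearkeyV1.4_fuzzer.cpp`; the usage line is only an example):
+```
+template <typename T, size_t size>
+T getValueFromArray(FuzzedDataProvider *fdp, const T (&arr)[size]) {
+  // Pick one entry of a fixed table, driven by the fuzzed input bytes.
+  return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
+}
+
+// e.g. const char *mimeType = getValueFromArray(mFDP, kMimeType);
+```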
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and does not `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the clearkeyV1.4_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer:
+```
+  $ mm -j$(nproc) clearkeyV1.4_fuzzer
+```
+#### Steps to run
+To run on the device:
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/clearkeyV1.4_fuzzer/vendor/hw/clearkeyV1.4_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp
new file mode 100644
index 0000000..afe0e6c
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp
@@ -0,0 +1,719 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <include/CreatePluginFactories.h>
+
+#include <android/hidl/allocator/1.0/IAllocator.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <hidlmemory/mapping.h>
+#include <include/ClearKeyDrmProperties.h>
+#include <include/CryptoFactory.h>
+#include <include/CryptoPlugin.h>
+#include <include/DrmPlugin.h>
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+namespace drm = ::android::hardware::drm;
+using namespace std;
+using namespace android;
+using ::android::sp;
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hidl::allocator::V1_0::IAllocator;
+using ::android::hidl::memory::V1_0::IMemory;
+using drm::V1_0::BufferType;
+using drm::V1_0::DestinationBuffer;
+using drm::V1_0::EventType;
+using drm::V1_0::ICryptoPlugin;
+using drm::V1_0::IDrmPlugin;
+using drm::V1_0::IDrmPluginListener;
+using drm::V1_0::KeyedVector;
+using drm::V1_0::KeyStatus;
+using drm::V1_0::KeyStatusType;
+using drm::V1_0::KeyType;
+using drm::V1_0::Mode;
+using drm::V1_0::Pattern;
+using drm::V1_0::SecureStop;
+using drm::V1_0::SharedBuffer;
+using drm::V1_0::Status;
+using drm::V1_0::SubSample;
+using drm::V1_1::DrmMetricGroup;
+using drm::V1_1::HdcpLevel;
+using drm::V1_1::SecureStopRelease;
+using drm::V1_1::SecurityLevel;
+using drm::V1_2::KeySetId;
+using drm::V1_2::OfflineLicenseState;
+using drm::V1_4::clearkey::ICryptoFactory;
+using drm::V1_4::clearkey::IDrmFactory;
+using drm::V1_4::clearkey::kAlgorithmsKey;
+using drm::V1_4::clearkey::kClientIdKey;
+using drm::V1_4::clearkey::kDeviceIdKey;
+using drm::V1_4::clearkey::kDrmErrorTestKey;
+using drm::V1_4::clearkey::kListenerTestSupportKey;
+using drm::V1_4::clearkey::kMetricsKey;
+using drm::V1_4::clearkey::kPluginDescriptionKey;
+using drm::V1_4::clearkey::kVendorKey;
+using drm::V1_4::clearkey::kVersionKey;
+
+typedef ::android::hardware::hidl_vec<uint8_t> SessionId;
+typedef ::android::hardware::hidl_vec<uint8_t> SecureStopId;
+
+static const uint8_t kInvalidUUID[] = {0x10, 0x20, 0x30, 0x40, 0x50, 0x60,
+                                       0x70, 0x80, 0x10, 0x20, 0x30, 0x40,
+                                       0x50, 0x60, 0x70, 0x80};
+
+static const uint8_t kClearKeyUUID[] = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85,
+                                        0xB3, 0xC9, 0x78, 0x1A, 0xB0, 0x30,
+                                        0xAF, 0x78, 0xD3, 0x0E};
+
+const SecurityLevel kSecurityLevel[] = {
+    SecurityLevel::UNKNOWN,          SecurityLevel::SW_SECURE_CRYPTO,
+    SecurityLevel::SW_SECURE_DECODE, SecurityLevel::HW_SECURE_CRYPTO,
+    SecurityLevel::HW_SECURE_DECODE, SecurityLevel::HW_SECURE_ALL};
+
+const char *kMimeType[] = {
+    "video/mp4",  "video/mpeg",  "video/x-flv",   "video/mj2",    "video/3gp2",
+    "video/3gpp", "video/3gpp2", "audio/mp4",     "audio/mpeg",   "audio/aac",
+    "audio/3gp2", "audio/3gpp",  "audio/3gpp2",   "audio/webm",   "video/webm",
+    "webm",       "cenc",        "video/unknown", "audio/unknown"};
+
+const char *kCipherAlgorithm[] = {"AES/CBC/NoPadding", ""};
+
+const char *kMacAlgorithm[] = {"HmacSHA256", ""};
+
+const char *kRSAAlgorithm[] = {"RSASSA-PSS-SHA1", ""};
+
+const std::string kProperty[] = {kVendorKey,
+                                 kVersionKey,
+                                 kPluginDescriptionKey,
+                                 kAlgorithmsKey,
+                                 kListenerTestSupportKey,
+                                 kDrmErrorTestKey,
+                                 kDeviceIdKey,
+                                 kClientIdKey,
+                                 kMetricsKey,
+                                 "placeholder"};
+
+const KeyType kKeyType[] = {KeyType::OFFLINE, KeyType::STREAMING,
+                            KeyType::RELEASE};
+
+const Mode kCryptoMode[] = {Mode::UNENCRYPTED, Mode::AES_CTR, Mode::AES_CBC_CTS,
+                            Mode::AES_CBC};
+
+const hidl_vec<uint8_t> validInitData = {
+    // BMFF box header (4 bytes size + 'pssh')
+    0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
+    // full box header (version = 1 flags = 0)
+    0x01, 0x00, 0x00, 0x00,
+    // system id
+    0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02, 0xac, 0xe3, 0x3c, 0x1e,
+    0x52, 0xe2, 0xfb, 0x4b,
+    // number of key ids
+    0x00, 0x00, 0x00, 0x01,
+    // key id
+    0x60, 0x06, 0x1e, 0x01, 0x7e, 0x47, 0x7e, 0x87, 0x7e, 0x57, 0xd0, 0x0d,
+    0x1e, 0xd0, 0x0d, 0x1e,
+    // size of data, must be zero
+    0x00, 0x00, 0x00, 0x00};
+
+const hidl_vec<uint8_t> validKeyResponse = {
+    0x7b, 0x22, 0x6b, 0x65, 0x79, 0x73, 0x22, 0x3a, 0x5b, 0x7b, 0x22,
+    0x6b, 0x74, 0x79, 0x22, 0x3a, 0x22, 0x6f, 0x63, 0x74, 0x22, 0x2c,
+    0x22, 0x6b, 0x69, 0x64, 0x22, 0x3a, 0x22, 0x59, 0x41, 0x59, 0x65,
+    0x41, 0x58, 0x35, 0x48, 0x66, 0x6f, 0x64, 0x2d, 0x56, 0x39, 0x41,
+    0x4e, 0x48, 0x74, 0x41, 0x4e, 0x48, 0x67, 0x22, 0x2c, 0x22, 0x6b,
+    0x22, 0x3a, 0x22, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x54, 0x65,
+    0x73, 0x74, 0x4b, 0x65, 0x79, 0x42, 0x61, 0x73, 0x65, 0x36, 0x34,
+    0x67, 0x67, 0x67, 0x22, 0x7d, 0x5d, 0x7d, 0x0a};
+
+const size_t kAESBlockSize = 16;
+const size_t kMaxStringLength = 100;
+const size_t kMaxSubSamples = 10;
+const size_t kMaxNumBytes = 1000;
+const size_t kSegmentIndex = 0;
+
+template <typename T, size_t size>
+T getValueFromArray(FuzzedDataProvider *fdp, const T (&arr)[size]) {
+  return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
+}
+
+class TestDrmPluginListener : public IDrmPluginListener {
+public:
+  TestDrmPluginListener() {}
+  virtual ~TestDrmPluginListener() {}
+
+  virtual Return<void> sendEvent(EventType /*eventType*/,
+                                 const hidl_vec<uint8_t> & /*sessionId*/,
+                                 const hidl_vec<uint8_t> & /*data*/) override {
+    return Return<void>();
+  }
+
+  virtual Return<void>
+  sendExpirationUpdate(const hidl_vec<uint8_t> & /*sessionId*/,
+                       int64_t /*expiryTimeInMS*/) override {
+    return Return<void>();
+  }
+
+  virtual Return<void>
+  sendKeysChange(const hidl_vec<uint8_t> & /*sessionId*/,
+                 const hidl_vec<KeyStatus> & /*keyStatusList*/,
+                 bool /*hasNewUsableKey*/) override {
+    return Return<void>();
+  }
+};
+
+class ClearKeyFuzzer {
+public:
+  ~ClearKeyFuzzer() { deInit(); }
+  bool init();
+  void process(const uint8_t *data, size_t size);
+
+private:
+  void deInit();
+  void invokeDrmPlugin(const uint8_t *data, size_t size);
+  void invokeCryptoPlugin(const uint8_t *data);
+  void invokeDrm(const uint8_t *data, size_t size);
+  void invokeCrypto(const uint8_t *data);
+  void invokeDrmDecryptEncryptAPI(const uint8_t *data, size_t size);
+  bool invokeDrmFactory();
+  bool invokeCryptoFactory();
+  void invokeDrmV1_4API();
+  void invokeDrmSetAlgorithmAPI();
+  void invokeDrmPropertyAPI();
+  void invokeDrmSecureStopAPI();
+  void invokeDrmOfflineLicenseAPI(const uint8_t *data, size_t size);
+  SessionId getSessionId();
+  SecureStopRelease makeSecureRelease(const SecureStop &stop);
+  sp<IDrmFactory> mDrmFactory = nullptr;
+  sp<ICryptoFactory> mCryptoFactory = nullptr;
+  sp<IDrmPlugin> mDrmPlugin = nullptr;
+  sp<drm::V1_1::IDrmPlugin> mDrmPluginV1_1 = nullptr;
+  sp<drm::V1_2::IDrmPlugin> mDrmPluginV1_2 = nullptr;
+  sp<drm::V1_4::IDrmPlugin> mDrmPluginV1_4 = nullptr;
+  sp<drm::V1_4::ICryptoPlugin> mCryptoPluginV1_4 = nullptr;
+  sp<ICryptoPlugin> mCryptoPlugin = nullptr;
+  FuzzedDataProvider *mFDP = nullptr;
+  SessionId mSessionId = {};
+  SessionId mSessionIdV1 = {};
+};
+
+void ClearKeyFuzzer::deInit() {
+  if (mDrmPluginV1_1) {
+    mDrmPluginV1_1->closeSession(mSessionIdV1);
+  }
+  if (mDrmPluginV1_2) {
+    mDrmPluginV1_2->closeSession(mSessionId);
+  }
+  mDrmFactory.clear();
+  mCryptoFactory.clear();
+  mDrmPlugin.clear();
+  mDrmPluginV1_1.clear();
+  mDrmPluginV1_2.clear();
+  mDrmPluginV1_4.clear();
+  mCryptoPlugin.clear();
+  mCryptoPluginV1_4.clear();
+  mSessionId = {};
+  mSessionIdV1 = {};
+}
+
+void ClearKeyFuzzer::invokeDrmV1_4API() {
+  mDrmPluginV1_4->requiresSecureDecoderDefault(
+      getValueFromArray(mFDP, kMimeType));
+  mDrmPluginV1_4->requiresSecureDecoder(
+      getValueFromArray(mFDP, kMimeType),
+      getValueFromArray(mFDP, kSecurityLevel));
+  mDrmPluginV1_4->setPlaybackId(
+      mSessionId, mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str());
+  drm::V1_4::IDrmPlugin::getLogMessages_cb cb =
+      [&]([[maybe_unused]] drm::V1_4::Status status,
+          [[maybe_unused]] hidl_vec<drm::V1_4::LogMessage> logs) {};
+  mDrmPluginV1_4->getLogMessages(cb);
+}
+
+void ClearKeyFuzzer::invokeDrmSetAlgorithmAPI() {
+  const hidl_string cipherAlgo =
+      mFDP->ConsumeBool()
+          ? mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str()
+          : hidl_string(kCipherAlgorithm[mFDP->ConsumeBool()]);
+  mDrmPluginV1_2->setCipherAlgorithm(mSessionId, cipherAlgo);
+
+  const hidl_string macAlgo =
+      mFDP->ConsumeBool()
+          ? mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str()
+          : hidl_string(kMacAlgorithm[mFDP->ConsumeBool()]);
+  mDrmPluginV1_2->setMacAlgorithm(mSessionId, macAlgo);
+}
+
+void ClearKeyFuzzer::invokeDrmPropertyAPI() {
+  mDrmPluginV1_2->setPropertyString(
+      hidl_string(getValueFromArray(mFDP, kProperty)), hidl_string("value"));
+
+  hidl_string stringValue;
+  mDrmPluginV1_2->getPropertyString(
+      getValueFromArray(mFDP, kProperty),
+      [&](Status status, const hidl_string &hValue) {
+        if (status == Status::OK) {
+          stringValue = hValue;
+        }
+      });
+
+  hidl_vec<uint8_t> value = {};
+  mDrmPluginV1_2->setPropertyByteArray(
+      hidl_string(getValueFromArray(mFDP, kProperty)), value);
+
+  hidl_vec<uint8_t> byteValue;
+  mDrmPluginV1_2->getPropertyByteArray(
+      getValueFromArray(mFDP, kProperty),
+      [&](Status status, const hidl_vec<uint8_t> &hValue) {
+        if (status == Status::OK) {
+          byteValue = hValue;
+        }
+      });
+}
+
+SessionId ClearKeyFuzzer::getSessionId() {
+  SessionId emptySessionId = {};
+  return mFDP->ConsumeBool() ? mSessionId : emptySessionId;
+}
+
+void ClearKeyFuzzer::invokeDrmDecryptEncryptAPI(const uint8_t *data,
+                                                size_t size) {
+  uint32_t currSessions, maximumSessions;
+  mDrmPluginV1_2->getNumberOfSessions(
+      [&](Status status, uint32_t hCurrentSessions, uint32_t hMaxSessions) {
+        if (status == Status::OK) {
+          currSessions = hCurrentSessions;
+          maximumSessions = hMaxSessions;
+        }
+      });
+
+  HdcpLevel connected, maximum;
+  mDrmPluginV1_2->getHdcpLevels([&](Status status,
+                                    const HdcpLevel &hConnectedLevel,
+                                    const HdcpLevel &hMaxLevel) {
+    if (status == Status::OK) {
+      connected = hConnectedLevel;
+      maximum = hMaxLevel;
+    }
+  });
+
+  drm::V1_2::HdcpLevel connectedV1_2, maximumV1_2;
+  mDrmPluginV1_2->getHdcpLevels_1_2(
+      [&](drm::V1_2::Status status, const drm::V1_2::HdcpLevel &connectedLevel,
+          const drm::V1_2::HdcpLevel &maxLevel) {
+        if (status == drm::V1_2::Status::OK) {
+          connectedV1_2 = connectedLevel;
+          maximumV1_2 = maxLevel;
+        }
+      });
+
+  SecurityLevel securityLevel;
+  mDrmPluginV1_2->getSecurityLevel(mSessionId,
+                                   [&](Status status, SecurityLevel hLevel) {
+                                     if (status == Status::OK) {
+                                       securityLevel = hLevel;
+                                     }
+                                   });
+
+  hidl_vec<DrmMetricGroup> metrics;
+  mDrmPluginV1_2->getMetrics(
+      [&](Status status, hidl_vec<DrmMetricGroup> hMetricGroups) {
+        if (status == Status::OK) {
+          metrics = hMetricGroups;
+        }
+      });
+
+  hidl_string certificateType;
+  hidl_string certificateAuthority;
+  mDrmPluginV1_2->getProvisionRequest(certificateType, certificateAuthority,
+                                      [&]([[maybe_unused]] Status status,
+                                          const hidl_vec<uint8_t> &,
+                                          const hidl_string &) {});
+
+  mDrmPluginV1_2->getProvisionRequest_1_2(
+      certificateType, certificateAuthority,
+      [&]([[maybe_unused]] drm::V1_2::Status status, const hidl_vec<uint8_t> &,
+          const hidl_string &) {});
+
+  hidl_vec<uint8_t> response;
+  mDrmPluginV1_2->provideProvisionResponse(
+      response, [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &,
+                    const hidl_vec<uint8_t> &) {});
+
+  hidl_vec<uint8_t> initData = {};
+  if (mFDP->ConsumeBool()) {
+    initData = validInitData;
+  } else {
+    initData.setToExternal(const_cast<uint8_t *>(data), kAESBlockSize);
+  }
+  hidl_string mimeType = getValueFromArray(mFDP, kMimeType);
+  KeyType keyType = mFDP->ConsumeBool()
+                        ? static_cast<KeyType>(mFDP->ConsumeIntegral<size_t>())
+                        : getValueFromArray(mFDP, kKeyType);
+  KeyedVector optionalParameters;
+  mDrmPluginV1_2->getKeyRequest_1_2(
+      mSessionId, initData, mimeType, keyType, optionalParameters,
+      [&]([[maybe_unused]] drm::V1_2::Status status, const hidl_vec<uint8_t> &,
+          drm::V1_1::KeyRequestType, const hidl_string &) {});
+  mDrmPluginV1_1->getKeyRequest_1_1(
+      mSessionIdV1, initData, mimeType, keyType, optionalParameters,
+      [&]([[maybe_unused]] drm::V1_0::Status status, const hidl_vec<uint8_t> &,
+          drm::V1_1::KeyRequestType, const hidl_string &) {});
+  hidl_vec<uint8_t> emptyInitData = {};
+  mDrmPlugin->getKeyRequest(
+      mSessionId, mFDP->ConsumeBool() ? initData : emptyInitData, mimeType,
+      keyType, optionalParameters,
+      [&]([[maybe_unused]] drm::V1_0::Status status, const hidl_vec<uint8_t> &,
+          drm::V1_0::KeyRequestType, const hidl_string &) {});
+
+  hidl_vec<uint8_t> keyResponse = {};
+  if (mFDP->ConsumeBool()) {
+    keyResponse = validKeyResponse;
+  } else {
+    keyResponse.setToExternal(const_cast<uint8_t *>(data), size);
+  }
+  hidl_vec<uint8_t> keySetId;
+  hidl_vec<uint8_t> emptyKeyResponse = {};
+  mDrmPluginV1_2->provideKeyResponse(
+      getSessionId(), mFDP->ConsumeBool() ? keyResponse : emptyKeyResponse,
+      [&](Status status, const hidl_vec<uint8_t> &hKeySetId) {
+        if (status == Status::OK) {
+          keySetId = hKeySetId;
+        }
+      });
+
+  mDrmPluginV1_2->restoreKeys(getSessionId(), keySetId);
+
+  mDrmPluginV1_2->queryKeyStatus(
+      getSessionId(),
+      [&]([[maybe_unused]] Status status, KeyedVector /* info */) {});
+
+  hidl_vec<uint8_t> keyId, input, iv;
+  keyId.setToExternal(const_cast<uint8_t *>(data), size);
+  input.setToExternal(const_cast<uint8_t *>(data), size);
+  iv.setToExternal(const_cast<uint8_t *>(data), size);
+  mDrmPluginV1_2->encrypt(
+      getSessionId(), keyId, input, iv,
+      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
+
+  mDrmPluginV1_2->decrypt(
+      getSessionId(), keyId, input, iv,
+      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
+
+  hidl_vec<uint8_t> message;
+  message.setToExternal(const_cast<uint8_t *>(data), size);
+  mDrmPluginV1_2->sign(
+      getSessionId(), keyId, message,
+      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
+
+  hidl_vec<uint8_t> signature;
+  signature.setToExternal(const_cast<uint8_t *>(data), size);
+  mDrmPluginV1_2->verify(getSessionId(), keyId, message, signature,
+                         [&]([[maybe_unused]] Status status, bool) {});
+
+  hidl_vec<uint8_t> wrappedKey;
+  wrappedKey.setToExternal(const_cast<uint8_t *>(data), size);
+  mDrmPluginV1_2->signRSA(
+      getSessionId(), kRSAAlgorithm[mFDP->ConsumeBool()], message, wrappedKey,
+      [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
+
+  mDrmPluginV1_2->removeKeys(getSessionId());
+}
+
+/**
+ * Helper function to create a secure release message for
+ * a secure stop. The clearkey secure stop release format
+ * is just a count followed by the secure stop opaque data.
+ */
+SecureStopRelease ClearKeyFuzzer::makeSecureRelease(const SecureStop &stop) {
+  std::vector<uint8_t> stopData = stop.opaqueData;
+  std::vector<uint8_t> buffer;
+  std::string count = "0001";
+
+  auto it = buffer.insert(buffer.begin(), count.begin(), count.end());
+  buffer.insert(it + count.size(), stopData.begin(), stopData.end());
+  SecureStopRelease release = {.opaqueData = hidl_vec<uint8_t>(buffer)};
+  return release;
+}
+
+void ClearKeyFuzzer::invokeDrmSecureStopAPI() {
+  SecureStopId ssid;
+  mDrmPluginV1_2->getSecureStop(
+      ssid, [&]([[maybe_unused]] Status status, const SecureStop &) {});
+
+  mDrmPluginV1_2->getSecureStopIds(
+      [&]([[maybe_unused]] Status status,
+          [[maybe_unused]] const hidl_vec<SecureStopId> &secureStopIds) {});
+
+  SecureStopRelease release;
+  mDrmPluginV1_2->getSecureStops(
+      [&]([[maybe_unused]] Status status, const hidl_vec<SecureStop> &stops) {
+        if (stops.size() > 0) {
+          release = makeSecureRelease(
+              stops[mFDP->ConsumeIntegralInRange<size_t>(0, stops.size() - 1)]);
+        }
+      });
+
+  mDrmPluginV1_2->releaseSecureStops(release);
+
+  mDrmPluginV1_2->removeSecureStop(ssid);
+
+  mDrmPluginV1_2->removeAllSecureStops();
+
+  mDrmPluginV1_2->releaseSecureStop(ssid);
+
+  mDrmPluginV1_2->releaseAllSecureStops();
+}
+
+void ClearKeyFuzzer::invokeDrmOfflineLicenseAPI(const uint8_t *data,
+                                                size_t size) {
+  hidl_vec<KeySetId> keySetIds = {};
+  mDrmPluginV1_2->getOfflineLicenseKeySetIds(
+      [&](Status status, const hidl_vec<KeySetId> &hKeySetIds) {
+        if (status == Status::OK) {
+          keySetIds = hKeySetIds;
+        }
+      });
+
+  OfflineLicenseState licenseState;
+  KeySetId keySetId = {};
+  if (keySetIds.size() > 0) {
+    keySetId = keySetIds[mFDP->ConsumeIntegralInRange<size_t>(
+        0, keySetIds.size() - 1)];
+  } else {
+    keySetId.setToExternal(const_cast<uint8_t *>(data), size);
+  }
+  mDrmPluginV1_2->getOfflineLicenseState(
+      keySetId, [&](Status status, OfflineLicenseState hLicenseState) {
+        if (status == Status::OK) {
+          licenseState = hLicenseState;
+        }
+      });
+
+  mDrmPluginV1_2->removeOfflineLicense(keySetId);
+}
+
+void ClearKeyFuzzer::invokeDrmPlugin(const uint8_t *data, size_t size) {
+  SecurityLevel secLevel =
+      mFDP->ConsumeBool()
+          ? getValueFromArray(mFDP, kSecurityLevel)
+          : static_cast<SecurityLevel>(mFDP->ConsumeIntegral<uint32_t>());
+  mDrmPluginV1_1->openSession_1_1(
+      secLevel, [&]([[maybe_unused]] Status status, const SessionId &id) {
+        mSessionIdV1 = id;
+      });
+  mDrmPluginV1_2->openSession([&]([[maybe_unused]] Status status,
+                                  const SessionId &id) { mSessionId = id; });
+
+  sp<TestDrmPluginListener> listener = new TestDrmPluginListener();
+  mDrmPluginV1_2->setListener(listener);
+  const hidl_vec<KeyStatus> keyStatusList = {
+      {{1}, KeyStatusType::USABLE},
+      {{2}, KeyStatusType::EXPIRED},
+      {{3}, KeyStatusType::OUTPUTNOTALLOWED},
+      {{4}, KeyStatusType::STATUSPENDING},
+      {{5}, KeyStatusType::INTERNALERROR},
+  };
+  mDrmPluginV1_2->sendKeysChange(mSessionId, keyStatusList, true);
+
+  invokeDrmV1_4API();
+  invokeDrmSetAlgorithmAPI();
+  invokeDrmPropertyAPI();
+  invokeDrmDecryptEncryptAPI(data, size);
+  invokeDrmSecureStopAPI();
+  invokeDrmOfflineLicenseAPI(data, size);
+}
+
+void ClearKeyFuzzer::invokeCryptoPlugin(const uint8_t *data) {
+  mCryptoPlugin->requiresSecureDecoderComponent(
+      getValueFromArray(mFDP, kMimeType));
+
+  const uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
+  const uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
+  mCryptoPlugin->notifyResolution(width, height);
+
+  mCryptoPlugin->setMediaDrmSession(mSessionId);
+
+  size_t totalSize = 0;
+  const size_t numSubSamples =
+      mFDP->ConsumeIntegralInRange<size_t>(1, kMaxSubSamples);
+
+  const Pattern pattern = {0, 0};
+  hidl_vec<SubSample> subSamples;
+  subSamples.resize(numSubSamples);
+
+  for (size_t i = 0; i < numSubSamples; ++i) {
+    const uint32_t clearBytes =
+        mFDP->ConsumeIntegralInRange<uint32_t>(0, kMaxNumBytes);
+    const uint32_t encryptedBytes =
+        mFDP->ConsumeIntegralInRange<uint32_t>(0, kMaxNumBytes);
+    subSamples[i].numBytesOfClearData = clearBytes;
+    subSamples[i].numBytesOfEncryptedData = encryptedBytes;
+    totalSize += subSamples[i].numBytesOfClearData;
+    totalSize += subSamples[i].numBytesOfEncryptedData;
+  }
+
+  // The first totalSize bytes of shared memory is the encrypted
+  // input, the second totalSize bytes is the decrypted output.
+  size_t memoryBytes = totalSize * 2;
+
+  sp<IAllocator> ashmemAllocator = IAllocator::getService("ashmem");
+  if (!ashmemAllocator.get()) {
+    return;
+  }
+
+  hidl_memory hidlMemory;
+  ashmemAllocator->allocate(memoryBytes, [&]([[maybe_unused]] bool success,
+                                             const hidl_memory &memory) {
+    mCryptoPlugin->setSharedBufferBase(memory, kSegmentIndex);
+    hidlMemory = memory;
+  });
+
+  sp<IMemory> mappedMemory = mapMemory(hidlMemory);
+  if (!mappedMemory.get()) {
+    return;
+  }
+  mCryptoPlugin->setSharedBufferBase(hidlMemory, kSegmentIndex);
+
+  uint32_t srcBufferId =
+      mFDP->ConsumeBool() ? kSegmentIndex : mFDP->ConsumeIntegral<uint32_t>();
+  const SharedBuffer sourceBuffer = {
+      .bufferId = srcBufferId, .offset = 0, .size = totalSize};
+
+  BufferType type = mFDP->ConsumeBool() ? BufferType::SHARED_MEMORY
+                                        : BufferType::NATIVE_HANDLE;
+  uint32_t destBufferId =
+      mFDP->ConsumeBool() ? kSegmentIndex : mFDP->ConsumeIntegral<uint32_t>();
+  const DestinationBuffer destBuffer = {
+      .type = type,
+      {.bufferId = destBufferId, .offset = totalSize, .size = totalSize},
+      .secureMemory = nullptr};
+
+  const uint64_t offset = 0;
+  uint32_t bytesWritten = 0;
+  hidl_array<uint8_t, kAESBlockSize> keyId =
+      hidl_array<uint8_t, kAESBlockSize>(data);
+  hidl_array<uint8_t, kAESBlockSize> iv =
+      hidl_array<uint8_t, kAESBlockSize>(data);
+  Mode mode = getValueFromArray(mFDP, kCryptoMode);
+  mCryptoPlugin->decrypt(
+      mFDP->ConsumeBool(), keyId, iv, mode, pattern, subSamples, sourceBuffer,
+      offset, destBuffer,
+      [&]([[maybe_unused]] Status status, uint32_t count,
+          [[maybe_unused]] string detailedError) { bytesWritten = count; });
+  drm::V1_4::IDrmPlugin::getLogMessages_cb cb =
+      [&]([[maybe_unused]] drm::V1_4::Status status,
+          [[maybe_unused]] hidl_vec<drm::V1_4::LogMessage> logs) {};
+  mCryptoPluginV1_4->getLogMessages(cb);
+}
+
+bool ClearKeyFuzzer::invokeDrmFactory() {
+  hidl_string packageName(
+      mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str());
+  hidl_string mimeType(getValueFromArray(mFDP, kMimeType));
+  SecurityLevel securityLevel =
+      mFDP->ConsumeBool()
+          ? getValueFromArray(mFDP, kSecurityLevel)
+          : static_cast<SecurityLevel>(mFDP->ConsumeIntegral<uint32_t>());
+  const hidl_array<uint8_t, 16> uuid =
+      mFDP->ConsumeBool() ? kClearKeyUUID : kInvalidUUID;
+  mDrmFactory->isCryptoSchemeSupported_1_2(uuid, mimeType, securityLevel);
+  mDrmFactory->createPlugin(
+      uuid, packageName, [&](Status status, const sp<IDrmPlugin> &plugin) {
+        if (status == Status::OK) {
+          mDrmPlugin = plugin.get();
+          mDrmPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mDrmPlugin);
+          mDrmPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mDrmPlugin);
+          mDrmPluginV1_4 = drm::V1_4::IDrmPlugin::castFrom(mDrmPlugin);
+        }
+      });
+
+  std::vector<hidl_array<uint8_t, 16>> supportedSchemes;
+  mDrmFactory->getSupportedCryptoSchemes(
+      [&](const hidl_vec<hidl_array<uint8_t, 16>> &schemes) {
+        for (const auto &scheme : schemes) {
+          supportedSchemes.push_back(scheme);
+        }
+      });
+
+  if (!(mDrmPlugin && mDrmPluginV1_1 && mDrmPluginV1_2 && mDrmPluginV1_4)) {
+    return false;
+  }
+  return true;
+}
+
+bool ClearKeyFuzzer::invokeCryptoFactory() {
+  const hidl_array<uint8_t, 16> uuid =
+      mFDP->ConsumeBool() ? kClearKeyUUID : kInvalidUUID;
+  mCryptoFactory->createPlugin(
+      uuid, mSessionId, [this](Status status, const sp<ICryptoPlugin> &plugin) {
+        if (status == Status::OK) {
+          mCryptoPlugin = plugin;
+          mCryptoPluginV1_4 = drm::V1_4::ICryptoPlugin::castFrom(mCryptoPlugin);
+        }
+      });
+
+  if (!mCryptoPlugin && !mCryptoPluginV1_4) {
+    return false;
+  }
+  return true;
+}
+
+void ClearKeyFuzzer::invokeDrm(const uint8_t *data, size_t size) {
+  if (!invokeDrmFactory()) {
+    return;
+  }
+  invokeDrmPlugin(data, size);
+}
+
+void ClearKeyFuzzer::invokeCrypto(const uint8_t *data) {
+  if (!invokeCryptoFactory()) {
+    return;
+  }
+  invokeCryptoPlugin(data);
+}
+
+void ClearKeyFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  invokeDrm(data, size);
+  invokeCrypto(data);
+  delete mFDP;
+}
+
+bool ClearKeyFuzzer::init() {
+  mCryptoFactory =
+      android::hardware::drm::V1_4::clearkey::createCryptoFactory();
+  mDrmFactory = android::hardware::drm::V1_4::clearkey::createDrmFactory();
+  if (!mDrmFactory && !mCryptoFactory) {
+    return false;
+  }
+  return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  if (size < kAESBlockSize) {
+    return 0;
+  }
+  ClearKeyFuzzer clearKeyFuzzer;
+  if (clearKeyFuzzer.init()) {
+    clearKeyFuzzer.process(data, size);
+  }
+  return 0;
+}
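
The decrypt call above relies on a single ashmem segment split in half: the source SharedBuffer starts at offset 0 and the DestinationBuffer starts at offset totalSize, each spanning totalSize bytes. A minimal sketch of that layout computation (illustrative only; the struct and helper below are not part of the fuzzer) is:

```
#include <cstdint>

// Sketch of the shared-memory layout used by invokeCryptoPlugin() above.
// Offsets are relative to the start of the ashmem segment bound with
// setSharedBufferBase(); the names here are illustrative.
struct DecryptLayout {
    uint64_t srcOffset;    // encrypted input begins at the start of the segment
    uint64_t dstOffset;    // decrypted output is written after the input
    uint64_t segmentSize;  // total bytes to request from the ashmem allocator
};

DecryptLayout makeDecryptLayout(uint64_t totalSize) {
    // Input occupies [0, totalSize); output occupies [totalSize, 2 * totalSize).
    return DecryptLayout{0 /*srcOffset*/, totalSize /*dstOffset*/, totalSize * 2 /*segmentSize*/};
}
```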
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 5d6e3da..cb5c9fe 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -432,8 +432,7 @@
         mMockError = Status_V1_2::OK;
     }
 
-    DeviceFiles mFileHandle GUARDED_BY(mFileHandleLock);
-    Mutex mFileHandleLock;
+    DeviceFiles mFileHandle;
     Mutex mSecureStopLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
index a90d818..1d98860 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
@@ -5,9 +5,7 @@
 #ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
 #define CLEARKEY_MEMORY_FILE_SYSTEM_H_
 
-#include <android-base/thread_annotations.h>
 #include <map>
-#include <mutex>
 #include <string>
 
 #include "ClearKeyTypes.h"
@@ -51,12 +49,10 @@
     size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
 
  private:
-    mutable std::mutex mMemoryFileSystemLock;
-
     // License file name is made up of a unique keySetId, therefore,
     // the filename can be used as the key to locate licenses in the
     // memory file system.
-    std::map<std::string, MemoryFile> mMemoryFileSystem GUARDED_BY(mMemoryFileSystemLock);
+    std::map<std::string, MemoryFile> mMemoryFileSystem;
 
     std::string GetFileName(const std::string& path);
 
diff --git a/services/camera/libcameraservice/Android.mk b/drm/mediadrm/plugins/clearkey/service-lazy.mk
similarity index 77%
copy from services/camera/libcameraservice/Android.mk
copy to drm/mediadrm/plugins/clearkey/service-lazy.mk
index 4cfecfd..0d16f4c 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/drm/mediadrm/plugins/clearkey/service-lazy.mk
@@ -1,4 +1,5 @@
-# Copyright 2010 The Android Open Source Project
+#
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,12 +12,5 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-# Build tests
-
-include $(LOCAL_PATH)/tests/Android.mk
-
+#
+PRODUCT_PACKAGES += android.hardware.drm-service-lazy.clearkey
diff --git a/services/camera/libcameraservice/Android.mk b/drm/mediadrm/plugins/clearkey/service.mk
similarity index 77%
rename from services/camera/libcameraservice/Android.mk
rename to drm/mediadrm/plugins/clearkey/service.mk
index 4cfecfd..15988fb 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/drm/mediadrm/plugins/clearkey/service.mk
@@ -1,4 +1,5 @@
-# Copyright 2010 The Android Open Source Project
+#
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,12 +12,5 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-# Build tests
-
-include $(LOCAL_PATH)/tests/Android.mk
-
+#
+PRODUCT_PACKAGES += android.hardware.drm-service.clearkey
diff --git a/include/OWNERS b/include/OWNERS
index d6bd998..e1d4db7 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,6 +1,6 @@
 elaurent@google.com
-gkasten@google.com
 hunga@google.com
 jtinker@google.com
 lajos@google.com
-marcone@google.com
+essick@google.com
+philburk@google.com
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 16e794a..d4025e5 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -38,13 +38,13 @@
     VideoFrame(uint32_t width, uint32_t height,
             uint32_t displayWidth, uint32_t displayHeight,
             uint32_t tileWidth, uint32_t tileHeight,
-            uint32_t angle, uint32_t bpp, bool hasData, size_t iccSize):
+            uint32_t angle, uint32_t bpp, uint32_t bitDepth, bool hasData, size_t iccSize):
         mWidth(width), mHeight(height),
         mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
         mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
         mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
         mSize(hasData ? (bpp * width * height) : 0),
-        mIccSize(iccSize), mReserved(0) {
+        mIccSize(iccSize), mBitDepth(bitDepth) {
     }
 
     void init(const VideoFrame& copy, const void* iccData, size_t iccSize) {
@@ -84,7 +84,9 @@
     uint32_t mRowBytes;        // Number of bytes per row before rotation
     uint32_t mSize;            // Number of bytes of frame data
     uint32_t mIccSize;         // Number of bytes of ICC data
-    uint32_t mReserved;        // (padding to make mData 64-bit aligned)
+    uint32_t mBitDepth;        // number of bits per R / G / B channel
+
+    // Any new items added here must keep the structure 64-bit aligned.
 };
 
 }; // namespace android
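
mBitDepth takes over the slot previously used for padding, so the fixed-size header keeps its length. The alignment constraint called out in the comment could be checked at compile time with a sketch like the one below (a simplified mirror of the field list, not the actual class):

```
#include <cstdint>

// Simplified stand-in for the fixed-size portion of VideoFrame (illustration only).
struct VideoFrameHeaderSketch {
    uint32_t mWidth, mHeight;
    uint32_t mDisplayWidth, mDisplayHeight;
    uint32_t mTileWidth, mTileHeight;
    int64_t  mDurationUs;
    int32_t  mRotationAngle;
    uint32_t mBytesPerPixel, mRowBytes, mSize, mIccSize, mBitDepth;
};

// Frame and ICC data follow the header in memory, so the header size must stay
// a multiple of 8 bytes for that data to remain 64-bit aligned.
static_assert(sizeof(VideoFrameHeaderSketch) % 8 == 0,
              "new fields must keep the header 64-bit aligned");
```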
diff --git a/media/Android.mk b/media/Android.mk
new file mode 100644
index 0000000..220a358
--- /dev/null
+++ b/media/Android.mk
@@ -0,0 +1,5 @@
+LOCAL_PATH := $(call my-dir)
+
+$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
+$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
+$(eval $(call declare-1p-copy-files,frameworks/av/media/libstagefright,))
diff --git a/media/OWNERS b/media/OWNERS
index 4cf4870..4a25b68 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -1,7 +1,6 @@
 # Bug component: 1344
 elaurent@google.com
 essick@google.com
-hkuang@google.com
 hunga@google.com
 jiabin@google.com
 jmtrivi@google.com
@@ -15,6 +14,7 @@
 robertshih@google.com
 taklee@google.com
 wonsik@google.com
+ytai@google.com
 
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 5bc7262..a4c03ba 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,24 +1,7 @@
 // for frameworks/av/media
 {
-    "presubmit-large": [
-        // runs whenever we change something in this tree
-        {
-            "name": "CtsMediaTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.cts.EncodeDecodeTest"
-                }
-            ]
-        },
-        {
-            "name": "CtsMediaTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
-                }
-            ]
-        }
-    ],
+    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
+    // presubmit-large once issues in cuttlefish are fixed
     "presubmit": [
         {
             "name": "GtsMediaTestCases",
@@ -40,26 +23,8 @@
         {
             "path": "frameworks/av/drm/mediadrm/plugins"
         }
-    ],
-
-    "platinum-postsubmit": [
-        // runs regularly, independent of changes in this tree.
-        // signals if changes elsewhere break media functionality
-        {
-            "name": "CtsMediaTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.cts.EncodeDecodeTest"
-                }
-            ]
-        },
-        {
-            "name": "CtsMediaTestCases",
-            "options": [
-                {
-                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
-                }
-            ]
-        }
     ]
+
+    // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
+    // platinum-postsubmit once issues in cuttlefish are fixed
 }
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index be25ffb..828d861 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -21,11 +21,14 @@
 
     header_libs: [
         "libaudiohal_headers",
+        "libmedia_headers",
         "libmediametrics_headers",
     ],
 
     shared_libs: [
+        "packagemanager_aidl-cpp",
         "libaaudioservice",
+        "libaudioclient",
         "libaudioflinger",
         "libaudiopolicyservice",
         "libaudioprocessing",
@@ -41,7 +44,6 @@
         "libpowermanager",
         "libutils",
         "libvibrator",
-
     ],
 
     // TODO check if we still need all of these include directories
@@ -50,7 +52,6 @@
         "frameworks/av/media/libaaudio/include",
         "frameworks/av/media/libaaudio/src",
         "frameworks/av/media/libaaudio/src/binding",
-        "frameworks/av/media/libmedia/include",
         "frameworks/av/services/audioflinger",
         "frameworks/av/services/audiopolicy",
         "frameworks/av/services/audiopolicy/common/include",
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 8ee1efb..e3db5b4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -17,11 +17,17 @@
 #define LOG_TAG "audioserver"
 //#define LOG_NDEBUG 0
 
+#include <algorithm>
+
 #include <fcntl.h>
 #include <sys/prctl.h>
 #include <sys/wait.h>
 #include <cutils/properties.h>
 
+#include <android/media/audio/common/AudioMMapPolicy.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/IAudioFlingerService.h>
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
@@ -30,7 +36,6 @@
 #include <utils/Log.h>
 
 // from include_dirs
-#include "aaudio/AAudioTesting.h" // aaudio_policy_t, AAUDIO_PROP_MMAP_POLICY, AAUDIO_POLICY_*
 #include "AudioFlinger.h"
 #include "AudioPolicyService.h"
 #include "AAudioService.h"
@@ -39,6 +44,10 @@
 
 using namespace android;
 
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
 int main(int argc __unused, char **argv)
 {
     // TODO: update with refined parameters
@@ -73,10 +82,8 @@
         IPCThreadState::self()->joinThreadPool();
         for (;;) {
             siginfo_t info;
-            int ret = waitid(P_PID, childPid, &info, WEXITED | WSTOPPED | WCONTINUED);
-            if (ret == EINTR) {
-                continue;
-            }
+            int ret = TEMP_FAILURE_RETRY(waitid(P_PID, childPid, &info,
+                                                WEXITED | WSTOPPED | WCONTINUED));
             if (ret < 0) {
                 break;
             }
@@ -146,10 +153,24 @@
         // AAudioService should only be used in OC-MR1 and later.
         // And only enable the AAudioService if the system MMAP policy explicitly allows it.
         // This prevents a client from misusing AAudioService when it is not supported.
-        aaudio_policy_t mmapPolicy = property_get_int32(AAUDIO_PROP_MMAP_POLICY,
-                                                        AAUDIO_POLICY_NEVER);
-        if (mmapPolicy == AAUDIO_POLICY_AUTO || mmapPolicy == AAUDIO_POLICY_ALWAYS) {
+        // If we cannot get audio flinger here, something is seriously wrong. In that case,
+        // attempting to call audio flinger on a null pointer could make the process crash
+        // and attract attention.
+        sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+        std::vector<AudioMMapPolicyInfo> policyInfos;
+        status_t status = af->getMmapPolicyInfos(
+                AudioMMapPolicyType::DEFAULT, &policyInfos);
+        // Initialize the aaudio service when querying the mmap policy succeeds and
+        // any of the returned policies supports MMAP.
+        if (status == NO_ERROR &&
+            std::any_of(policyInfos.begin(), policyInfos.end(), [](const auto& info) {
+                    return info.mmapPolicy == AudioMMapPolicy::AUTO ||
+                           info.mmapPolicy == AudioMMapPolicy::ALWAYS;
+            })) {
             AAudioService::instantiate();
+        } else {
+            ALOGD("Do not init aaudio service, status %d, policy info size %zu",
+                  status, policyInfos.size());
         }
 
         ProcessState::self()->startThreadPool();
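
The gating above boils down to one predicate over the policy list returned by audio flinger. A standalone sketch of that check, using a stand-in enum instead of the real android.media.audio.common AIDL types, looks like this:

```
#include <algorithm>
#include <vector>

// Stand-in for android::media::audio::common::AudioMMapPolicy (simplified for illustration).
enum class MMapPolicy { UNSPECIFIED, NEVER, AUTO, ALWAYS };

struct PolicyInfo {
    MMapPolicy mmapPolicy;
};

// The AAudio service is only instantiated when at least one reported policy
// allows MMAP, i.e. is AUTO or ALWAYS.
bool shouldStartAAudioService(const std::vector<PolicyInfo>& infos) {
    return std::any_of(infos.begin(), infos.end(), [](const PolicyInfo& info) {
        return info.mmapPolicy == MMapPolicy::AUTO ||
               info.mmapPolicy == MMapPolicy::ALWAYS;
    });
}
```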
diff --git a/media/bufferpool/1.0/vts/OWNERS b/media/bufferpool/1.0/vts/OWNERS
index 6733e0c..db54d45 100644
--- a/media/bufferpool/1.0/vts/OWNERS
+++ b/media/bufferpool/1.0/vts/OWNERS
@@ -1,6 +1,5 @@
 # Media team
 lajos@google.com
-pawin@google.com
 taklee@google.com
 wonsik@google.com
 
diff --git a/media/bufferpool/2.0/Android.bp b/media/bufferpool/2.0/Android.bp
index 0d1fe27..930b026 100644
--- a/media/bufferpool/2.0/Android.bp
+++ b/media/bufferpool/2.0/Android.bp
@@ -40,6 +40,12 @@
     defaults: ["libstagefright_bufferpool@2.0-default"],
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+        "test_com.android.media.swcodec",
+    ],
+
     // TODO: b/147147992
     double_loadable: true,
     cflags: [
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/bufferpool/2.0/tests/Android.bp
index 803a813..5e26e3a 100644
--- a/media/bufferpool/2.0/tests/Android.bp
+++ b/media/bufferpool/2.0/tests/Android.bp
@@ -80,3 +80,22 @@
     ],
     compile_multilib: "both",
 }
+
+cc_test {
+    name: "BufferpoolUnitTest",
+    test_suites: ["device-tests"],
+    defaults: ["VtsHalTargetTestDefaults"],
+    srcs: [
+        "allocator.cpp",
+        "BufferpoolUnitTest.cpp",
+    ],
+    static_libs: [
+        "android.hardware.media.bufferpool@2.0",
+        "libcutils",
+        "libstagefright_bufferpool@2.0.1",
+    ],
+    shared_libs: [
+        "libfmq",
+    ],
+    compile_multilib: "both",
+}
diff --git a/media/bufferpool/2.0/tests/AndroidTest.xml b/media/bufferpool/2.0/tests/AndroidTest.xml
new file mode 100644
index 0000000..b027ad0
--- /dev/null
+++ b/media/bufferpool/2.0/tests/AndroidTest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for bufferpool unit tests">
+    <option name="test-suite-tag" value="BufferpoolUnitTest" />
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="BufferpoolUnitTest" />
+    </test>
+</configuration>
diff --git a/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp b/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
new file mode 100644
index 0000000..b448405
--- /dev/null
+++ b/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
@@ -0,0 +1,541 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "BufferpoolUnitTest"
+#include <utils/Log.h>
+
+#include <binder/ProcessState.h>
+#include <bufferpool/ClientManager.h>
+#include <gtest/gtest.h>
+#include <hidl/LegacySupport.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unordered_set>
+#include <vector>
+#include "allocator.h"
+
+using android::hardware::configureRpcThreadpool;
+using android::hardware::media::bufferpool::BufferPoolData;
+using android::hardware::media::bufferpool::V2_0::IClientManager;
+using android::hardware::media::bufferpool::V2_0::ResultStatus;
+using android::hardware::media::bufferpool::V2_0::implementation::BufferId;
+using android::hardware::media::bufferpool::V2_0::implementation::ClientManager;
+using android::hardware::media::bufferpool::V2_0::implementation::ConnectionId;
+using android::hardware::media::bufferpool::V2_0::implementation::TransactionId;
+
+using namespace android;
+
+// communication message types between processes.
+enum PipeCommand : int32_t {
+    INIT,
+    TRANSFER,
+    STOP,
+
+    INIT_OK,
+    INIT_ERROR,
+    TRANSFER_OK,
+    TRANSFER_ERROR,
+    STOP_OK,
+    STOP_ERROR,
+};
+
+// communication message between processes.
+union PipeMessage {
+    struct {
+        int32_t command;
+        int32_t memsetValue;
+        BufferId bufferId;
+        ConnectionId connectionId;
+        TransactionId transactionId;
+        int64_t timestampUs;
+    } data;
+    char array[0];
+};
+
+static int32_t kNumIterationCount = 10;
+
+class BufferpoolTest {
+  public:
+    BufferpoolTest() : mConnectionValid(false), mManager(nullptr), mAllocator(nullptr) {
+        mConnectionId = -1;
+        mReceiverId = -1;
+    }
+
+    ~BufferpoolTest() {
+        if (mConnectionValid) {
+            mManager->close(mConnectionId);
+        }
+    }
+
+  protected:
+    bool mConnectionValid;
+    ConnectionId mConnectionId;
+    ConnectionId mReceiverId;
+
+    android::sp<ClientManager> mManager;
+    std::shared_ptr<BufferPoolAllocator> mAllocator;
+
+    void setupBufferpoolManager();
+};
+
+void BufferpoolTest::setupBufferpoolManager() {
+    // retrieve the per-process bufferpool object sp<ClientManager>
+    mManager = ClientManager::getInstance();
+    ASSERT_NE(mManager, nullptr) << "unable to get ClientManager\n";
+
+    mAllocator = std::make_shared<TestBufferPoolAllocator>();
+    ASSERT_NE(mAllocator, nullptr) << "unable to create TestBufferPoolAllocator\n";
+
+    // set up the local bufferpool connection for the sender
+    ResultStatus status = mManager->create(mAllocator, &mConnectionId);
+    ASSERT_EQ(status, ResultStatus::OK)
+            << "unable to set up local bufferpool connection for sender\n";
+    mConnectionValid = true;
+}
+
+class BufferpoolUnitTest : public BufferpoolTest, public ::testing::Test {
+  public:
+    virtual void SetUp() override { setupBufferpoolManager(); }
+
+    virtual void TearDown() override {}
+};
+
+class BufferpoolFunctionalityTest : public BufferpoolTest, public ::testing::Test {
+  public:
+    virtual void SetUp() override {
+        mReceiverPid = -1;
+
+        ASSERT_TRUE(pipe(mCommandPipeFds) == 0) << "pipe connection failed for commandPipe\n";
+        ASSERT_TRUE(pipe(mResultPipeFds) == 0) << "pipe connection failed for resultPipe\n";
+
+        mReceiverPid = fork();
+        ASSERT_TRUE(mReceiverPid >= 0) << "fork failed\n";
+
+        if (mReceiverPid == 0) {
+            doReceiver();
+            // To keep the forked child out of gtest's flow, wait here until TearDown kills it
+            pause();
+        }
+        setupBufferpoolManager();
+    }
+
+    virtual void TearDown() override {
+        if (mReceiverPid > 0) {
+            kill(mReceiverPid, SIGKILL);
+            int wstatus;
+            wait(&wstatus);
+        }
+    }
+
+  protected:
+    pid_t mReceiverPid;
+    int mCommandPipeFds[2];
+    int mResultPipeFds[2];
+
+    bool sendMessage(int* pipes, const PipeMessage& message) {
+        int ret = write(pipes[1], message.array, sizeof(PipeMessage));
+        return ret == sizeof(PipeMessage);
+    }
+
+    bool receiveMessage(int* pipes, PipeMessage* message) {
+        int ret = read(pipes[0], message->array, sizeof(PipeMessage));
+        return ret == sizeof(PipeMessage);
+    }
+
+    void doReceiver();
+};
+
+void BufferpoolFunctionalityTest::doReceiver() {
+    // Configures the threadpool used for handling incoming RPC calls in this process.
+    configureRpcThreadpool(1 /*threads*/, false /*willJoin*/);
+    bool receiverRunning = true;
+    while (receiverRunning) {
+        PipeMessage message;
+        receiveMessage(mCommandPipeFds, &message);
+        ResultStatus err = ResultStatus::OK;
+        switch (message.data.command) {
+            case PipeCommand::INIT: {
+                // receiver manager creation
+                mManager = ClientManager::getInstance();
+                if (!mManager) {
+                    message.data.command = PipeCommand::INIT_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+
+                android::status_t status = mManager->registerAsService();
+                if (status != android::OK) {
+                    message.data.command = PipeCommand::INIT_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                message.data.command = PipeCommand::INIT_OK;
+                sendMessage(mResultPipeFds, message);
+                break;
+            }
+            case PipeCommand::TRANSFER: {
+                native_handle_t* receiveHandle = nullptr;
+                std::shared_ptr<BufferPoolData> receiveBuffer;
+                err = mManager->receive(message.data.connectionId, message.data.transactionId,
+                                        message.data.bufferId, message.data.timestampUs,
+                                        &receiveHandle, &receiveBuffer);
+                if (err != ResultStatus::OK) {
+                    message.data.command = PipeCommand::TRANSFER_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                if (!TestBufferPoolAllocator::Verify(receiveHandle, message.data.memsetValue)) {
+                    message.data.command = PipeCommand::TRANSFER_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                if (receiveHandle) {
+                    native_handle_close(receiveHandle);
+                    native_handle_delete(receiveHandle);
+                }
+                receiveHandle = nullptr;
+                receiveBuffer.reset();
+                message.data.command = PipeCommand::TRANSFER_OK;
+                sendMessage(mResultPipeFds, message);
+                break;
+            }
+            case PipeCommand::STOP: {
+                err = mManager->close(message.data.connectionId);
+                if (err != ResultStatus::OK) {
+                    message.data.command = PipeCommand::STOP_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                message.data.command = PipeCommand::STOP_OK;
+                sendMessage(mResultPipeFds, message);
+                receiverRunning = false;
+                break;
+            }
+            default:
+                ALOGE("unknown command. try again");
+                break;
+        }
+    }
+}
+
+// Buffer allocation test.
+// Check whether each buffer allocation is done successfully with unique buffer id.
+TEST_F(BufferpoolUnitTest, AllocateBuffer) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    std::vector<std::shared_ptr<BufferPoolData>> buffers{};
+    std::vector<native_handle_t*> allocHandle{};
+    ResultStatus status;
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer{};
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << " iteration";
+
+        buffers.push_back(std::move(buffer));
+        if (handle) {
+            allocHandle.push_back(std::move(handle));
+        }
+    }
+
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        for (int j = i + 1; j < kNumIterationCount; ++j) {
+            ASSERT_TRUE(buffers[i]->mId != buffers[j]->mId) << "allocated buffers are not unique";
+        }
+    }
+    // delete the buffer handles
+    for (auto handle : allocHandle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+    // clear the vectors
+    buffers.clear();
+    allocHandle.clear();
+}
+
+// Buffer recycle test.
+// Check whether de-allocated buffers are recycled.
+TEST_F(BufferpoolUnitTest, RecycleBuffer) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    ResultStatus status;
+    std::vector<BufferId> bid{};
+    std::vector<native_handle_t*> allocHandle{};
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer;
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << " iteration";
+
+        bid.push_back(buffer->mId);
+        if (handle) {
+            allocHandle.push_back(std::move(handle));
+        }
+        buffer.reset();
+    }
+
+    std::unordered_set<BufferId> set(bid.begin(), bid.end());
+    ASSERT_EQ(set.size(), 1) << "buffers are not recycled properly";
+
+    // delete the buffer handles
+    for (auto handle : allocHandle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+    allocHandle.clear();
+}
+
+// Validate cache evict and invalidate APIs.
+TEST_F(BufferpoolUnitTest, FlushTest) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    ResultStatus status = mManager->registerSender(mManager, mConnectionId, &mReceiverId);
+    ASSERT_TRUE(status == ResultStatus::ALREADY_EXISTS && mReceiverId == mConnectionId);
+
+    // testing empty flush
+    status = mManager->flush(mConnectionId);
+    ASSERT_EQ(status, ResultStatus::OK) << "failed to flush connection : " << mConnectionId;
+
+    std::vector<std::shared_ptr<BufferPoolData>> senderBuffer{};
+    std::vector<native_handle_t*> allocHandle{};
+    std::vector<TransactionId> tid{};
+    std::vector<int64_t> timestampUs{};
+
+    std::map<TransactionId, BufferId> bufferMap{};
+
+    for (int i = 0; i < kNumIterationCount; i++) {
+        int64_t postUs;
+        TransactionId transactionId;
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer{};
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << " iteration";
+
+        ASSERT_TRUE(TestBufferPoolAllocator::Fill(handle, i));
+
+        status = mManager->postSend(mReceiverId, buffer, &transactionId, &postUs);
+        ASSERT_EQ(status, ResultStatus::OK) << "unable to post send transaction on bufferpool";
+
+        timestampUs.push_back(postUs);
+        tid.push_back(transactionId);
+        bufferMap.insert({transactionId, buffer->mId});
+
+        senderBuffer.push_back(std::move(buffer));
+        if (handle) {
+            allocHandle.push_back(std::move(handle));
+        }
+        buffer.reset();
+    }
+
+    status = mManager->flush(mConnectionId);
+    ASSERT_EQ(status, ResultStatus::OK) << "failed to flush connection : " << mConnectionId;
+
+    std::shared_ptr<BufferPoolData> receiverBuffer{};
+    native_handle_t* recvHandle = nullptr;
+    for (int i = 0; i < kNumIterationCount; i++) {
+        status = mManager->receive(mReceiverId, tid[i], senderBuffer[i]->mId, timestampUs[i],
+                                   &recvHandle, &receiverBuffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "receive failed for buffer " << senderBuffer[i]->mId;
+
+        // find the buffer id from transaction id
+        auto findIt = bufferMap.find(tid[i]);
+        ASSERT_NE(findIt, bufferMap.end()) << "inconsistent buffer mapping";
+
+        // buffer id received must be same as the buffer id sent
+        ASSERT_EQ(findIt->second, receiverBuffer->mId) << "invalid buffer received";
+
+        ASSERT_TRUE(TestBufferPoolAllocator::Verify(recvHandle, i))
+                << "Message received not same as that sent";
+
+        bufferMap.erase(findIt);
+        if (recvHandle) {
+            native_handle_close(recvHandle);
+            native_handle_delete(recvHandle);
+        }
+        recvHandle = nullptr;
+        receiverBuffer.reset();
+    }
+
+    ASSERT_EQ(bufferMap.size(), 0) << "fewer buffers were received than were sent";
+
+    for (auto handle : allocHandle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+    allocHandle.clear();
+    senderBuffer.clear();
+    timestampUs.clear();
+}
+
+// Buffer transfer test between processes.
+TEST_F(BufferpoolFunctionalityTest, TransferBuffer) {
+    // initialize the receiver
+    PipeMessage message;
+    message.data.command = PipeCommand::INIT;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::INIT_OK) << "receiver init failed";
+
+    android::sp<IClientManager> receiver = IClientManager::getService();
+    ASSERT_NE(receiver, nullptr) << "getService failed for receiver\n";
+
+    ConnectionId receiverId;
+    ResultStatus status = mManager->registerSender(receiver, mConnectionId, &receiverId);
+    ASSERT_EQ(status, ResultStatus::OK)
+            << "registerSender failed for connection id " << mConnectionId << "\n";
+
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer;
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << " iteration";
+
+        ASSERT_TRUE(TestBufferPoolAllocator::Fill(handle, i))
+                << "Fill fail for buffer handle " << handle << "\n";
+
+        // send the buffer to the receiver
+        int64_t postUs;
+        TransactionId transactionId;
+        status = mManager->postSend(receiverId, buffer, &transactionId, &postUs);
+        ASSERT_EQ(status, ResultStatus::OK)
+                << "postSend failed for receiver " << receiverId << "\n";
+
+        // PipeMessage message;
+        message.data.command = PipeCommand::TRANSFER;
+        message.data.memsetValue = i;
+        message.data.bufferId = buffer->mId;
+        message.data.connectionId = receiverId;
+        message.data.transactionId = transactionId;
+        message.data.timestampUs = postUs;
+        sendMessage(mCommandPipeFds, message);
+        // delete buffer handle
+        if (handle) {
+            native_handle_close(handle);
+            native_handle_delete(handle);
+        }
+        ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+        ASSERT_EQ(message.data.command, PipeCommand::TRANSFER_OK)
+                << "received error during buffer transfer\n";
+    }
+    message.data.command = PipeCommand::STOP;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::STOP_OK)
+            << "received error during buffer transfer\n";
+}
+
+/* Validate bufferpool for following corner cases:
+ 1. invalid connectionID
+ 2. invalid receiver
+ 3. when sender is not registered
+ 4. when connection is closed
+*/
+// TODO: Enable when the issue in b/212196495 is fixed
+TEST_F(BufferpoolFunctionalityTest, DISABLED_ValidityTest) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    std::shared_ptr<BufferPoolData> senderBuffer;
+    native_handle_t* allocHandle = nullptr;
+
+    // call allocate() on a random connection id
+    ConnectionId randomId = rand();
+    ResultStatus status = mManager->allocate(randomId, vecParams, &allocHandle, &senderBuffer);
+    EXPECT_TRUE(status == ResultStatus::NOT_FOUND);
+
+    // initialize the receiver
+    PipeMessage message;
+    message.data.command = PipeCommand::INIT;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::INIT_OK) << "receiver init failed";
+
+    allocHandle = nullptr;
+    senderBuffer.reset();
+    status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &senderBuffer);
+
+    ASSERT_TRUE(TestBufferPoolAllocator::Fill(allocHandle, 0x77));
+
+    // send buffers w/o registering sender
+    int64_t postUs;
+    TransactionId transactionId;
+
+    // random receiver
+    status = mManager->postSend(randomId, senderBuffer, &transactionId, &postUs);
+    ASSERT_NE(status, ResultStatus::OK) << "bufferpool shouldn't allow send on random receiver";
+
+    // establish connection
+    android::sp<IClientManager> receiver = IClientManager::getService();
+    ASSERT_NE(receiver, nullptr) << "getService failed for receiver\n";
+
+    ConnectionId receiverId;
+    status = mManager->registerSender(receiver, mConnectionId, &receiverId);
+    ASSERT_EQ(status, ResultStatus::OK)
+            << "registerSender failed for connection id " << mConnectionId << "\n";
+
+    allocHandle = nullptr;
+    senderBuffer.reset();
+    status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &senderBuffer);
+    ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for connection " << mConnectionId;
+
+    ASSERT_TRUE(TestBufferPoolAllocator::Fill(allocHandle, 0x88));
+
+    // send the buffer to the receiver
+    status = mManager->postSend(receiverId, senderBuffer, &transactionId, &postUs);
+    ASSERT_EQ(status, ResultStatus::OK) << "postSend failed for receiver " << receiverId << "\n";
+
+    // PipeMessage message;
+    message.data.command = PipeCommand::TRANSFER;
+    message.data.memsetValue = 0x88;
+    message.data.bufferId = senderBuffer->mId;
+    message.data.connectionId = receiverId;
+    message.data.transactionId = transactionId;
+    message.data.timestampUs = postUs;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::TRANSFER_OK)
+            << "received error during buffer transfer\n";
+
+    if (allocHandle) {
+        native_handle_close(allocHandle);
+        native_handle_delete(allocHandle);
+    }
+
+    message.data.command = PipeCommand::STOP;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::STOP_OK)
+            << "received error during buffer transfer\n";
+
+    // try to send msg to closed connection
+    status = mManager->postSend(receiverId, senderBuffer, &transactionId, &postUs);
+    ASSERT_NE(status, ResultStatus::OK) << "bufferpool shouldn't allow send on closed connection";
+}
+
+int main(int argc, char** argv) {
+    android::hardware::details::setTrebleTestingOverride(true);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
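
The functional tests above coordinate the parent (sender) and the forked receiver by exchanging a fixed-size message over two pipes, one per direction. A stripped-down sketch of that transport, independent of the bufferpool types, is:

```
#include <sys/types.h>
#include <unistd.h>
#include <cstdint>

// Hypothetical fixed-size command message, analogous to PipeMessage above.
struct Command {
    int32_t command;
    int64_t payload;
};

// Writes the whole struct to the write end of the pipe; true only if every
// byte was written.
bool sendCommand(int pipeFds[2], const Command& cmd) {
    ssize_t ret = write(pipeFds[1], &cmd, sizeof(cmd));
    return ret == static_cast<ssize_t>(sizeof(cmd));
}

// Reads the whole struct from the read end of the pipe; true only if every
// byte was read.
bool receiveCommand(int pipeFds[2], Command* cmd) {
    ssize_t ret = read(pipeFds[0], cmd, sizeof(*cmd));
    return ret == static_cast<ssize_t>(sizeof(*cmd));
}
```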
diff --git a/media/bufferpool/2.0/tests/OWNERS b/media/bufferpool/2.0/tests/OWNERS
index 6733e0c..db54d45 100644
--- a/media/bufferpool/2.0/tests/OWNERS
+++ b/media/bufferpool/2.0/tests/OWNERS
@@ -1,6 +1,5 @@
 # Media team
 lajos@google.com
-pawin@google.com
 taklee@google.com
 wonsik@google.com
 
diff --git a/media/bufferpool/2.0/tests/README.md b/media/bufferpool/2.0/tests/README.md
new file mode 100644
index 0000000..5efd966
--- /dev/null
+++ b/media/bufferpool/2.0/tests/README.md
@@ -0,0 +1,33 @@
+## Media Testing ##
+---
+#### Bufferpool :
+The Bufferpool Test Suite validates the bufferpool library in Android.
+
+Run the following steps to build the test suite:
+```
+m BufferpoolUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/BufferpoolUnitTest/BufferpoolUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/BufferpoolUnitTest/BufferpoolUnitTest /data/local/tmp/
+```
+
+usage: BufferpoolUnitTest
+```
+adb shell /data/local/tmp/BufferpoolUnitTest
+```
+Alternatively, the test can also be run using the atest command.
+
+```
+atest BufferpoolUnitTest
+```
diff --git a/media/codec2/OWNERS b/media/codec2/OWNERS
index 46a9fca..7d40041 100644
--- a/media/codec2/OWNERS
+++ b/media/codec2/OWNERS
@@ -1,5 +1,4 @@
 set noparent
 wonsik@google.com
 lajos@google.com
-pawin@google.com
 taklee@google.com
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 6ac4210..90bb054 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -8,7 +8,18 @@
   ],
   "presubmit-large": [
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaMiscTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaAudioTestCases",
       "options": [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
@@ -18,10 +29,54 @@
         },
         // TODO: b/149314419
         {
-          "exclude-filter": "android.media.cts.AudioPlaybackCaptureTest"
+          "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
         },
         {
-          "exclude-filter": "android.media.cts.AudioRecordTest"
+          "exclude-filter": "android.media.audio.cts.AudioRecordTest"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaDecoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaEncoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaPlayerTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
         }
       ]
     }
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 57cdcd0..4e4a9a1 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -221,6 +221,12 @@
                 .withFields({C2F(mDrcOutputLoudness, value).inRange(-57.75, 0.25)})
                 .withSetter(Setter<decltype(*mDrcOutputLoudness)>::StrictValueWithNoDeps)
                 .build());
+
+        addParameter(DefineParam(mChannelMask, C2_PARAMKEY_CHANNEL_MASK)
+                .withDefault(new C2StreamChannelMaskInfo::output(0u, 0))
+                .withFields({C2F(mChannelMask, value).inRange(0, 4294967292)})
+                .withSetter(Setter<decltype(*mChannelMask)>::StrictValueWithNoDeps)
+                .build());
     }
 
     bool isAdts() const { return mAacFormat->value == C2Config::AAC_PACKAGING_ADTS; }
@@ -255,6 +261,7 @@
     std::shared_ptr<C2StreamDrcAlbumModeTuning::input> mDrcAlbumMode;
     std::shared_ptr<C2StreamMaxChannelCountInfo::input> mMaxChannelCount;
     std::shared_ptr<C2StreamDrcOutputLoudnessTuning::output> mDrcOutputLoudness;
+    std::shared_ptr<C2StreamChannelMaskInfo::output> mChannelMask;
     // TODO Add : C2StreamAacSbrModeTuning
 };
 
@@ -829,9 +836,11 @@
 
                 C2StreamSampleRateInfo::output sampleRateInfo(0u, mStreamInfo->sampleRate);
                 C2StreamChannelCountInfo::output channelCountInfo(0u, mStreamInfo->numChannels);
+                C2StreamChannelMaskInfo::output channelMaskInfo(0u,
+                        maskFromCount(mStreamInfo->numChannels));
                 std::vector<std::unique_ptr<C2SettingResult>> failures;
                 c2_status_t err = mIntf->config(
-                        { &sampleRateInfo, &channelCountInfo },
+                        { &sampleRateInfo, &channelCountInfo, &channelMaskInfo },
                         C2_MAY_BLOCK,
                         &failures);
                 if (err == OK) {
@@ -840,6 +849,7 @@
                     C2FrameData &output = work->worklets.front()->output;
                     output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
                     output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+                    output.configUpdate.push_back(C2Param::Copy(channelMaskInfo));
                 } else {
                     ALOGE("Config Update failed");
                     mSignalledError = true;
@@ -1056,6 +1066,47 @@
     }
 }
 
+// definitions based on android.media.AudioFormat.CHANNEL_OUT_*
+#define CHANNEL_OUT_FL  0x4
+#define CHANNEL_OUT_FR  0x8
+#define CHANNEL_OUT_FC  0x10
+#define CHANNEL_OUT_LFE 0x20
+#define CHANNEL_OUT_BL  0x40
+#define CHANNEL_OUT_BR  0x80
+#define CHANNEL_OUT_SL  0x800
+#define CHANNEL_OUT_SR  0x1000
+
+uint32_t C2SoftAacDec::maskFromCount(uint32_t channelCount) {
+    // KEY_CHANNEL_MASK expects masks formatted according to Java android.media.AudioFormat
+    // where the two left-most bits are 0 for output channel mask
+    switch (channelCount) {
+        case 1: // mono is front left
+            return (CHANNEL_OUT_FL);
+        case 2: // stereo
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FR);
+        case 4: // 4.0 = stereo with backs
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR);
+        case 5: // 5.0
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR);
+        case 6: // 5.1 = 5.0 + LFE
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR
+                    | CHANNEL_OUT_LFE);
+        case 7: // 7.0 = 5.0 + Sides
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR
+                    | CHANNEL_OUT_SL | CHANNEL_OUT_SR);
+        case 8: // 7.1 = 7.0 + LFE
+            return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+                    | CHANNEL_OUT_BL | CHANNEL_OUT_BR | CHANNEL_OUT_SL | CHANNEL_OUT_SR
+                    | CHANNEL_OUT_LFE);
+        default:
+            return 0;
+    }
+}
+
 class C2SoftAacDecFactory : public C2ComponentFactory {
 public:
     C2SoftAacDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
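
maskFromCount() reports the layout through C2_PARAMKEY_CHANNEL_MASK using the Java android.media.AudioFormat bit values. One property worth keeping in mind (sketch below, standalone and illustrative, not part of the component) is that the popcount of each returned mask matches the channel count it was derived from:

```
#include <bitset>
#include <cassert>
#include <cstdint>

// Same bit values as the CHANNEL_OUT_* defines above (android.media.AudioFormat).
constexpr uint32_t kOutFL = 0x4, kOutFR = 0x8, kOutFC = 0x10, kOutLFE = 0x20;
constexpr uint32_t kOutBL = 0x40, kOutBR = 0x80;

int main() {
    const uint32_t stereo = kOutFL | kOutFR;
    const uint32_t fivePointOne = kOutFL | kOutFC | kOutFR | kOutBL | kOutBR | kOutLFE;

    // Each mask encodes exactly one bit per output channel.
    assert(std::bitset<32>(stereo).count() == 2);
    assert(std::bitset<32>(fivePointOne).count() == 6);
    return 0;
}
```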
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index a03fc70..b45f148 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -101,6 +101,7 @@
     int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
     int32_t outputDelayRingBufferSamplesAvailable();
     int32_t outputDelayRingBufferSpaceLeft();
+    uint32_t maskFromCount(uint32_t channelCount);
 
     C2_DO_NOT_COPY(C2SoftAacDec);
 };
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index bb63e1f..7afea91 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -225,7 +225,7 @@
         work->result = C2_CORRUPTED;
         return;
     }
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
@@ -266,7 +266,7 @@
     ALOGV("causal sample size %d", mFilledLen);
     if (mIsFirst && outPos != 0) {
         mIsFirst = false;
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
     }
     fillEmptyWork(work);
     if (outPos != 0) {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
index 6ab14db..4920b23 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
@@ -54,7 +54,7 @@
     bool mIsFirst;
     bool mSignalledError;
     bool mSignalledOutputEos;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     int32_t mFilledLen;
     int16_t mInputFrame[kNumSamplesPerFrame];
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 84728ae..29b1040 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -307,7 +307,7 @@
         work->result = wView.error();
         return;
     }
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
@@ -341,7 +341,7 @@
     ALOGV("causal sample size %d", mFilledLen);
     if (mIsFirst && outPos != 0) {
         mIsFirst = false;
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
     }
     fillEmptyWork(work);
     if (outPos != 0) {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
index 0cc9e9f..72990c3 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
@@ -55,7 +55,7 @@
     bool mIsFirst;
     bool mSignalledError;
     bool mSignalledOutputEos;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     int32_t mFilledLen;
     int16_t mInputFrame[kNumSamplesPerFrame];
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index cb9837f..a2a79d5 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -22,8 +22,4 @@
 
     srcs: ["C2SoftAomDec.cpp"],
     static_libs: ["libaom"],
-
-    include_dirs: [
-        "external/libaom/",
-    ],
 }
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c08cd59..96b81d7 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -261,8 +261,7 @@
     CREATE_DUMP_FILE(mInFile);
     CREATE_DUMP_FILE(mOutFile);
 
-    gettimeofday(&mTimeStart, nullptr);
-    gettimeofday(&mTimeEnd, nullptr);
+    mTimeStart = mTimeEnd = systemTime();
 }
 
 C2SoftAomDec::~C2SoftAomDec() {
@@ -463,19 +462,17 @@
     int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
     if (inSize) {
         uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
-        int32_t decodeTime = 0;
-        int32_t delay = 0;
 
         DUMP_TO_FILE(mOutFile, bitstream, inSize);
-        GETTIME(&mTimeStart, nullptr);
-        TIME_DIFF(mTimeEnd, mTimeStart, delay);
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
 
         aom_codec_err_t err =
             aom_codec_decode(mCodecCtx, bitstream, inSize, &frameIndex);
 
-        GETTIME(&mTimeEnd, nullptr);
-        TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
-        ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);
+        mTimeEnd = systemTime();
+        nsecs_t decodeTime = mTimeEnd - mTimeStart;
+        ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
 
         if (err != AOM_CODEC_OK) {
             ALOGE("av1 decoder failed to decode frame err: %d", err);
@@ -505,124 +502,6 @@
     }
 }
 
-static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        uint32_t width, uint32_t height) {
-
-    for (size_t i = 0; i < height; ++i) {
-        memcpy(dstY, srcY, width);
-        srcY += srcYStride;
-        dstY += dstYStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-        memcpy(dstV, srcV, width / 2);
-        srcV += srcVStride;
-        dstV += dstUVStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-        memcpy(dstU, srcU, width / 2);
-        srcU += srcUStride;
-        dstU += dstUVStride;
-    }
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstStride, size_t width, size_t height) {
-
-    // Converting two lines at a time, slightly faster
-    for (size_t y = 0; y < height; y += 2) {
-        uint32_t *dstTop = (uint32_t *) dst;
-        uint32_t *dstBot = (uint32_t *) (dst + dstStride);
-        uint16_t *ySrcTop = (uint16_t*) srcY;
-        uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
-        uint16_t *uSrc = (uint16_t*) srcU;
-        uint16_t *vSrc = (uint16_t*) srcV;
-
-        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
-        size_t x = 0;
-        for (; x < width - 3; x += 4) {
-
-            u01 = *((uint32_t*)uSrc); uSrc += 2;
-            v01 = *((uint32_t*)vSrc); vSrc += 2;
-
-            y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-            y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
-            *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
-            *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
-            *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
-            *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
-        }
-
-        // There should be at most 2 more pixels to process. Note that we don't
-        // need to consider odd case as the buffer is always aligned to even.
-        if (x < width) {
-            u01 = *uSrc;
-            v01 = *vSrc;
-            y01 = *((uint32_t*)ySrcTop);
-            y45 = *((uint32_t*)ySrcBot);
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = ((y01 >> 16) << 10) | uv0;
-            *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = ((y45 >> 16) << 10) | uv0;
-        }
-
-        srcY += srcYStride * 2;
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dst += dstStride * 2;
-    }
-
-    return;
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        size_t width, size_t height) {
-
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; ++x) {
-            dstY[x] = (uint8_t)(srcY[x] >> 2);
-        }
-
-        srcY += srcYStride;
-        dstY += dstYStride;
-    }
-
-    for (size_t y = 0; y < (height + 1) / 2; ++y) {
-        for (size_t x = 0; x < (width + 1) / 2; ++x) {
-            dstU[x] = (uint8_t)(srcU[x] >> 2);
-            dstV[x] = (uint8_t)(srcV[x] >> 2);
-        }
-
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dstU += dstUVStride;
-        dstV += dstUVStride;
-    }
-    return;
-}
 bool C2SoftAomDec::outputBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const std::unique_ptr<C2Work> &work)
@@ -657,9 +536,10 @@
 
     std::shared_ptr<C2GraphicBlock> block;
     uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects;
     if (img->fmt == AOM_IMG_FMT_I42016) {
         IntfImpl::Lock lock = mIntf->lock();
-        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
+        defaultColorAspects = mIntf->getDefaultColorAspects_l();
 
         if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
             defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -706,26 +586,21 @@
         const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
 
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-            convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2,
-                                    dstYStride / sizeof(uint32_t),
-                                    mWidth, mHeight);
+            convertYUV420Planar16ToY410OrRGBA1010102(
+                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+                    srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                    std::static_pointer_cast<const C2ColorAspectsStruct>(defaultColorAspects));
         } else {
-            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
-                                    srcY, srcU, srcV,
-                                    srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                    dstYStride, dstUVStride,
-                                    mWidth, mHeight);
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        mWidth, mHeight);
         }
     } else {
         const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
-        copyOutputBufferToYuvPlanarFrame(
-                dstY, dstU, dstV, srcY, srcU, srcV,
-                srcYStride, srcUStride, srcVStride,
-                dstYStride, dstUVStride,
-                mWidth, mHeight);
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
     }
     finishWork(*(int64_t*)img->user_priv, work, std::move(block));
     block = nullptr;
diff --git a/media/codec2/components/aom/C2SoftAomDec.h b/media/codec2/components/aom/C2SoftAomDec.h
index 4c82647..8b953fe 100644
--- a/media/codec2/components/aom/C2SoftAomDec.h
+++ b/media/codec2/components/aom/C2SoftAomDec.h
@@ -17,15 +17,12 @@
 #ifndef ANDROID_C2_SOFT_AV1_DEC_H_
 #define ANDROID_C2_SOFT_AV1_DEC_H_
 
+#include <inttypes.h>
+
 #include <SimpleC2Component.h>
 #include "aom/aom_decoder.h"
 #include "aom/aomdx.h"
 
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff)     \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
-
 namespace android {
 
 struct C2SoftAomDec : public SimpleC2Component {
@@ -60,8 +57,8 @@
     char mOutFile[200];
     #endif /* FILE_DUMP_ENABLE */
 
-    struct timeval mTimeStart;   // Time at the start of decode()
-    struct timeval mTimeEnd;     // Time at the end of decode()
+    nsecs_t mTimeStart = 0;   // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;     // Time at the end of decode()
 
     status_t initDecoder();
     status_t destroyDecoder();
@@ -85,14 +82,13 @@
 #define OUTPUT_DUMP_EXT "av1"
 #define GENERATE_FILE_NAMES()                                                 \
     {                                                                         \
-        GETTIME(&mTimeStart, NULL);                                           \
-        strcpy(mInFile, "");                                                  \
+        nsecs_t now = systemTime();                                           \
         ALOGD("GENERATE_FILE_NAMES");                                         \
-        sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
-                mTimeStart.tv_usec, INPUT_DUMP_EXT);                          \
+        sprintf(mInFile, "%s_%" PRId64 ".%s", INPUT_DUMP_PATH,                \
+                now, INPUT_DUMP_EXT);                                         \
         strcpy(mOutFile, "");                                                 \
-        sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,                  \
-                mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT);      \
+        sprintf(mOutFile, "%s_%" PRId64 ".%s", OUTPUT_DUMP_PATH,              \
+                now, OUTPUT_DUMP_EXT);                                        \
     }
 
 #define CREATE_DUMP_FILE(m_filename)                     \
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index 0be1bed..a7ae85b 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -19,10 +19,7 @@
 
     srcs: ["C2SoftAvcDec.cpp"],
 
-    include_dirs: [
-        "external/libavc/decoder",
-        "external/libavc/common",
-    ],
+    export_include_dirs: ["."],
 }
 
 cc_library {
@@ -37,10 +34,7 @@
 
     srcs: ["C2SoftAvcEnc.cpp"],
 
-    include_dirs: [
-        "external/libavc/encoder",
-        "external/libavc/common",
-    ],
+    export_include_dirs: ["."],
 
     cflags: [
         "-Wno-unused-variable",
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index e8287f9..953afc5 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -511,7 +511,7 @@
 status_t C2SoftAvcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN32(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -670,8 +670,7 @@
 
 void C2SoftAvcDec::resetPlugin() {
     mSignalledOutputEos = false;
-    gettimeofday(&mTimeStart, nullptr);
-    gettimeofday(&mTimeEnd, nullptr);
+    mTimeStart = mTimeEnd = systemTime();
 }
 
 status_t C2SoftAvcDec::deleteDecoder() {
@@ -777,20 +776,20 @@
         return C2_CORRUPTED;
     }
     if (mOutBlock &&
-            (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+            (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
         mOutBlock.reset();
     }
     if (!mOutBlock) {
         uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
-            pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+            pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
         if (err != C2_OK) {
             ALOGE("fetchGraphicBlock for Output failed with status %d", err);
             return err;
         }
         ALOGV("provided (%dx%d) required (%dx%d)",
-              mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+              mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
     }
 
     return C2_OK;
@@ -866,14 +865,13 @@
                 setParams(mStride, IVD_DECODE_HEADER);
             }
 
-            WORD32 delay;
-            GETTIME(&mTimeStart, nullptr);
-            TIME_DIFF(mTimeEnd, mTimeStart, delay);
+            mTimeStart = systemTime();
+            nsecs_t delay = mTimeStart - mTimeEnd;
             (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
-            WORD32 decodeTime;
-            GETTIME(&mTimeEnd, nullptr);
-            TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
-            ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            ALOGV("decodeTime=%" PRId64 " delay=%" PRId64 " numBytes=%6d", decodeTime, delay,
                   ps_decode_op->u4_num_bytes_consumed);
         }
         if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
@@ -928,7 +926,7 @@
         if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                mStride = ALIGN32(ps_decode_op->u4_pic_wd);
+                mStride = ALIGN128(ps_decode_op->u4_pic_wd);
                 setParams(mStride, IVD_DECODE_FRAME);
             }
             if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 5c07d29..36a463e 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_AVC_DEC_H_
 
 #include <sys/time.h>
+#include <inttypes.h>
 
 #include <media/stagefright/foundation/ColorUtils.h>
 
@@ -38,24 +39,20 @@
 #define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t
 #define ivdext_ctl_get_vui_params_ip_t  ih264d_ctl_get_vui_params_ip_t
 #define ivdext_ctl_get_vui_params_op_t  ih264d_ctl_get_vui_params_op_t
-#define ALIGN32(x)                      ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
 #define MIN(a, b)                       (((a) < (b)) ? (a) : (b))
-#define GETTIME(a, b)                   gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff)     \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
 
 #ifdef FILE_DUMP_ENABLE
     #define INPUT_DUMP_PATH     "/sdcard/clips/avcd_input"
     #define INPUT_DUMP_EXT      "h264"
     #define GENERATE_FILE_NAMES() {                         \
-        GETTIME(&mTimeStart, NULL);                         \
+        nsecs_t now = systemTime();                         \
         strcpy(mInFile, "");                                \
-        sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH,  \
-                mTimeStart.tv_sec, mTimeStart.tv_usec,      \
+        sprintf(mInFile, "%s_%" PRId64 ".%s",               \
+                INPUT_DUMP_PATH, now,                       \
                 INPUT_DUMP_EXT);                            \
     }
     #define CREATE_DUMP_FILE(m_filename) {                  \
@@ -183,8 +180,8 @@
     } mBitstreamColorAspects;
 
     // profile
-    struct timeval mTimeStart;
-    struct timeval mTimeEnd;
+    nsecs_t mTimeStart = 0;
+    nsecs_t mTimeEnd = 0;
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
 #endif /* FILE_DUMP_ENABLE */
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index d65ffa5..8b46d3f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -656,8 +656,7 @@
     mEntropyMode = DEFAULT_ENTROPY_MODE;
     mBframes = DEFAULT_B_FRAMES;
 
-    gettimeofday(&mTimeStart, nullptr);
-    gettimeofday(&mTimeEnd, nullptr);
+    mTimeStart = mTimeEnd = systemTime();
 }
 
 c2_status_t C2SoftAvcEnc::setDimensions() {
@@ -1515,7 +1514,8 @@
             vPlane = uPlane + yPlaneSize / 4;
             yStride = width;
             uStride = vStride = yStride / 2;
-            ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *input);
+            ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *input,
+                                  mColorAspects->matrix, mColorAspects->range);
             break;
         }
         case C2PlanarLayout::TYPE_YUV: {
@@ -1650,8 +1650,7 @@
     work->worklets.front()->output.flags = work->input.flags;
 
     IV_STATUS_T status;
-    WORD32 timeDelay = 0;
-    WORD32 timeTaken = 0;
+    nsecs_t timeDelay = 0;
     uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
 
     // Initialize encoder if not already initialized
@@ -1817,10 +1816,10 @@
         //         mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
         //         (mHeight * mStride * 3 / 2));
 
-        GETTIME(&mTimeStart, nullptr);
         /* Compute time elapsed between end of previous decode()
          * to start of current decode() */
-        TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+        mTimeStart = systemTime();
+        timeDelay = mTimeStart - mTimeEnd;
         status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
 
         if (IV_SUCCESS != status) {
@@ -1844,11 +1843,11 @@
         mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
     }
 
-    GETTIME(&mTimeEnd, nullptr);
     /* Compute time taken for decode() */
-    TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+    mTimeEnd = systemTime();
+    nsecs_t timeTaken = mTimeEnd - mTimeStart;
 
-    ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+    ALOGV("timeTaken=%" PRId64 "d delay=%" PRId64 " numBytes=%6d", timeTaken, timeDelay,
             ps_encode_op->s_out_buf.u4_bytes);
 
     void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 1fecd9e..293867d 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_AVC_ENC_H__
 
 #include <map>
+#include <inttypes.h>
 
 #include <utils/Vector.h>
 
@@ -115,14 +116,6 @@
 /** Used to remove warnings about unused parameters */
 #define UNUSED(x) ((void)(x))
 
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b);
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff) \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
-
 #define ive_aligned_malloc(alignment, size) memalign(alignment, size)
 #define ive_aligned_free(buf) free(buf)
 
@@ -148,6 +141,7 @@
     virtual ~C2SoftAvcEnc();
 
 private:
+    // RBE What does OMX have to do with the c2 plugin?
     // OMX input buffer's timestamp and flags
     typedef struct {
         int64_t mTimeUs;
@@ -158,8 +152,8 @@
 
     int32_t mStride;
 
-    struct timeval mTimeStart;   // Time at the start of decode()
-    struct timeval mTimeEnd;     // Time at the end of decode()
+    nsecs_t mTimeStart = 0;   // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;     // Time at the end of decode()
 
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
@@ -259,14 +253,14 @@
 #define OUTPUT_DUMP_EXT     "h264"
 
 #define GENERATE_FILE_NAMES() {                         \
-    GETTIME(&mTimeStart, NULL);                         \
+    nsecs_t now = systemTime();                         \
     strcpy(mInFile, "");                                \
-    sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH,  \
-            mTimeStart.tv_sec, mTimeStart.tv_usec,      \
+    sprintf(mInFile, "%s_%" PRId64 ".%s",               \
+            INPUT_DUMP_PATH, now,                       \
             INPUT_DUMP_EXT);                            \
     strcpy(mOutFile, "");                               \
-    sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
-            mTimeStart.tv_sec, mTimeStart.tv_usec,      \
+    sprintf(mOutFile, "%s_%" PRId64 ".%s",              \
+            OUTPUT_DUMP_PATH, now,                      \
             OUTPUT_DUMP_EXT);                           \
 }
 
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 160e250..664647a 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -9,6 +9,16 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_library_headers {
+    name: "libcodec2_soft_common_headers",
+    defaults: ["libcodec2-impl-defaults"],
+    vendor_available: true,
+
+    export_include_dirs: [
+        "include",
+    ],
+}
+
 cc_library {
     name: "libcodec2_soft_common",
     defaults: ["libcodec2-impl-defaults"],
@@ -27,6 +37,11 @@
         "libsfplugin_ccodec_utils",
     ],
 
+    header_libs: [
+        "libarect_headers",
+        "libnativewindow_headers",
+    ],
+
     shared_libs: [
         "libcutils", // for properties
         "liblog", // for ALOG
@@ -34,6 +49,12 @@
         "libstagefright_foundation", // for Mutexed
     ],
 
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -61,6 +82,11 @@
         "libsfplugin_ccodec_utils",
     ],
 
+    header_libs: [
+        "libarect_headers",
+        "libnativewindow_headers",
+    ],
+
     shared_libs: [
         "libcodec2_soft_common",
         "libcutils", // for properties
@@ -75,6 +101,12 @@
     ],
 
     ldflags: ["-Wl,-Bsymbolic"],
+
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 }
 
 // public dependency for software codec implementation
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 6c4b7d9..9d4f049 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "SimpleC2Component"
 #include <log/log.h>
 
+#include <android/hardware_buffer.h>
 #include <cutils/properties.h>
 #include <media/stagefright/foundation/AMessage.h>
 
@@ -26,10 +27,393 @@
 #include <C2Config.h>
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
 #include <SimpleC2Component.h>
 
 namespace android {
+constexpr uint8_t kNeutralUVBitDepth8 = 128;
+constexpr uint16_t kNeutralUVBitDepth10 = 512;
 
+void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
+                                const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
+                                size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                size_t dstUVStride, uint32_t width, uint32_t height,
+                                bool isMonochrome) {
+    for (size_t i = 0; i < height; ++i) {
+        memcpy(dstY, srcY, width);
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t i = 0; i < (height + 1) / 2; ++i) {
+            memset(dstV, kNeutralUVBitDepth8, (width + 1) / 2);
+            memset(dstU, kNeutralUVBitDepth8, (width + 1) / 2);
+            dstV += dstUVStride;
+            dstU += dstUVStride;
+        }
+        return;
+    }
+
+    for (size_t i = 0; i < (height + 1) / 2; ++i) {
+        memcpy(dstV, srcV, (width + 1) / 2);
+        srcV += srcVStride;
+        dstV += dstUVStride;
+    }
+
+    for (size_t i = 0; i < (height + 1) / 2; ++i) {
+        memcpy(dstU, srcU, (width + 1) / 2);
+        srcU += srcUStride;
+        dstU += dstUVStride;
+    }
+}
+
+void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+                                 const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+                                 size_t srcVStride, size_t dstStride, size_t width, size_t height) {
+    // Converting two lines at a time, slightly faster
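+    // Each output word packs A(2) | V(10) | Y(10) | U(10) from MSB to LSB (3 << 30 sets the
+    // alpha bits); one U/V pair is shared by a 2x2 block of luma samples (4:2:0 chroma).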
+    for (size_t y = 0; y < height; y += 2) {
+        uint32_t *dstTop = (uint32_t *)dst;
+        uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+        uint16_t *ySrcTop = (uint16_t *)srcY;
+        uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+        uint16_t *uSrc = (uint16_t *)srcU;
+        uint16_t *vSrc = (uint16_t *)srcV;
+
+        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
+        size_t x = 0;
+        for (; x < width - 3; x += 4) {
+            u01 = *((uint32_t *)uSrc);
+            uSrc += 2;
+            v01 = *((uint32_t *)vSrc);
+            vSrc += 2;
+
+            y01 = *((uint32_t *)ySrcTop);
+            ySrcTop += 2;
+            y23 = *((uint32_t *)ySrcTop);
+            ySrcTop += 2;
+            y45 = *((uint32_t *)ySrcBot);
+            ySrcBot += 2;
+            y67 = *((uint32_t *)ySrcBot);
+            ySrcBot += 2;
+
+            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+            *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
+            *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
+            *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
+            *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
+
+            *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
+            *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
+            *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
+            *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
+        }
+
+        // There should be at most 2 more pixels to process. Note that we don't
+        // need to consider the odd case as the buffer is always aligned to an even width.
+        if (x < width) {
+            u01 = *uSrc;
+            v01 = *vSrc;
+            y01 = *((uint32_t *)ySrcTop);
+            y45 = *((uint32_t *)ySrcBot);
+            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+            *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
+            *dstTop++ = ((y01 >> 16) << 10) | uv0;
+            *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
+            *dstBot++ = ((y45 >> 16) << 10) | uv0;
+        }
+
+        srcY += srcYStride * 2;
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dst += dstStride * 2;
+    }
+}
+
+namespace {
+
+static C2ColorAspectsStruct FillMissingColorAspects(
+        std::shared_ptr<const C2ColorAspectsStruct> aspects,
+        int32_t width, int32_t height) {
+    C2ColorAspectsStruct _aspects;
+    if (aspects) {
+        _aspects = *aspects;
+    }
+
+    // use matrix for conversion
+    if (_aspects.matrix == C2Color::MATRIX_UNSPECIFIED) {
+        // if not specified, deduce matrix from primaries
+        if (_aspects.primaries == C2Color::PRIMARIES_UNSPECIFIED) {
+            // if those are also not specified, deduce primaries first from transfer, then from
+            // width and height
+            if (_aspects.transfer == C2Color::TRANSFER_ST2084
+                    || _aspects.transfer == C2Color::TRANSFER_HLG) {
+                _aspects.primaries = C2Color::PRIMARIES_BT2020;
+            } else if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
+                // TODO: stagefright defaults to BT.2020 for UHD, but perhaps we should default to
+                // BT.709 for non-HDR 10-bit UHD content
+                // (see media/libstagefright/foundation/ColorUtils.cpp)
+                _aspects.primaries = C2Color::PRIMARIES_BT2020;
+            } else if ((width <= 720 && height <= 576)
+                    || (height <= 720 && width <= 576)) {
+                // note: it does not actually matter whether to use 525 or 625 here as the
+                // conversion is the same
+                _aspects.primaries = C2Color::PRIMARIES_BT601_625;
+            } else {
+                _aspects.primaries = C2Color::PRIMARIES_BT709;
+            }
+        }
+
+        switch (_aspects.primaries) {
+        case C2Color::PRIMARIES_BT601_525:
+        case C2Color::PRIMARIES_BT601_625:
+            _aspects.matrix = C2Color::MATRIX_BT601;
+            break;
+
+        case C2Color::PRIMARIES_BT709:
+            _aspects.matrix = C2Color::MATRIX_BT709;
+            break;
+
+        case C2Color::PRIMARIES_BT2020:
+        default:
+            _aspects.matrix = C2Color::MATRIX_BT2020;
+        }
+    }
+
+    return _aspects;
+}
+
+// matrix conversion coefficients
+// (see media/libstagefright/colorconverter/ColorConverter.cpp for more details)
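+// Coefficients are in Q10 fixed point (scaled by 1024); _c16 is the 10-bit luma offset
+// (64 for limited range, 0 for full range) subtracted from Y before applying the matrix.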
+struct Coeffs {
+    int32_t _y, _b_u, _g_u, _g_v, _r_v, _c16;
+};
+
+static const struct Coeffs GetCoeffsForAspects(const C2ColorAspectsStruct &aspects) {
+    bool isFullRange = aspects.range == C2Color::RANGE_FULL;
+
+    switch (aspects.matrix) {
+    case C2Color::MATRIX_BT601:
+        /**
+         * BT.601:  K_R = 0.299;  K_B = 0.114
+         */
+        if (isFullRange) {
+            return Coeffs { 1024, 1436, 352, 731, 1815, 0 };
+        } else {
+            return Coeffs { 1196, 1639, 402, 835, 2072, 64 };
+        }
+        break;
+
+    case C2Color::MATRIX_BT709:
+        /**
+         * BT.709:  K_R = 0.2126;  K_B = 0.0722
+         */
+        if (isFullRange) {
+            return Coeffs { 1024, 1613, 192, 479, 1900, 0 };
+        } else {
+            return Coeffs { 1196, 1841, 219, 547, 2169, 64 };
+        }
+        break;
+
+    case C2Color::MATRIX_BT2020:
+    default:
+        /**
+         * BT.2020:  K_R = 0.2627;  K_B = 0.0593
+         */
+        if (isFullRange) {
+            return Coeffs { 1024, 1510, 169, 585, 1927, 0 };
+        } else {
+            return Coeffs { 1196, 1724, 192, 668, 2200, 64 };
+        }
+    }
+}
+
+}  // namespace
+
+#define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
+void convertYUV420Planar16ToRGBA1010102(
+        uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+        const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+        size_t srcVStride, size_t dstStride, size_t width,
+        size_t height,
+        std::shared_ptr<const C2ColorAspectsStruct> aspects) {
+
+    C2ColorAspectsStruct _aspects = FillMissingColorAspects(aspects, width, height);
+
+    struct Coeffs coeffs = GetCoeffsForAspects(_aspects);
+
+    int32_t _y = coeffs._y;
+    int32_t _b_u = coeffs._b_u;
+    int32_t _neg_g_u = -coeffs._g_u;
+    int32_t _neg_g_v = -coeffs._g_v;
+    int32_t _r_v = coeffs._r_v;
+    int32_t _c16 = coeffs._c16;
+
+    // Converting two lines at a time, slightly faster
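+    // Each iteration converts a 2x2 block sharing one U/V pair (4:2:0 chroma). Q10 products
+    // are rounded with +512 before dividing by 1024, clamped to [0, 1023], and packed as
+    // A(2) | B(10) | G(10) | R(10) from MSB to LSB.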
+    for (size_t y = 0; y < height; y += 2) {
+        uint32_t *dstTop = (uint32_t *)dst;
+        uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+        uint16_t *ySrcTop = (uint16_t *)srcY;
+        uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+        uint16_t *uSrc = (uint16_t *)srcU;
+        uint16_t *vSrc = (uint16_t *)srcV;
+
+        for (size_t x = 0; x < width; x += 2) {
+            int32_t u, v, y00, y01, y10, y11;
+            u = *uSrc - 512;
+            uSrc += 1;
+            v = *vSrc - 512;
+            vSrc += 1;
+
+            y00 = *ySrcTop - _c16;
+            ySrcTop += 1;
+            y01 = *ySrcTop - _c16;
+            ySrcTop += 1;
+            y10 = *ySrcBot - _c16;
+            ySrcBot += 1;
+            y11 = *ySrcBot - _c16;
+            ySrcBot += 1;
+
+            int32_t u_b = u * _b_u;
+            int32_t u_g = u * _neg_g_u;
+            int32_t v_g = v * _neg_g_v;
+            int32_t v_r = v * _r_v;
+
+            int32_t yMult, b, g, r;
+            yMult = y00 * _y + 512;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y01 * _y + 512;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y10 * _y + 512;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y11 * _y + 512;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+        }
+
+        srcY += srcYStride * 2;
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dst += dstStride * 2;
+    }
+}
+
+void convertYUV420Planar16ToY410OrRGBA1010102(
+        uint32_t *dst, const uint16_t *srcY,
+        const uint16_t *srcU, const uint16_t *srcV,
+        size_t srcYStride, size_t srcUStride,
+        size_t srcVStride, size_t dstStride, size_t width, size_t height,
+        std::shared_ptr<const C2ColorAspectsStruct> aspects) {
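+    // On Android T and later HAL_PIXEL_FORMAT_RGBA_1010102 carries true RGBA data, so the
+    // 10-bit YUV input must be color-converted; on older releases the same format held
+    // YUVA 1010102 (Y410) data and a plain repack is sufficient.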
+    if (isAtLeastT()) {
+        convertYUV420Planar16ToRGBA1010102(dst, srcY, srcU, srcV, srcYStride, srcUStride,
+                                           srcVStride, dstStride, width, height, aspects);
+    } else {
+        convertYUV420Planar16ToY410(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                    dstStride, width, height);
+    }
+}
+
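+// Narrows 10-bit planar YUV (stored in 16-bit samples) to 8-bit YV12 by dropping the two
+// least significant bits of each sample.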
+void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = (uint8_t)(srcY[x] >> 2);
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t y = 0; y < (height + 1) / 2; ++y) {
+            memset(dstV, kNeutralUVBitDepth8, (width + 1) / 2);
+            memset(dstU, kNeutralUVBitDepth8, (width + 1) / 2);
+            dstV += dstUVStride;
+            dstU += dstUVStride;
+        }
+        return;
+    }
+
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        for (size_t x = 0; x < (width + 1) / 2; ++x) {
+            dstU[x] = (uint8_t)(srcU[x] >> 2);
+            dstV[x] = (uint8_t)(srcV[x] >> 2);
+        }
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dstU += dstUVStride;
+        dstV += dstUVStride;
+    }
+}
+
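+// Packs 10-bit planar YUV into P010: each sample is stored in the upper bits of a 16-bit
+// word (hence << 6) and U/V are interleaved into a single semi-planar chroma plane.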
+void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] << 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t y = 0; y < (height + 1) / 2; ++y) {
+            for (size_t x = 0; x < (width + 1) / 2; ++x) {
+                dstUV[2 * x] = kNeutralUVBitDepth10 << 6;
+                dstUV[2 * x + 1] = kNeutralUVBitDepth10 << 6;
+            }
+            dstUV += dstUVStride;
+        }
+        return;
+    }
+
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        for (size_t x = 0; x < (width + 1) / 2; ++x) {
+            dstUV[2 * x] = srcU[x] << 6;
+            dstUV[2 * x + 1] = srcV[x] << 6;
+        }
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dstUV += dstUVStride;
+    }
+}
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
@@ -591,6 +975,37 @@
     return hasQueuedWork;
 }
 
+int SimpleC2Component::getHalPixelFormatForBitDepth10(bool allowRGBA1010102) {
+    // Save supported hal pixel formats for bit depth of 10, the first time this is called
+    if (!mBitDepth10HalPixelFormats.size()) {
+        std::vector<int> halPixelFormats;
+        halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+
+        // Since allowRGBA1010102 can change on each call, but mBitDepth10HalPixelFormats
+        // is populated only once, allowRGBA1010102 is not considered at this stage.
+        halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
+
+        for (int halPixelFormat : halPixelFormats) {
+            if (isHalPixelFormatSupported((AHardwareBuffer_Format)halPixelFormat)) {
+                mBitDepth10HalPixelFormats.push_back(halPixelFormat);
+            }
+        }
+        // Add YV12 in the end as a fall-back option
+        mBitDepth10HalPixelFormats.push_back(HAL_PIXEL_FORMAT_YV12);
+    }
+    // From Android T onwards, HAL_PIXEL_FORMAT_RGBA_1010102 corresponds to the true
+    // RGBA 1010102 format, unlike earlier versions where it was used to represent
+    // YUVA 1010102 data.
+    if (!isAtLeastT()) {
+        // When RGBA1010102 is not allowed and the first supported HAL pixel format is
+        // HAL_PIXEL_FORMAT_RGBA_1010102, return HAL_PIXEL_FORMAT_YV12 instead.
+        if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            return HAL_PIXEL_FORMAT_YV12;
+        }
+    }
+    // Return the first entry from supported formats
+    return mBitDepth10HalPixelFormats[0];
+}
 std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
         const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
     return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp
index 29740d1..993e602 100644
--- a/media/codec2/components/base/SimpleC2Interface.cpp
+++ b/media/codec2/components/base/SimpleC2Interface.cpp
@@ -28,6 +28,14 @@
 
 /* SimpleInterface */
 
+static C2R SubscribedParamIndicesSetter(
+        bool mayBlock, C2InterfaceHelper::C2P<C2SubscribedParamIndicesTuning> &me) {
+    (void)mayBlock;
+    (void)me;
+
+    return C2R::Ok();
+}
+
 SimpleInterface<void>::BaseParams::BaseParams(
         const std::shared_ptr<C2ReflectorHelper> &reflector,
         C2String name,
@@ -186,7 +194,7 @@
             .withDefault(C2SubscribedParamIndicesTuning::AllocShared(0u))
             .withFields({ C2F(mSubscribedParamIndices, m.values[0]).any(),
                           C2F(mSubscribedParamIndices, m.values).any() })
-            .withSetter(Setter<C2SubscribedParamIndicesTuning>::NonStrictValuesWithNoDeps)
+            .withSetter(SubscribedParamIndicesSetter)
             .build());
 
     /* TODO
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index e5e16d8..7600c5b 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -26,8 +26,35 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/Mutexed.h>
 
+struct C2ColorAspectsStruct;
+
 namespace android {
 
+void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
+                                const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
+                                size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                size_t dstUVStride, uint32_t width, uint32_t height,
+                                bool isMonochrome = false);
+
+void convertYUV420Planar16ToY410OrRGBA1010102(
+        uint32_t *dst, const uint16_t *srcY,
+        const uint16_t *srcU, const uint16_t *srcV,
+        size_t srcYStride, size_t srcUStride,
+        size_t srcVStride, size_t dstStride, size_t width, size_t height,
+        std::shared_ptr<const C2ColorAspectsStruct> aspects = nullptr);
+
+void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome = false);
+
+void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome = false);
+
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
 public:
@@ -149,6 +176,7 @@
     static constexpr uint32_t NO_DRAIN = ~0u;
 
     C2ReadView mDummyReadView;
+    int getHalPixelFormatForBitDepth10(bool allowRGBA1010102);
 
 private:
     const std::shared_ptr<C2ComponentInterface> mIntf;
@@ -232,6 +260,7 @@
     class BlockingBlockPool;
     std::shared_ptr<BlockingBlockPool> mOutputBlockPool;
 
+    std::vector<int> mBitDepth10HalPixelFormats;
     SimpleC2Component() = delete;
 };
 
diff --git a/media/codec2/components/base/include/SimpleC2Interface.h b/media/codec2/components/base/include/SimpleC2Interface.h
index 2051d3d0..916f392 100644
--- a/media/codec2/components/base/include/SimpleC2Interface.h
+++ b/media/codec2/components/base/include/SimpleC2Interface.h
@@ -209,6 +209,7 @@
         return me.F(me.v.value).validatePossible(me.v.value);
     }
 
+    // TODO(b/230146771): fix crash
     static C2R NonStrictValuesWithNoDeps(
             bool mayBlock, C2InterfaceHelper::C2P<type> &me) {
         (void)mayBlock;
diff --git a/media/codec2/components/cmds/Android.bp b/media/codec2/components/cmds/Android.bp
index d6ffd12..2a11c01 100644
--- a/media/codec2/components/cmds/Android.bp
+++ b/media/codec2/components/cmds/Android.bp
@@ -15,9 +15,6 @@
         "codec2.cpp",
     ],
 
-    include_dirs: [
-    ],
-
     header_libs: [
         "libmediadrm_headers",
     ],
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 6fead3a..182edfb 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -439,9 +439,6 @@
     }
     FLAC__bool ok = FLAC__stream_encoder_finish(mFlacStreamEncoder);
     if (!ok) return C2_CORRUPTED;
-    mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
-    mProcessedSamples = 0u;
 
     return C2_OK;
 }
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 7692d37..162339f 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -22,4 +22,10 @@
 
     srcs: ["C2SoftGav1Dec.cpp"],
     static_libs: ["libgav1"],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
 }
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index f857e87..4dec57f 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -20,21 +20,21 @@
 
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
 #include <SimpleC2Interface.h>
 #include <log/log.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
-namespace {
-
-constexpr uint8_t NEUTRAL_UV_VALUE = 128;
-
-}  // namespace
 
 // codecname set and passed in as a compile flag from Android.bp
 constexpr char COMPONENT_NAME[] = CODECNAME;
 
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
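+// Lower bound applied to the C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE default and its setter.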
+
 class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
   explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
@@ -56,8 +56,8 @@
         DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
             .withFields({
-                C2F(mSize, width).inRange(2, 4096, 2),
-                C2F(mSize, height).inRange(2, 4096, 2),
+                C2F(mSize, width).inRange(2, 4096),
+                C2F(mSize, height).inRange(2, 4096),
             })
             .withSetter(SizeSetter)
             .build());
@@ -113,8 +113,7 @@
             .build());
 
     addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
-                     .withDefault(new C2StreamMaxBufferSizeInfo::input(
-                         0u, 320 * 240 * 3 / 4))
+                     .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                      .withFields({
                          C2F(mMaxInputSize, value).any(),
                      })
@@ -156,11 +155,59 @@
             .withSetter(DefaultColorAspectsSetter)
             .build());
 
+      addParameter(
+              DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+              .withDefault(new C2StreamColorAspectsInfo::input(
+                      0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                      C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+              .withFields({
+                  C2F(mCodedColorAspects, range).inRange(
+                              C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                  C2F(mCodedColorAspects, primaries).inRange(
+                              C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                  C2F(mCodedColorAspects, transfer).inRange(
+                              C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                  C2F(mCodedColorAspects, matrix).inRange(
+                              C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+              })
+              .withSetter(CodedColorAspectsSetter)
+              .build());
+
+      addParameter(
+              DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+              .withDefault(new C2StreamColorAspectsInfo::output(
+                      0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                      C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+              .withFields({
+                  C2F(mColorAspects, range).inRange(
+                              C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                  C2F(mColorAspects, primaries).inRange(
+                              C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                  C2F(mColorAspects, transfer).inRange(
+                              C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                  C2F(mColorAspects, matrix).inRange(
+                              C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+              })
+              .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+              .build());
+
+    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+    }
+    // If the surface color format (IMPLEMENTATION_DEFINED) isn't added to the supported
+    // formats, there is no way to know when the color format is configured to surface.
+    // This is necessary to be able to choose a 10-bit format while decoding 10-bit clips
+    // in surface mode.
+    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
     // TODO: support more formats?
-    addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
-                     .withConstValue(new C2StreamPixelFormatInfo::output(
-                         0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
-                     .build());
+    addParameter(
+            DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+            .withDefault(new C2StreamPixelFormatInfo::output(
+                              0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+            .build());
   }
 
   static C2R SizeSetter(bool mayBlock,
@@ -194,9 +241,9 @@
       bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
       const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
     (void)mayBlock;
-    // assume compression ratio of 2
-    me.set().value =
-        (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072);
+    // assume compression ratio of 2, but enforce a floor
+    me.set().value = c2_max((((maxSize.v.width + 63) / 64)
+                * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
     return C2R::Ok();
   }
 
@@ -218,6 +265,37 @@
     return C2R::Ok();
   }
 
+  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+      me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+      me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+      me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+      me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+  }
+
+  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+                                const C2P<C2StreamColorAspectsTuning::output> &def,
+                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
+    (void)mayBlock;
+    // take default values for all unspecified fields, and coded values for specified ones
+    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+        ? def.v.primaries : coded.v.primaries;
+    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+        ? def.v.transfer : coded.v.transfer;
+    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+    return C2R::Ok();
+  }
+
   static C2R ProfileLevelSetter(
       bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
       const C2P<C2StreamPictureSizeInfo::output> &size) {
@@ -232,6 +310,10 @@
     return mDefaultColorAspects;
   }
 
+  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+      return mColorAspects;
+  }
+
   static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::input> &me) {
     (void)mayBlock;
@@ -246,6 +328,9 @@
     return C2R::Ok();
   }
 
+  // unsafe getters
+  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }
+
  private:
   std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
   std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -254,6 +339,8 @@
   std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
   std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
   std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
   std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
   std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
 };
@@ -264,8 +351,7 @@
           std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
       mCodecCtx(nullptr) {
-  gettimeofday(&mTimeStart, nullptr);
-  gettimeofday(&mTimeEnd, nullptr);
+  mTimeStart = mTimeEnd = systemTime();
 }
 
 C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }
@@ -332,6 +418,11 @@
 bool C2SoftGav1Dec::initDecoder() {
   mSignalledError = false;
   mSignalledOutputEos = false;
+  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+  {
+      IntfImpl::Lock lock = mIntf->lock();
+      mPixelFormatInfo = mIntf->getPixelFormat_l();
+  }
   mCodecCtx.reset(new libgav1::Decoder());
 
   if (mCodecCtx == nullptr) {
@@ -371,6 +462,10 @@
                                const std::shared_ptr<C2GraphicBlock> &block) {
   std::shared_ptr<C2Buffer> buffer =
       createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+  {
+      IntfImpl::Lock lock = mIntf->lock();
+      buffer->setInfo(mIntf->getColorAspects_l());
+  }
   auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
     uint32_t flags = 0;
     if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
@@ -431,19 +526,17 @@
   int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
   if (inSize) {
     uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
-    int32_t decodeTime = 0;
-    int32_t delay = 0;
 
-    GETTIME(&mTimeStart, nullptr);
-    TIME_DIFF(mTimeEnd, mTimeStart, delay);
+    mTimeStart = systemTime();
+    nsecs_t delay = mTimeStart - mTimeEnd;
 
     const Libgav1StatusCode status =
         mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                 /*buffer_private_data=*/nullptr);
 
-    GETTIME(&mTimeEnd, nullptr);
-    TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
-    ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);
+    mTimeEnd = systemTime();
+    nsecs_t decodeTime = mTimeEnd - mTimeStart;
+    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
 
     if (status != kLibgav1StatusOk) {
       ALOGE("av1 decoder failed to decode frame. status: %d.", status);
@@ -465,148 +558,36 @@
   }
 }
 
-static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-                                        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
-                                        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-                                        size_t dstYStride, size_t dstUVStride,
-                                        uint32_t width, uint32_t height,
-                                        bool isMonochrome) {
+void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
+    VuiColorAspects vuiColorAspects;
+    vuiColorAspects.primaries = buffer->color_primary;
+    vuiColorAspects.transfer = buffer->transfer_characteristics;
+    vuiColorAspects.coeffs = buffer->matrix_coefficients;
+    vuiColorAspects.fullRange = buffer->color_range;
 
-  for (size_t i = 0; i < height; ++i) {
-    memcpy(dstY, srcY, width);
-    srcY += srcYStride;
-    dstY += dstYStride;
-  }
-
-  if (isMonochrome) {
-    // Fill with neutral U/V values.
-    for (size_t i = 0; i < height / 2; ++i) {
-      memset(dstV, NEUTRAL_UV_VALUE, width / 2);
-      memset(dstU, NEUTRAL_UV_VALUE, width / 2);
-      dstV += dstUVStride;
-      dstU += dstUVStride;
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = { 0u };
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
     }
-    return;
-  }
-
-  for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dstV, srcV, width / 2);
-    srcV += srcVStride;
-    dstV += dstUVStride;
-  }
-
-  for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dstU, srcU, width / 2);
-    srcU += srcUStride;
-    dstU += dstUVStride;
-  }
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY,
-                                        const uint16_t *srcU,
-                                        const uint16_t *srcV, size_t srcYStride,
-                                        size_t srcUStride, size_t srcVStride,
-                                        size_t dstStride, size_t width,
-                                        size_t height) {
-  // Converting two lines at a time, slightly faster
-  for (size_t y = 0; y < height; y += 2) {
-    uint32_t *dstTop = (uint32_t *)dst;
-    uint32_t *dstBot = (uint32_t *)(dst + dstStride);
-    uint16_t *ySrcTop = (uint16_t *)srcY;
-    uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
-    uint16_t *uSrc = (uint16_t *)srcU;
-    uint16_t *vSrc = (uint16_t *)srcV;
-
-    uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
-    size_t x = 0;
-    for (; x < width - 3; x += 4) {
-      u01 = *((uint32_t *)uSrc);
-      uSrc += 2;
-      v01 = *((uint32_t *)vSrc);
-      vSrc += 2;
-
-      y01 = *((uint32_t *)ySrcTop);
-      ySrcTop += 2;
-      y23 = *((uint32_t *)ySrcTop);
-      ySrcTop += 2;
-      y45 = *((uint32_t *)ySrcBot);
-      ySrcBot += 2;
-      y67 = *((uint32_t *)ySrcBot);
-      ySrcBot += 2;
-
-      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-      uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
-      *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
-      *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
-      *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
-      *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
-      *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
-      *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
-      *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
-      *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
-    }
-
-    // There should be at most 2 more pixels to process. Note that we don't
-    // need to consider odd case as the buffer is always aligned to even.
-    if (x < width) {
-      u01 = *uSrc;
-      v01 = *vSrc;
-      y01 = *((uint32_t *)ySrcTop);
-      y45 = *((uint32_t *)ySrcBot);
-      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-      *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
-      *dstTop++ = ((y01 >> 16) << 10) | uv0;
-      *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
-      *dstBot++ = ((y45 >> 16) << 10) | uv0;
-    }
-
-    srcY += srcYStride * 2;
-    srcU += srcUStride;
-    srcV += srcVStride;
-    dst += dstStride * 2;
-  }
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
-    uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-    const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-    size_t srcYStride, size_t srcUStride, size_t srcVStride,
-    size_t dstYStride, size_t dstUVStride,
-    size_t width, size_t height, bool isMonochrome) {
-
-  for (size_t y = 0; y < height; ++y) {
-    for (size_t x = 0; x < width; ++x) {
-      dstY[x] = (uint8_t)(srcY[x] >> 2);
-    }
-
-    srcY += srcYStride;
-    dstY += dstYStride;
-  }
-
-  if (isMonochrome) {
-    // Fill with neutral U/V values.
-    for (size_t y = 0; y < (height + 1) / 2; ++y) {
-      memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
-      memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
-      dstV += dstUVStride;
-      dstU += dstUVStride;
-    }
-    return;
-  }
-
-  for (size_t y = 0; y < (height + 1) / 2; ++y) {
-    for (size_t x = 0; x < (width + 1) / 2; ++x) {
-      dstU[x] = (uint8_t)(srcU[x] >> 2);
-      dstV[x] = (uint8_t)(srcV[x] >> 2);
-    }
-
-    srcU += srcUStride;
-    srcV += srcVStride;
-    dstU += dstUVStride;
-    dstV += dstUVStride;
-  }
 }
 
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
@@ -651,6 +632,7 @@
     }
   }
 
+  getVuiParams(buffer);
   if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
         buffer->image_format == libgav1::kImageFormatMonochrome400)) {
     ALOGE("image_format %d not supported", buffer->image_format);
@@ -664,28 +646,52 @@
 
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
-  if (buffer->bitdepth == 10) {
+  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
-    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
-        mIntf->getDefaultColorAspects_l();
-
-    if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
-        defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
-        defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
-      if (buffer->image_format != libgav1::kImageFormatYuv420) {
+    codedColorAspects = mIntf->getColorAspects_l();
+    bool allowRGBA1010102 = false;
+    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+      allowRGBA1010102 = true;
+    }
+    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
+        (buffer->image_format != libgav1::kImageFormatYuv420)) {
         ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
-        mSignalledError = true;
-        work->result = C2_OMITTED;
-        work->workletsProcessed = 1u;
-        return false;
-      }
-      format = HAL_PIXEL_FORMAT_RGBA_1010102;
+      mSignalledError = true;
+      work->result = C2_OMITTED;
+      work->workletsProcessed = 1u;
+      return false;
     }
   }
+
+  if (mHalPixelFormat != format) {
+    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+    if (err == C2_OK) {
+      work->worklets.front()->output.configUpdate.push_back(
+          C2Param::Copy(pixelFormat));
+    } else {
+      ALOGE("Config update pixelFormat failed");
+      mSignalledError = true;
+      work->workletsProcessed = 1u;
+      work->result = C2_CORRUPTED;
+      return UNKNOWN_ERROR;
+    }
+    mHalPixelFormat = format;
+  }
+
   C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
 
-  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format,
-                                            usage, &block);
+  // We always create a graphic block that is width aligned to 16 and height
+  // aligned to 2. We set the correct "crop" value of the image in the call to
+  // createGraphicBuffer() by setting the correct image dimensions.
+  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
+                                            align(mHeight, 2), format, usage,
+                                            &block);
 
   if (err != C2_OK) {
     ALOGE("fetchGraphicBlock for Output failed with status %d", err);
@@ -721,22 +727,26 @@
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-      convertYUV420Planar16ToY410(
-          (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
-          srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
+        convertYUV420Planar16ToY410OrRGBA1010102(
+                (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
+                srcUStride / 2, srcVStride / 2,
+                dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+                                    srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
+                                    dstUVStride / 2, mWidth, mHeight, isMonochrome);
     } else {
-      convertYUV420Planar16ToYUV420Planar(
-          dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
-          srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
-          isMonochrome);
+        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+                                    srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride, mWidth,
+                                    mHeight, isMonochrome);
     }
   } else {
     const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
-    copyOutputBufferToYV12Frame(
-        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
-        dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
+    convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                               srcVStride, dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
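
The P010 branch above relies on the shared convertYUV420Planar16ToP010() helper. As a rough sketch only (assumed behavior, not the shared implementation), repacking 10-bit planar data into P010 amounts to MSB-aligning each sample in a 16-bit word and interleaving Cb/Cr into one plane; strides here are in 16-bit units, matching the "/ 2" arguments passed above:

    #include <cstddef>
    #include <cstdint>

    // Illustrative sketch, not the shared helper: P010 keeps the 10 significant bits
    // in the top of each 16-bit word and uses a single interleaved CbCr plane.
    static void yuv420Planar16ToP010Sketch(
            uint16_t *dstY, uint16_t *dstUV,
            const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
            size_t srcYStride, size_t srcUStride, size_t srcVStride,
            size_t dstYStride, size_t dstUVStride, size_t width, size_t height) {
        for (size_t y = 0; y < height; ++y) {
            for (size_t x = 0; x < width; ++x) {
                dstY[x] = srcY[x] << 6;              // 0..1023 -> MSB-aligned
            }
            srcY += srcYStride;
            dstY += dstYStride;
        }
        for (size_t y = 0; y < (height + 1) / 2; ++y) {
            for (size_t x = 0; x < (width + 1) / 2; ++x) {
                dstUV[2 * x] = srcU[x] << 6;         // Cb
                dstUV[2 * x + 1] = srcV[x] << 6;     // Cr
            }
            srcU += srcUStride;
            srcV += srcVStride;
            dstUV += dstUVStride;
        }
    }
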
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 555adc9..3d4db55 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -17,15 +17,15 @@
 #ifndef ANDROID_C2_SOFT_GAV1_DEC_H_
 #define ANDROID_C2_SOFT_GAV1_DEC_H_
 
+#include <inttypes.h>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
 #include <SimpleC2Component.h>
+#include <C2Config.h>
 #include "libgav1/src/gav1/decoder.h"
 #include "libgav1/src/gav1/decoder_settings.h"
 
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff)     \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
-
 namespace android {
 
 struct C2SoftGav1Dec : public SimpleC2Component {
@@ -51,15 +51,42 @@
   std::shared_ptr<IntfImpl> mIntf;
   std::unique_ptr<libgav1::Decoder> mCodecCtx;
 
+  // configurations used by component in process
+  // (TODO: keep this in intf but make them internal only)
+  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+  uint32_t mHalPixelFormat;
   uint32_t mWidth;
   uint32_t mHeight;
   bool mSignalledOutputEos;
   bool mSignalledError;
 
-  struct timeval mTimeStart;  // Time at the start of decode()
-  struct timeval mTimeEnd;    // Time at the end of decode()
+  // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+  // converting them to C2 values for each frame
+  struct VuiColorAspects {
+      uint8_t primaries;
+      uint8_t transfer;
+      uint8_t coeffs;
+      uint8_t fullRange;
+
+      // default color aspects
+      VuiColorAspects()
+          : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+            transfer(C2Color::TRANSFER_UNSPECIFIED),
+            coeffs(C2Color::MATRIX_UNSPECIFIED),
+            fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+      bool operator==(const VuiColorAspects &o) {
+          return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+                  && fullRange == o.fullRange;
+      }
+  } mBitstreamColorAspects;
+
+  nsecs_t mTimeStart = 0;  // Time at the start of decode()
+  nsecs_t mTimeEnd = 0;    // Time at the end of decode()
 
   bool initDecoder();
+  void getVuiParams(const libgav1::DecoderBuffer *buffer);
   void destroyDecoder();
   void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                   const std::shared_ptr<C2GraphicBlock>& block);
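
getVuiParams() is only declared here; its definition is not part of this hunk. A self-contained illustration of the pattern the comment above describes (names below are stand-ins, not the component's members):

    #include <cstdint>

    struct IsoAspects {                     // mirrors VuiColorAspects above
        uint8_t primaries = 2, transfer = 2, coeffs = 2;  // 2 = "unspecified" in the ISO tables
        uint8_t fullRange = 0;
        bool operator==(const IsoAspects &o) const {
            return primaries == o.primaries && transfer == o.transfer &&
                   coeffs == o.coeffs && fullRange == o.fullRange;
        }
    };

    struct AspectsTracker {
        IsoAspects last;                    // plays the role of mBitstreamColorAspects
        int conversions = 0;
        void onFrame(const IsoAspects &fromBitstream) {
            if (fromBitstream == last) return;   // common case: skip per-frame conversion
            last = fromBitstream;
            ++conversions;  // only now would the component map ISO values to C2Color
        }                   // values and queue a color-aspects config update
    };
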
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 6bcf3a2..5a660c5 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -34,6 +34,7 @@
 constexpr char COMPONENT_NAME[] = "c2.android.hevc.decoder";
 constexpr uint32_t kDefaultOutputDelay = 8;
 constexpr uint32_t kMaxOutputDelay = 16;
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
 }  // namespace
 
 class C2SoftHevcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -108,7 +109,7 @@
 
         addParameter(
                 DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
-                .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4))
+                .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                 .withFields({
                     C2F(mMaxInputSize, value).any(),
                 })
@@ -220,8 +221,9 @@
     static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
                                   const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
         (void)mayBlock;
-        // assume compression ratio of 2
-        me.set().value = (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072);
+        // assume compression ratio of 2, but enforce a floor
+        me.set().value = c2_max((((maxSize.v.width + 63) / 64)
+                    * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
         return C2R::Ok();
     }
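
A compile-time check of the arithmetic above with example dimensions (illustrative only):

    #include <cstdint>

    constexpr uint32_t kMinInputBufferSizeEx = 2 * 1024 * 1024;  // mirrors kMinInputBufferSize
    constexpr uint32_t maxInputSizeEx(uint32_t w, uint32_t h) {
        uint32_t formula = ((w + 63) / 64) * ((h + 63) / 64) * 3072;
        return formula > kMinInputBufferSizeEx ? formula : kMinInputBufferSizeEx;
    }
    static_assert(maxInputSizeEx(1920, 1088) == kMinInputBufferSizeEx);  // 1,566,720 -> floored to 2 MiB
    static_assert(maxInputSizeEx(3840, 2160) == 6266880);                // formula already above the floor
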
 
@@ -502,7 +504,7 @@
 status_t C2SoftHevcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN32(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -661,8 +663,7 @@
 
 void C2SoftHevcDec::resetPlugin() {
     mSignalledOutputEos = false;
-    gettimeofday(&mTimeStart, nullptr);
-    gettimeofday(&mTimeEnd, nullptr);
+    mTimeStart = mTimeEnd = systemTime();
 }
 
 status_t C2SoftHevcDec::deleteDecoder() {
@@ -768,20 +769,20 @@
         return C2_CORRUPTED;
     }
     if (mOutBlock &&
-            (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+            (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
         mOutBlock.reset();
     }
     if (!mOutBlock) {
         uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
-            pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+            pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
         if (err != C2_OK) {
             ALOGE("fetchGraphicBlock for Output failed with status %d", err);
             return err;
         }
         ALOGV("provided (%dx%d) required (%dx%d)",
-              mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+              mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
     }
 
     return C2_OK;
@@ -856,14 +857,13 @@
             /* Decode header and get dimensions */
             setParams(mStride, IVD_DECODE_HEADER);
         }
-        WORD32 delay;
-        GETTIME(&mTimeStart, nullptr);
-        TIME_DIFF(mTimeEnd, mTimeStart, delay);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
         (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
-        WORD32 decodeTime;
-        GETTIME(&mTimeEnd, nullptr);
-        TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
-        ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+        mTimeEnd = systemTime();
+        nsecs_t decodeTime = mTimeEnd - mTimeStart;
+        ALOGV("decodeTime=%6" PRId64 " delay=%6" PRId64 " numBytes=%6d", decodeTime, delay,
               ps_decode_op->u4_num_bytes_consumed);
         if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
             ALOGE("allocation failure in decoder");
@@ -917,7 +917,7 @@
         if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (ps_decode_op->u4_pic_wd != mWidth ||  ps_decode_op->u4_pic_ht != mHeight) {
                 mWidth = ps_decode_op->u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index b9b0a48..6abf69e 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -20,6 +20,7 @@
 #include <media/stagefright/foundation/ColorUtils.h>
 
 #include <atomic>
+#include <inttypes.h>
 #include <SimpleC2Component.h>
 
 #include "ihevc_typedefs.h"
@@ -36,15 +37,11 @@
 #define ivdext_ctl_set_num_cores_op_t   ihevcd_cxa_ctl_set_num_cores_op_t
 #define ivdext_ctl_get_vui_params_ip_t  ihevcd_cxa_ctl_get_vui_params_ip_t
 #define ivdext_ctl_get_vui_params_op_t  ihevcd_cxa_ctl_get_vui_params_op_t
-#define ALIGN32(x)                      ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
 #define MIN(a, b)                       (((a) < (b)) ? (a) : (b))
-#define GETTIME(a, b)                   gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff)     \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
 
 
 struct C2SoftHevcDec : public SimpleC2Component {
@@ -142,8 +139,8 @@
     } mBitstreamColorAspects;
 
     // profile
-    struct timeval mTimeStart;
-    struct timeval mTimeEnd;
+    nsecs_t mTimeStart = 0;
+    nsecs_t mTimeEnd = 0;
 
     C2_DO_NOT_COPY(C2SoftHevcDec);
 };
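
The stride alignment moves from 32 to 128 here and in the .cpp above; a quick check of the macro arithmetic (example widths only):

    #define ALIGN128_EX(x) ((((x) + 127) >> 7) << 7)  // same arithmetic as ALIGN128 above
    static_assert(ALIGN128_EX(1920) == 1920);         // already a multiple of 128
    static_assert(ALIGN128_EX(1921) == 2048);         // rounds up to the next multiple
    static_assert(ALIGN128_EX(176)  == 256);          // QCIF width now pads to 256
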
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 4bc1777..60d5875 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -123,7 +123,7 @@
         // matches size limits in codec library
         addParameter(
             DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-                .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
                 .withFields({
                     C2F(mSize, width).inRange(2, 1920, 2),
                     C2F(mSize, height).inRange(2, 1088, 2),
@@ -133,7 +133,7 @@
 
         addParameter(
             DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-                .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+                .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
                 .withFields({C2F(mFrameRate, value).greaterThan(0.)})
                 .withSetter(
                     Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
@@ -245,6 +245,19 @@
                 })
                 .withSetter(CodedColorAspectsSetter, mColorAspects)
                 .build());
+
+        addParameter(
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
     }
 
     static C2R InputDelaySetter(
@@ -464,9 +477,69 @@
         me.set().matrix = coded.v.matrix;
         return C2R::Ok();
     }
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+
+        // these are the ones we're going to set, so want them to default
+        // to the DEFAULT values for the codec
+        int32_t iMin = HEVC_QP_MIN, pMin = HEVC_QP_MIN, bMin = HEVC_QP_MIN;
+        int32_t iMax = HEVC_QP_MAX, pMax = HEVC_QP_MAX, bMax = HEVC_QP_MAX;
+
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            // layerMin is clamped to [HEVC_QP_MIN, layerMax] to avoid error
+            // cases where layer.min > layer.max
+            int32_t layerMax = std::clamp(layer.max, HEVC_QP_MIN, HEVC_QP_MAX);
+            int32_t layerMin = std::clamp(layer.min, HEVC_QP_MIN, layerMax);
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                iMax = layerMax;
+                iMin = layerMin;
+                ALOGV("iMin %d iMax %d", iMin, iMax);
+            } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+                pMax = layerMax;
+                pMin = layerMin;
+                ALOGV("pMin %d pMax %d", pMin, pMax);
+            } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                bMax = layerMax;
+                bMin = layerMin;
+                ALOGV("bMin %d bMax %d", bMin, bMax);
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
+              iMin, iMax, pMin, pMax, bMin, bMax);
+
+        int32_t maxFrameQP = std::min(std::min(iMax, pMax), bMax);
+        int32_t minFrameQP = std::max(std::max(iMin, pMin), bMin);
+        if (minFrameQP > maxFrameQP) {
+            minFrameQP = maxFrameQP;
+        }
+
+        // put them back into the structure
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+                layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+                layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                me.set().m.values[i].max = maxFrameQP;
+                me.set().m.values[i].min = minFrameQP;
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(exit): i = p = b = %d-%d",
+              minFrameQP, maxFrameQP);
+
+        return C2R::Ok();
+    }
     std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() {
         return mCodedColorAspects;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -482,6 +555,7 @@
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 static size_t GetCPUCoreCount() {
@@ -517,8 +591,7 @@
     CREATE_DUMP_FILE(mInFile);
     CREATE_DUMP_FILE(mOutFile);
 
-    gettimeofday(&mTimeStart, nullptr);
-    gettimeofday(&mTimeEnd, nullptr);
+    mTimeStart = mTimeEnd = systemTime();
 }
 
 C2SoftHevcEnc::~C2SoftHevcEnc() {
@@ -654,12 +727,41 @@
         mEncParams.s_coding_tools_prms.i4_max_temporal_layers = 3;
     }
 
-    switch (mBitrateMode->value) {
-        case C2Config::BITRATE_IGNORE:
-            mEncParams.s_config_prms.i4_rate_control_mode = 3;
-            mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
-                getQpFromQuality(mQuality->value);
+    // we resolved out-of-bound and unspecified values in PictureQuantizationSetter()
+    // so we can start with defaults that are overridden as needed.
+    int32_t maxFrameQP = mEncParams.s_config_prms.i4_max_frame_qp;
+    int32_t minFrameQP = mEncParams.s_config_prms.i4_min_frame_qp;
+
+    for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+
+        // no need to loop, hevc library takes same range for I/P/B picture type
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+            layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+            layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+
+            maxFrameQP = layer.max;
+            minFrameQP = layer.min;
             break;
+        }
+    }
+    mEncParams.s_config_prms.i4_max_frame_qp = maxFrameQP;
+    mEncParams.s_config_prms.i4_min_frame_qp = minFrameQP;
+
+    ALOGV("MaxFrameQp: %d MinFrameQp: %d", maxFrameQP, minFrameQP);
+
+    mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+        std::clamp(kDefaultInitQP, minFrameQP, maxFrameQP);
+
+    switch (mBitrateMode->value) {
+        case C2Config::BITRATE_IGNORE: {
+            mEncParams.s_config_prms.i4_rate_control_mode = 3;
+            // ensure initial qp values are within our newly configured bounds
+            int32_t frameQp = getQpFromQuality(mQuality->value);
+            mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+                std::clamp(frameQp, minFrameQP, maxFrameQP);
+            break;
+        }
         case C2Config::BITRATE_CONST:
             mEncParams.s_config_prms.i4_rate_control_mode = 5;
             break;
@@ -723,6 +825,7 @@
         mGop = mIntf->getGop_l();
         mRequestSync = mIntf->getRequestSync_l();
         mColorAspects = mIntf->getCodedColorAspects_l();
+        mQpBounds = mIntf->getPictureQuantization_l();
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
     c2_status_t status = initEncParams();
@@ -798,7 +901,8 @@
             yStride = width;
             uStride = vStride = yStride / 2;
             ConvertRGBToPlanarYUV(yPlane, yStride, height,
-                                  conversionBuffer.size(), *input);
+                                  conversionBuffer.size(), *input,
+                                  mColorAspects->matrix, mColorAspects->range);
             break;
         }
         case C2PlanarLayout::TYPE_YUV: {
@@ -1099,11 +1203,11 @@
         }
     }
 
-    uint64_t timeDelay = 0;
-    uint64_t timeTaken = 0;
+    nsecs_t timeDelay = 0;
+    nsecs_t timeTaken = 0;
     memset(&s_encode_op, 0, sizeof(s_encode_op));
-    GETTIME(&mTimeStart, nullptr);
-    TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+    mTimeStart = systemTime();
+    timeDelay = mTimeStart - mTimeEnd;
 
     if (inputBuffer) {
         err = ihevce_encode(mCodecCtx, &s_encode_ip, &s_encode_op);
@@ -1118,12 +1222,12 @@
         fillEmptyWork(work);
     }
 
-    GETTIME(&mTimeEnd, nullptr);
     /* Compute time taken for decode() */
-    TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+    mTimeEnd = systemTime();
+    timeTaken = mTimeEnd - mTimeStart;
 
-    ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", (int)timeTaken,
-          (int)timeDelay, s_encode_op.i4_bytes_generated);
+    ALOGV("timeTaken=%6" PRId64 " delay=%6" PRId64 " numBytes=%6d", timeTaken,
+          timeDelay, s_encode_op.i4_bytes_generated);
 
     if (s_encode_op.i4_bytes_generated) {
         finishWork(s_encode_op.u8_pts, work, pool, &s_encode_op);
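
For reference, a worked example of how the new QP handling above collapses per-picture-type requests into the single range the HEVC library accepts (example values, compile-time only):

    #include <algorithm>

    constexpr int kDefaultInitQPEx = 32;                      // mirrors kDefaultInitQP
    constexpr int iMin = 10, iMax = 40, pMin = 12, pMax = 42, bMin = 15, bMax = 45;
    constexpr int maxFrameQP = std::min({iMax, pMax, bMax});  // 40
    constexpr int minFrameQP = std::max({iMin, pMin, bMin});  // 15
    static_assert(minFrameQP == 15 && maxFrameQP == 40);
    static_assert(std::clamp(kDefaultInitQPEx, minFrameQP, maxFrameQP) == 32);
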
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 9dbf682..ce9cec8 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -19,6 +19,7 @@
 
 #include <SimpleC2Component.h>
 #include <algorithm>
+#include <inttypes.h>
 #include <map>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <utils/Vector.h>
@@ -27,14 +28,6 @@
 
 namespace android {
 
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b)
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff)                      \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-           ((end).tv_usec - (start).tv_usec);
-
 #define CODEC_MAX_CORES  4
 #define MAX_B_FRAMES     1
 #define MAX_RC_LOOKAHEAD 1
@@ -42,6 +35,11 @@
 #define DEFAULT_B_FRAMES     0
 #define DEFAULT_RC_LOOKAHEAD 0
 
+#define HEVC_QP_MIN 1
+#define HEVC_QP_MAX 51
+
+constexpr int32_t kDefaultInitQP = 32;
+
 struct C2SoftHevcEnc : public SimpleC2Component {
     class IntfImpl;
 
@@ -90,14 +88,15 @@
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
     char mOutFile[200];
 #endif /* FILE_DUMP_ENABLE */
 
     // profile
-    struct timeval mTimeStart;
-    struct timeval mTimeEnd;
+    nsecs_t mTimeStart = 0;
+    nsecs_t mTimeEnd = 0;
 
     c2_status_t initEncParams();
     c2_status_t initEncoder();
@@ -121,14 +120,12 @@
 #define OUTPUT_DUMP_EXT "h265"
 #define GENERATE_FILE_NAMES()                                             \
 {                                                                         \
-    GETTIME(&mTimeStart, NULL);                                           \
-    strcpy(mInFile, "");                                                  \
+    nsecs_t now = systemTime();                                           \
     ALOGD("GENERATE_FILE_NAMES");                                         \
-    sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
-            mTimeStart.tv_usec, INPUT_DUMP_EXT);                          \
-    strcpy(mOutFile, "");                                                 \
-    sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,                  \
-            mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT);      \
+    sprintf(mInFile, "%s_%" PRId64 ".%s", INPUT_DUMP_PATH,                \
+            now, INPUT_DUMP_EXT);                                         \
+    sprintf(mOutFile, "%s_%" PRId64 ".%s", OUTPUT_DUMP_PATH,              \
+            now, OUTPUT_DUMP_EXT);                                        \
 }
 
 #define CREATE_DUMP_FILE(m_filename)                 \
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index e394670..149c6ee 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -321,6 +321,13 @@
     return C2_OK;
 }
 
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+    work->worklets.front()->output.flags = work->input.flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
 // TODO: Can overall error checking be improved? As in the check for validity of
 //       work, pool ptr, work->input.buffers.size() == 1, ...
 // TODO: Blind removal of 529 samples from the output may not work. Because
@@ -486,17 +493,17 @@
         }
     }
 
-    int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
-    mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
-    ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
-          outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
-    decodedSizes.clear();
-    work->worklets.front()->output.flags = work->input.flags;
-    work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(
-            createLinearBuffer(block, outOffset, outSize - outOffset));
-    work->worklets.front()->output.ordinal = work->input.ordinal;
-    work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+    fillEmptyWork(work);
+    if (samplingRate && numChannels) {
+        int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+        mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
+        ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
+               outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
+        decodedSizes.clear();
+        work->worklets.front()->output.buffers.push_back(
+                createLinearBuffer(block, outOffset, outSize - outOffset));
+        work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+    }
     if (eos) {
         mSignalledOutputEos = true;
         ALOGV("signalled EOS");
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index daa10ae..a58044c 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -17,9 +17,4 @@
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
-
-    include_dirs: [
-        "external/libmpeg2/decoder",
-        "external/libmpeg2/common",
-    ],
 }
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index b1cf388..9a41910 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -572,7 +572,7 @@
     if (OK != createDecoder()) return UNKNOWN_ERROR;
 
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN32(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -731,8 +731,7 @@
 
 void C2SoftMpeg2Dec::resetPlugin() {
     mSignalledOutputEos = false;
-    gettimeofday(&mTimeStart, nullptr);
-    gettimeofday(&mTimeEnd, nullptr);
+    mTimeStart = mTimeEnd = systemTime();
 }
 
 status_t C2SoftMpeg2Dec::deleteDecoder() {
@@ -845,20 +844,20 @@
         return C2_CORRUPTED;
     }
     if (mOutBlock &&
-            (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+            (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
         mOutBlock.reset();
     }
     if (!mOutBlock) {
         uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
-            pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+            pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
         if (err != C2_OK) {
             ALOGE("fetchGraphicBlock for Output failed with status %d", err);
             return err;
         }
         ALOGV("provided (%dx%d) required (%dx%d)",
-              mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+              mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
     }
 
     return C2_OK;
@@ -929,14 +928,12 @@
         }
         // If input dump is enabled, then write to file
         DUMP_TO_FILE(mInFile, s_decode_ip.pv_stream_buffer, s_decode_ip.u4_num_Bytes);
-        WORD32 delay;
-        GETTIME(&mTimeStart, nullptr);
-        TIME_DIFF(mTimeEnd, mTimeStart, delay);
+        nsecs_t delay = mTimeStart - mTimeEnd;
         (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
-        WORD32 decodeTime;
-        GETTIME(&mTimeEnd, nullptr);
-        TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
-        ALOGV("decodeTime=%6d delay=%6d numBytes=%6d ", decodeTime, delay,
+
+        mTimeEnd = systemTime();
+        nsecs_t decodeTime = mTimeEnd - mTimeStart;
+        ALOGV("decodeTime=%" PRId64 " delay=%" PRId64 " numBytes=%6d ", decodeTime, delay,
               s_decode_op.u4_num_bytes_consumed);
         if (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_decode_op.u4_error_code) {
             ALOGV("unsupported resolution : %dx%d", s_decode_op.u4_pic_wd, s_decode_op.u4_pic_ht);
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index fd66304a..3965bcc 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -18,6 +18,7 @@
 #define ANDROID_C2_SOFT_MPEG2_DEC_H_
 
 #include <atomic>
+#include <inttypes.h>
 #include <SimpleC2Component.h>
 
 #include <media/stagefright/foundation/ColorUtils.h>
@@ -37,24 +38,19 @@
 #define ivdext_ctl_set_num_cores_op_t   impeg2d_ctl_set_num_cores_op_t
 #define ivdext_ctl_get_seq_info_ip_t    impeg2d_ctl_get_seq_info_ip_t
 #define ivdext_ctl_get_seq_info_op_t    impeg2d_ctl_get_seq_info_op_t
-#define ALIGN32(x)                      ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
 #define MIN(a, b)                       (((a) < (b)) ? (a) : (b))
-#define GETTIME(a, b)                   gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff)     \
-    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
-            ((end).tv_usec - (start).tv_usec);
 
 #ifdef FILE_DUMP_ENABLE
     #define INPUT_DUMP_PATH     "/sdcard/clips/mpeg2d_input"
     #define INPUT_DUMP_EXT      "m2v"
     #define GENERATE_FILE_NAMES() {                         \
-        GETTIME(&mTimeStart, NULL);                         \
-        strcpy(mInFile, "");                                \
-        sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH,  \
-                mTimeStart.tv_sec, mTimeStart.tv_usec,      \
+        nsecs_t now = systemTime();                         \
+        sprintf(mInFile, "%s_%" PRId64 ".%s",               \
+                INPUT_DUMP_PATH, now,                       \
                 INPUT_DUMP_EXT);                            \
     }
     #define CREATE_DUMP_FILE(m_filename) {                  \
@@ -183,8 +179,8 @@
     } mBitstreamColorAspects;
 
     // profile
-    struct timeval mTimeStart;
-    struct timeval mTimeEnd;
+    nsecs_t mTimeStart = 0;
+    nsecs_t mTimeEnd = 0;
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
 #endif /* FILE_DUMP_ENABLE */
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index ddd312f..54a1d0e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -228,7 +228,6 @@
         const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mDecHandle(nullptr),
       mOutputBuffer{},
       mInitialized(false) {
 }
@@ -244,9 +243,7 @@
 
 c2_status_t C2SoftMpeg4Dec::onStop() {
     if (mInitialized) {
-        if (mDecHandle) {
-            PVCleanUpVideoDecoder(mDecHandle);
-        }
+        PVCleanUpVideoDecoder(&mVideoDecControls);
         mInitialized = false;
     }
     for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -269,28 +266,15 @@
 }
 
 void C2SoftMpeg4Dec::onRelease() {
-    if (mInitialized) {
-        if (mDecHandle) {
-            PVCleanUpVideoDecoder(mDecHandle);
-            delete mDecHandle;
-            mDecHandle = nullptr;
-        }
-        mInitialized = false;
-    }
+    (void)onStop();
     if (mOutBlock) {
         mOutBlock.reset();
     }
-    for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
-        if (mOutputBuffer[i]) {
-            free(mOutputBuffer[i]);
-            mOutputBuffer[i] = nullptr;
-        }
-    }
 }
 
 c2_status_t C2SoftMpeg4Dec::onFlush_sm() {
     if (mInitialized) {
-        if (PV_TRUE != PVResetVideoDecoder(mDecHandle)) {
+        if (PV_TRUE != PVResetVideoDecoder(&mVideoDecControls)) {
             return C2_CORRUPTED;
         }
     }
@@ -305,14 +289,8 @@
 #else
     mIsMpeg4 = false;
 #endif
-    if (!mDecHandle) {
-        mDecHandle = new tagvideoDecControls;
-    }
-    if (!mDecHandle) {
-        ALOGE("mDecHandle is null");
-        return NO_MEMORY;
-    }
-    memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+
+    memset(&mVideoDecControls, 0, sizeof(tagvideoDecControls));
 
     /* TODO: bring these values to 352 and 288. It cannot be done as of now
      * because, h263 doesn't seem to allow port reconfiguration. In OMX, the
@@ -368,10 +346,6 @@
 }
 
 c2_status_t C2SoftMpeg4Dec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
-    if (!mDecHandle) {
-        ALOGE("not supposed to be here, invalid decoder context");
-        return C2_CORRUPTED;
-    }
 
     mOutputBufferSize = align(mIntf->getMaxWidth(), 16) * align(mIntf->getMaxHeight(), 16) * 3 / 2;
     for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -402,10 +376,10 @@
 
 bool C2SoftMpeg4Dec::handleResChange(const std::unique_ptr<C2Work> &work) {
     uint32_t disp_width, disp_height;
-    PVGetVideoDimensions(mDecHandle, (int32 *)&disp_width, (int32 *)&disp_height);
+    PVGetVideoDimensions(&mVideoDecControls, (int32 *)&disp_width, (int32 *)&disp_height);
 
     uint32_t buf_width, buf_height;
-    PVGetBufferDimensions(mDecHandle, (int32 *)&buf_width, (int32 *)&buf_height);
+    PVGetBufferDimensions(&mVideoDecControls, (int32 *)&buf_width, (int32 *)&buf_height);
 
     CHECK_LE(disp_width, buf_width);
     CHECK_LE(disp_height, buf_height);
@@ -426,13 +400,14 @@
         }
 
         if (!mIsMpeg4) {
-            PVCleanUpVideoDecoder(mDecHandle);
+            PVCleanUpVideoDecoder(&mVideoDecControls);
 
             uint8_t *vol_data[1]{};
             int32_t vol_size = 0;
 
             if (!PVInitVideoDecoder(
-                    mDecHandle, vol_data, &vol_size, 1, mIntf->getMaxWidth(), mIntf->getMaxHeight(), H263_MODE)) {
+                    &mVideoDecControls, vol_data, &vol_size, 1, mIntf->getMaxWidth(),
+                                                        mIntf->getMaxHeight(), H263_MODE)) {
                 ALOGE("Error in PVInitVideoDecoder H263_MODE while resChanged was set to true");
                 mSignalledError = true;
                 work->result = C2_CORRUPTED;
@@ -444,40 +419,6 @@
     return resChanged;
 }
 
-/* TODO: can remove temporary copy after library supports writing to display
- * buffer Y, U and V plane pointers using stride info. */
-static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, uint8_t *src,
-        size_t dstYStride, size_t dstUVStride,
-        size_t srcYStride, uint32_t width,
-        uint32_t height) {
-    size_t srcUVStride = srcYStride / 2;
-    uint8_t *srcStart = src;
-
-    size_t vStride = align(height, 16);
-    for (size_t i = 0; i < height; ++i) {
-         memcpy(dstY, src, width);
-         src += srcYStride;
-         dstY += dstYStride;
-    }
-
-    /* U buffer */
-    src = srcStart + vStride * srcYStride;
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstU, src, width / 2);
-         src += srcUVStride;
-         dstU += dstUVStride;
-    }
-
-    /* V buffer */
-    src = srcStart + vStride * srcYStride * 5 / 4;
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstV, src, width / 2);
-         src += srcUVStride;
-         dstV += dstUVStride;
-    }
-}
-
 void C2SoftMpeg4Dec::process(
         const std::unique_ptr<C2Work> &work,
         const std::shared_ptr<C2BlockPool> &pool) {
@@ -522,7 +463,7 @@
     uint32_t *start_code = (uint32_t *)bitstream;
     bool volHeader = *start_code == 0xB0010000;
     if (volHeader) {
-        PVCleanUpVideoDecoder(mDecHandle);
+        PVCleanUpVideoDecoder(&mVideoDecControls);
         mInitialized = false;
     }
 
@@ -537,7 +478,7 @@
         }
         MP4DecodingMode mode = (mIsMpeg4) ? MPEG4_MODE : H263_MODE;
         if (!PVInitVideoDecoder(
-                mDecHandle, vol_data, &vol_size, 1,
+                &mVideoDecControls, vol_data, &vol_size, 1,
                 mIntf->getMaxWidth(), mIntf->getMaxHeight(), mode)) {
             ALOGE("PVInitVideoDecoder failed. Unsupported content?");
             mSignalledError = true;
@@ -545,7 +486,7 @@
             return;
         }
         mInitialized = true;
-        MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+        MP4DecodingMode actualMode = PVGetDecBitstreamMode(&mVideoDecControls);
         if (mode != actualMode) {
             ALOGE("Decoded mode not same as actual mode of the decoder");
             mSignalledError = true;
@@ -553,7 +494,7 @@
             return;
         }
 
-        PVSetPostProcType(mDecHandle, 0);
+        PVSetPostProcType(&mVideoDecControls, 0);
         if (handleResChange(work)) {
             ALOGI("Setting width and height");
             C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
@@ -590,7 +531,7 @@
             return;
         }
 
-        uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+        uint32_t yFrameSize = sizeof(uint8) * mVideoDecControls.size;
         if (mOutputBufferSize < yFrameSize * 3 / 2){
             ALOGE("Too small output buffer: %zu bytes", mOutputBufferSize);
             mSignalledError = true;
@@ -599,7 +540,7 @@
         }
 
         if (!mFramesConfigured) {
-            PVSetReferenceYUV(mDecHandle,mOutputBuffer[1]);
+            PVSetReferenceYUV(&mVideoDecControls,mOutputBuffer[1]);
             mFramesConfigured = true;
         }
 
@@ -610,7 +551,7 @@
         uint8_t *bitstreamTmp = bitstream;
         uint32_t timestamp = workIndex;
         if (PVDecodeVopHeader(
-                    mDecHandle, &bitstreamTmp, &timestamp, &tmpInSize,
+                    &mVideoDecControls, &bitstreamTmp, &timestamp, &tmpInSize,
                     &header_info, &useExtTimestamp,
                     mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
             ALOGE("failed to decode vop header.");
@@ -642,7 +583,7 @@
             continue;
         }
 
-        if (PVDecodeVopBody(mDecHandle, &tmpInSize) != PV_TRUE) {
+        if (PVDecodeVopBody(&mVideoDecControls, &tmpInSize) != PV_TRUE) {
             ALOGE("failed to decode video frame.");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
@@ -661,11 +602,17 @@
         C2PlanarLayout layout = wView.layout();
         size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
         size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
-        (void)copyOutputBufferToYuvPlanarFrame(
-                outputBufferY, outputBufferU, outputBufferV,
-                mOutputBuffer[mNumSamplesOutput & 1],
-                dstYStride, dstUVStride,
-                align(mWidth, 16), mWidth, mHeight);
+        size_t srcYStride = align(mWidth, 16);
+        size_t srcUStride = srcYStride / 2;
+        size_t srcVStride = srcYStride / 2;
+        size_t vStride = align(mHeight, 16);
+        const uint8_t *srcY = (const uint8_t *)mOutputBuffer[mNumSamplesOutput & 1];
+        const uint8_t *srcU = (const uint8_t *)srcY + vStride * srcYStride;
+        const uint8_t *srcV = (const uint8_t *)srcY + vStride * srcYStride * 5 / 4;
+
+        convertYUV420Planar8ToYV12(outputBufferY, outputBufferU, outputBufferV, srcY, srcU, srcV,
+                                   srcYStride, srcUStride, srcVStride, dstYStride, dstUVStride,
+                                   mWidth, mHeight);
 
         inPos += inSize - (size_t)tmpInSize;
         finishWork(workIndex, work);
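
The source pointers above index into the decoder's contiguous internal YUV420 buffer, whose U and V planes follow the 16-aligned Y plane; a numeric check with QCIF dimensions (illustrative):

    #include <cstddef>

    constexpr size_t alignTo16(size_t v) { return (v + 15) & ~size_t{15}; }
    constexpr size_t w = 176, h = 144;                   // QCIF, example only
    constexpr size_t yStride  = alignTo16(w);            // 176
    constexpr size_t vStride_ = alignTo16(h);            // 144 (matches vStride above)
    static_assert(vStride_ * yStride == 25344);          // Y plane size = offset of U
    static_assert(vStride_ * yStride * 5 / 4 == 31680);  // offset of V = Y + U sizes
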
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
index 716a095..fed04c9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
@@ -19,8 +19,8 @@
 
 #include <SimpleC2Component.h>
 
+#include <mp4dec_api.h>
 
-struct tagvideoDecControls;
 
 namespace android {
 
@@ -54,7 +54,7 @@
     bool handleResChange(const std::unique_ptr<C2Work> &work);
 
     std::shared_ptr<IntfImpl> mIntf;
-    tagvideoDecControls *mDecHandle;
+    tagvideoDecControls mVideoDecControls;
     std::shared_ptr<C2GraphicBlock> mOutBlock;
     uint8_t *mOutputBuffer[kNumOutputBuffers];
     size_t  mOutputBufferSize;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 3c87531..3bfec66 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -436,16 +436,18 @@
         }
 
         ++mNumInputFrames;
-        std::unique_ptr<C2StreamInitDataInfo::output> csd =
-            C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
-        if (!csd) {
-            ALOGE("CSD allocation failed");
-            mSignalledError = true;
-            work->result = C2_NO_MEMORY;
-            return;
+        if (outputSize) {
+            std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
+            if (!csd) {
+                ALOGE("CSD allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(csd->m.value, outPtr, outputSize);
+            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
         }
-        memcpy(csd->m.value, outPtr, outputSize);
-        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
     }
 
     // handle dynamic bitrate change
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index 370d33c..cdc3be0 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -245,7 +245,7 @@
     mIsFirstFrame = true;
     mEncoderFlushed = false;
     mBufferAvailable = false;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0;
     mFilledLen = 0;
     mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
@@ -266,7 +266,7 @@
     mIsFirstFrame = true;
     mEncoderFlushed = false;
     mBufferAvailable = false;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mFilledLen = 0;
     if (mEncoder) {
@@ -363,7 +363,7 @@
         }
     }
     if (mIsFirstFrame && inSize > 0) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -386,7 +386,7 @@
     size_t inPos = 0;
     size_t processSize = 0;
     mBytesEncoded = 0;
-    uint64_t outTimeStamp = 0u;
+    int64_t outTimeStamp = 0;
     std::shared_ptr<C2Buffer> buffer;
     uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
     const uint8_t* inPtr = rView.data() + inOffset;
@@ -584,7 +584,7 @@
         mOutputBlock.reset();
     }
     mProcessedSamples += (mNumPcmBytesPerInputFrame / sizeof(int16_t));
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mChannelCount / mSampleRate;
     outOrdinal.frameIndex = mOutIndex++;
     outOrdinal.timestamp = mAnchorTimeStamp + outTimeStamp;
@@ -612,7 +612,7 @@
         return C2_OMITTED;
     }
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     return drainInternal(pool, nullptr);
 }
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 2b4d8f2..733a6bcd2 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -67,7 +67,7 @@
     uint32_t mSampleRate;
     uint32_t mChannelCount;
     uint32_t mFrameDurationMs;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // Codec delay in ns
     uint64_t mCodecDelay;
diff --git a/media/codec2/components/tests/Android.bp b/media/codec2/components/tests/Android.bp
index 3c68eee..be2abf2 100644
--- a/media/codec2/components/tests/Android.bp
+++ b/media/codec2/components/tests/Android.bp
@@ -9,44 +9,13 @@
 
 cc_defaults {
     name: "C2SoftCodecTest-defaults",
+    defaults: [ "libcodec2-static-defaults" ],
     gtest: true,
     host_supported: false,
     srcs: [
         "C2SoftCodecTest.cpp",
     ],
 
-    static_libs: [
-        "liblog",
-        "libion",
-        "libfmq",
-        "libbase",
-        "libutils",
-        "libcutils",
-        "libcodec2",
-        "libhidlbase",
-        "libdmabufheap",
-        "libcodec2_vndk",
-        "libnativewindow",
-        "libcodec2_soft_common",
-        "libsfplugin_ccodec_utils",
-        "libstagefright_foundation",
-        "libstagefright_bufferpool@2.0.1",
-        "android.hardware.graphics.mapper@2.0",
-        "android.hardware.graphics.mapper@3.0",
-        "android.hardware.media.bufferpool@2.0",
-        "android.hardware.graphics.allocator@2.0",
-        "android.hardware.graphics.allocator@3.0",
-        "android.hardware.graphics.bufferqueue@2.0",
-    ],
-
-    shared_libs: [
-        "libui",
-        "libdl",
-        "libhardware",
-        "libvndksupport",
-        "libprocessgroup",
-    ],
-
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 2953d90..8087396 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -19,12 +19,13 @@
 #include <log/log.h>
 
 #include <algorithm>
-
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
 #include <SimpleC2Interface.h>
 
 #include "C2SoftVpxDec.h"
@@ -149,8 +150,16 @@
 #else
         addParameter(
                 DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-                .withConstValue(new C2StreamProfileLevelInfo::input(0u,
-                        C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED))
+                .withDefault(new C2StreamProfileLevelInfo::input(0u,
+                        C2Config::PROFILE_VP8_0, C2Config::LEVEL_UNUSED))
+                .withFields({
+                    C2F(mProfileLevel, profile).equalTo(
+                        PROFILE_VP8_0
+                    ),
+                    C2F(mProfileLevel, level).equalTo(
+                        LEVEL_UNUSED),
+                })
+                .withSetter(ProfileLevelSetter, mSize)
                 .build());
 #endif
 
@@ -209,11 +218,24 @@
                 .build());
 
         // TODO: support more formats?
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+#ifdef VP9
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If color format surface isn't added to supported formats, there is no way to know
+        // when the color-format is configured to surface. This is necessary to be able to
+        // choose 10-bit format while decoding 10-bit clips in surface mode
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+#endif
         addParameter(
                 DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
-                .withConstValue(new C2StreamPixelFormatInfo::output(
-                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                .withDefault(new C2StreamPixelFormatInfo::output(
+                                  0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                 .build());
+
     }
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
@@ -289,6 +311,11 @@
         return C2R::Ok();
     }
 
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
 private:
     std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
     std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -415,6 +442,11 @@
 #else
     mMode = MODE_VP8;
 #endif
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
 
     mWidth = 320;
     mHeight = 240;
@@ -630,125 +662,6 @@
     }
 }
 
-static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        uint32_t width, uint32_t height) {
-
-    for (size_t i = 0; i < height; ++i) {
-         memcpy(dstY, srcY, width);
-         srcY += srcYStride;
-         dstY += dstYStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstV, srcV, width / 2);
-         srcV += srcVStride;
-         dstV += dstUVStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstU, srcU, width / 2);
-         srcU += srcUStride;
-         dstU += dstUVStride;
-    }
-
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstStride, size_t width, size_t height) {
-
-    // Converting two lines at a time, slightly faster
-    for (size_t y = 0; y < height; y += 2) {
-        uint32_t *dstTop = (uint32_t *) dst;
-        uint32_t *dstBot = (uint32_t *) (dst + dstStride);
-        uint16_t *ySrcTop = (uint16_t*) srcY;
-        uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
-        uint16_t *uSrc = (uint16_t*) srcU;
-        uint16_t *vSrc = (uint16_t*) srcV;
-
-        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
-        size_t x = 0;
-        for (; x < width - 3; x += 4) {
-
-            u01 = *((uint32_t*)uSrc); uSrc += 2;
-            v01 = *((uint32_t*)vSrc); vSrc += 2;
-
-            y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-            y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
-            *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
-            *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
-            *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
-            *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
-        }
-
-        // There should be at most 2 more pixels to process. Note that we don't
-        // need to consider odd case as the buffer is always aligned to even.
-        if (x < width) {
-            u01 = *uSrc;
-            v01 = *vSrc;
-            y01 = *((uint32_t*)ySrcTop);
-            y45 = *((uint32_t*)ySrcBot);
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = ((y01 >> 16) << 10) | uv0;
-            *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = ((y45 >> 16) << 10) | uv0;
-        }
-
-        srcY += srcYStride * 2;
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dst += dstStride * 2;
-    }
-
-    return;
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        size_t width, size_t height) {
-
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; ++x) {
-            dstY[x] = (uint8_t)(srcY[x] >> 2);
-        }
-
-        srcY += srcYStride;
-        dstY += dstYStride;
-    }
-
-    for (size_t y = 0; y < (height + 1) / 2; ++y) {
-        for (size_t x = 0; x < (width + 1) / 2; ++x) {
-            dstU[x] = (uint8_t)(srcU[x] >> 2);
-            dstV[x] = (uint8_t)(srcV[x] >> 2);
-        }
-
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dstU += dstUVStride;
-        dstV += dstUVStride;
-    }
-    return;
-}
 status_t C2SoftVpxDec::outputBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const std::unique_ptr<C2Work> &work)
@@ -789,16 +702,37 @@
 
     std::shared_ptr<C2GraphicBlock> block;
     uint32_t format = HAL_PIXEL_FORMAT_YV12;
-    if (img->fmt == VPX_IMG_FMT_I42016) {
+    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects;
+    if (img->fmt == VPX_IMG_FMT_I42016 &&
+            mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
         IntfImpl::Lock lock = mIntf->lock();
-        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
-
+        defaultColorAspects = mIntf->getDefaultColorAspects_l();
+        bool allowRGBA1010102 = false;
         if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
             defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
             defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
-            format = HAL_PIXEL_FORMAT_RGBA_1010102;
+            allowRGBA1010102 = true;
         }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
     }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return UNKNOWN_ERROR;
+        }
+        mHalPixelFormat = format;
+    }
+
     C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
     c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
     if (err != C2_OK) {
@@ -842,11 +776,14 @@
                 queue->entries.push_back(
                         [dstY, srcY, srcU, srcV,
                          srcYStride, srcUStride, srcVStride, dstYStride,
-                         width = mWidth, height = std::min(mHeight - i, kHeight)] {
-                            convertYUV420Planar16ToY410(
+                         width = mWidth, height = std::min(mHeight - i, kHeight),
+                         defaultColorAspects] {
+                            convertYUV420Planar16ToY410OrRGBA1010102(
                                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
-                                    width, height);
+                                    width, height,
+                                    std::static_pointer_cast<const C2ColorAspectsStruct>(
+                                            defaultColorAspects));
                         });
                 srcY += srcYStride / 2 * kHeight;
                 srcU += srcUStride / 2 * (kHeight / 2);
@@ -859,24 +796,22 @@
                 queue->cond.signal();
                 queue.waitForCondition(queue->cond);
             }
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+                                        srcYStride / 2, srcUStride / 2, srcVStride / 2,
+                                        dstYStride / 2, dstUVStride / 2, mWidth, mHeight);
         } else {
-            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
-                                                srcY, srcU, srcV,
-                                                srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                                dstYStride, dstUVStride,
-                                                mWidth, mHeight);
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        mWidth, mHeight);
         }
     } else {
         const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
 
-        copyOutputBufferToYuvPlanarFrame(
-                dstY, dstU, dstV,
-                srcY, srcU, srcV,
-                srcYStride, srcUStride, srcVStride,
-                dstYStride, dstUVStride,
-                mWidth, mHeight);
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
     }
     finishWork(((c2_cntr64_t *)img->user_priv)->peekull(), work, std::move(block));
     return OK;
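
When the decoder's negotiated HAL pixel format changes (for example when 10-bit content
selects P010 or RGBA_1010102), the change above publishes the new format to the framework
through the worklet's configUpdate list. A minimal sketch of that pattern, assuming an
interface `mIntf` and a `work` item shaped as in the code above:

    // Sketch only: publish a pixel-format change as an output config update.
    uint32_t format = HAL_PIXEL_FORMAT_YCBCR_P010;  // hypothetical newly selected format
    if (mHalPixelFormat != format) {
        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        if (mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures) == C2_OK) {
            // The client sees the new format via this config update on the worklet.
            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
            mHalPixelFormat = format;
        }
    }
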
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 2065165..e9d6dc9 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -63,10 +63,15 @@
         std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
     };
 
+    // configurations used by the component during processing
+    // (TODO: keep these in the intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
     std::shared_ptr<IntfImpl> mIntf;
     vpx_codec_ctx_t *mCodecCtx;
     bool mFrameParallelMode;  // Frame parallel is only supported by VP9 decoder.
 
+    uint32_t mHalPixelFormat;
     uint32_t mWidth;
     uint32_t mHeight;
     bool mSignalledOutputEos;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7486d27..f99ee24 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -31,6 +31,255 @@
 
 namespace android {
 
+C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+    : SimpleInterface<void>::BaseParams(
+            helper,
+            COMPONENT_NAME,
+            C2Component::KIND_ENCODER,
+            C2Component::DOMAIN_VIDEO,
+            MEDIA_MIMETYPE_VIDEO) {
+    noPrivateBuffers(); // TODO: account for our buffers here
+    noInputReferences();
+    noOutputReferences();
+    noInputLatency();
+    noTimeStretch();
+    setDerivedInstance(this);
+
+    addParameter(
+            DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+            .withConstValue(new C2ComponentAttributesSetting(
+                C2Component::ATTRIB_IS_TEMPORAL))
+            .build());
+
+    addParameter(
+            DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+            .withConstValue(new C2StreamUsageTuning::input(
+                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
+            .build());
+
+    addParameter(
+        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+            .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+            .withFields({
+                C2F(mSize, width).inRange(2, 2048, 2),
+                C2F(mSize, height).inRange(2, 2048, 2),
+            })
+            .withSetter(SizeSetter)
+            .build());
+
+    addParameter(
+        DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+            .withDefault(new C2StreamBitrateModeTuning::output(
+                    0u, C2Config::BITRATE_VARIABLE))
+            .withFields({
+                C2F(mBitrateMode, value).oneOf({
+                    C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
+            })
+            .withSetter(
+                Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+        DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+            .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+            // TODO: More restriction?
+            .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+            .withSetter(
+                Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+        DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
+            .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
+            .withFields({
+                C2F(mLayering, m.layerCount).inRange(0, 4),
+                C2F(mLayering, m.bLayerCount).inRange(0, 0),
+                C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
+            })
+            .withSetter(LayeringSetter)
+            .build());
+
+    addParameter(
+            DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+            .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+            .withFields({C2F(mSyncFramePeriod, value).any()})
+            .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+        DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+            .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+            .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+            .withSetter(BitrateSetter)
+            .build());
+
+    addParameter(
+            DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+            .withConstValue(new C2StreamIntraRefreshTuning::output(
+                            0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+            .build());
+#ifdef VP9
+    addParameter(
+            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+            .withDefault(new C2StreamProfileLevelInfo::output(
+                    0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
+            .withFields({
+                C2F(mProfileLevel, profile).equalTo(
+                    PROFILE_VP9_0
+                ),
+                C2F(mProfileLevel, level).equalTo(
+                    LEVEL_VP9_4_1),
+            })
+            .withSetter(ProfileLevelSetter)
+            .build());
+#else
+    addParameter(
+            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+            .withDefault(new C2StreamProfileLevelInfo::output(
+                    0u, PROFILE_VP8_0, LEVEL_UNUSED))
+            .withFields({
+                C2F(mProfileLevel, profile).equalTo(
+                    PROFILE_VP8_0
+                ),
+                C2F(mProfileLevel, level).equalTo(
+                    LEVEL_UNUSED),
+            })
+            .withSetter(ProfileLevelSetter)
+            .build());
+#endif
+    addParameter(
+            DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+            .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+            .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+            .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+            .withDefault(new C2StreamColorAspectsInfo::input(
+                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+            .withFields({
+                C2F(mColorAspects, range).inRange(
+                            C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                C2F(mColorAspects, primaries).inRange(
+                            C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                C2F(mColorAspects, transfer).inRange(
+                            C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                C2F(mColorAspects, matrix).inRange(
+                            C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+            })
+            .withSetter(ColorAspectsSetter)
+            .build());
+
+    addParameter(
+            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+            .withDefault(new C2StreamColorAspectsInfo::output(
+                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+            .withFields({
+                C2F(mCodedColorAspects, range).inRange(
+                            C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                C2F(mCodedColorAspects, primaries).inRange(
+                            C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                C2F(mCodedColorAspects, transfer).inRange(
+                            C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                C2F(mCodedColorAspects, matrix).inRange(
+                            C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+            })
+            .withSetter(CodedColorAspectsSetter, mColorAspects)
+            .build());
+}
+
+C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (me.v.value < 4096) {
+        me.set().value = 4096;
+    }
+    return res;
+}
+
+C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
+                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                                       C2P<C2StreamPictureSizeInfo::input>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+        me.set().width = oldMe.v.width;
+    }
+    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+        me.set().height = oldMe.v.height;
+    }
+    return res;
+}
+
+C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
+                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+    (void)mayBlock;
+    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+        me.set().profile = PROFILE_VP9_0;
+    }
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        me.set().level = LEVEL_VP9_4_1;
+    }
+    return C2R::Ok();
+}
+
+C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
+                                           C2P<C2StreamTemporalLayeringTuning::output>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (me.v.m.layerCount > 4) {
+        me.set().m.layerCount = 4;
+    }
+    me.set().m.bLayerCount = 0;
+    // ensure ratios are monotonic and clamped between 0 and 1
+    for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
+        me.set().m.bitrateRatios[ix] = c2_clamp(
+            ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
+    }
+    ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
+    return res;
+}
+
+uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
+    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+        return 0;
+    }
+    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+}
+C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
+                                               C2P<C2StreamColorAspectsInfo::input>& me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+}
+C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
+        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+        const C2P<C2StreamColorAspectsInfo::input>& coded) {
+    (void)mayBlock;
+    me.set().range = coded.v.range;
+    me.set().primaries = coded.v.primaries;
+    me.set().transfer = coded.v.transfer;
+    me.set().matrix = coded.v.matrix;
+    return C2R::Ok();
+}
+
 #if 0
 static size_t getCpuCoreCount() {
     long cpuCoreCount = 1;
@@ -484,8 +733,14 @@
     switch (layout.type) {
         case C2PlanarLayout::TYPE_RGB:
         case C2PlanarLayout::TYPE_RGBA: {
+            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
+            {
+                IntfImpl::Lock lock = mIntf->lock();
+                colorAspects = mIntf->getCodedColorAspects_l();
+            }
             ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
-                                  mConversionBuffer.size(), *rView.get());
+                                  mConversionBuffer.size(), *rView.get(),
+                                  colorAspects->matrix, colorAspects->range);
             vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                          mStrideAlign, mConversionBuffer.data());
             break;
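
getSyncFramePeriod() above turns the configured sync-frame interval (in microseconds) and
the frame rate into a key-frame period expressed in frames. A small worked example using the
same formula, with assumed values of a 2-second interval at 30 fps:

    // Sketch only: 2,000,000 us at 30 fps -> one key frame every 60 frames.
    int64_t syncFramePeriodUs = 2'000'000;  // assumed configured value
    float frameRate = 30.f;                 // assumed configured value
    double period = syncFramePeriodUs / 1e6 * frameRate;                              // 60.0
    uint32_t kfMax = (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);  // 60
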
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index c98b802..714fadb 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -237,245 +237,41 @@
 
 class C2SoftVpxEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
    public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : SimpleInterface<void>::BaseParams(
-                helper,
-                COMPONENT_NAME,
-                C2Component::KIND_ENCODER,
-                C2Component::DOMAIN_VIDEO,
-                MEDIA_MIMETYPE_VIDEO) {
-        noPrivateBuffers(); // TODO: account for our buffers here
-        noInputReferences();
-        noOutputReferences();
-        noInputLatency();
-        noTimeStretch();
-        setDerivedInstance(this);
-
-        addParameter(
-                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
-                .withConstValue(new C2ComponentAttributesSetting(
-                    C2Component::ATTRIB_IS_TEMPORAL))
-                .build());
-
-        addParameter(
-                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
-                .withConstValue(new C2StreamUsageTuning::input(
-                        0u, (uint64_t)C2MemoryUsage::CPU_READ))
-                .build());
-
-        addParameter(
-            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-                .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
-                .withFields({
-                    C2F(mSize, width).inRange(2, 2048, 2),
-                    C2F(mSize, height).inRange(2, 2048, 2),
-                })
-                .withSetter(SizeSetter)
-                .build());
-
-        addParameter(
-            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
-                .withDefault(new C2StreamBitrateModeTuning::output(
-                        0u, C2Config::BITRATE_VARIABLE))
-                .withFields({
-                    C2F(mBitrateMode, value).oneOf({
-                        C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
-                })
-                .withSetter(
-                    Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-            DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-                .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
-                // TODO: More restriction?
-                .withFields({C2F(mFrameRate, value).greaterThan(0.)})
-                .withSetter(
-                    Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-            DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
-                .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
-                .withFields({
-                    C2F(mLayering, m.layerCount).inRange(0, 4),
-                    C2F(mLayering, m.bLayerCount).inRange(0, 0),
-                    C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
-                })
-                .withSetter(LayeringSetter)
-                .build());
-
-        addParameter(
-                DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
-                .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
-                .withFields({C2F(mSyncFramePeriod, value).any()})
-                .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-            DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
-                .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
-                .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
-                .withSetter(BitrateSetter)
-                .build());
-
-        addParameter(
-                DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
-                .withConstValue(new C2StreamIntraRefreshTuning::output(
-                             0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
-                .build());
-
-        addParameter(
-        DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-        .withDefault(new C2StreamProfileLevelInfo::output(
-                0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
-        .withFields({
-            C2F(mProfileLevel, profile).equalTo(
-                PROFILE_VP9_0
-            ),
-            C2F(mProfileLevel, level).equalTo(
-                LEVEL_VP9_4_1),
-        })
-        .withSetter(ProfileLevelSetter)
-        .build());
-
-        addParameter(
-                DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
-                .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
-                .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
-                .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
-                .withDefault(new C2StreamColorAspectsInfo::input(
-                        0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
-                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
-                .withFields({
-                    C2F(mColorAspects, range).inRange(
-                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
-                    C2F(mColorAspects, primaries).inRange(
-                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
-                    C2F(mColorAspects, transfer).inRange(
-                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
-                    C2F(mColorAspects, matrix).inRange(
-                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
-                })
-                .withSetter(ColorAspectsSetter)
-                .build());
-
-        addParameter(
-                DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
-                .withDefault(new C2StreamColorAspectsInfo::output(
-                        0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
-                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
-                .withFields({
-                    C2F(mCodedColorAspects, range).inRange(
-                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
-                    C2F(mCodedColorAspects, primaries).inRange(
-                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
-                    C2F(mCodedColorAspects, transfer).inRange(
-                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
-                    C2F(mCodedColorAspects, matrix).inRange(
-                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
-                })
-                .withSetter(CodedColorAspectsSetter, mColorAspects)
-                .build());
-    }
-
-    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
-        (void)mayBlock;
-        C2R res = C2R::Ok();
-        if (me.v.value <= 4096) {
-            me.set().value = 4096;
-        }
-        return res;
-    }
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper);
+    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me);
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
-                          C2P<C2StreamPictureSizeInfo::input> &me) {
-        (void)mayBlock;
-        C2R res = C2R::Ok();
-        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
-            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
-            me.set().width = oldMe.v.width;
-        }
-        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
-            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
-            me.set().height = oldMe.v.height;
-        }
-        return res;
-    }
+                          C2P<C2StreamPictureSizeInfo::input> &me);
 
     static C2R ProfileLevelSetter(
             bool mayBlock,
-            C2P<C2StreamProfileLevelInfo::output> &me) {
-        (void)mayBlock;
-        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
-            me.set().profile = PROFILE_VP9_0;
-        }
-        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
-            me.set().level = LEVEL_VP9_4_1;
-        }
-        return C2R::Ok();
-    }
+            C2P<C2StreamProfileLevelInfo::output> &me);
 
-    static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me) {
-        (void)mayBlock;
-        C2R res = C2R::Ok();
-        if (me.v.m.layerCount > 4) {
-            me.set().m.layerCount = 4;
-        }
-        me.set().m.bLayerCount = 0;
-        // ensure ratios are monotonic and clamped between 0 and 1
-        for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
-            me.set().m.bitrateRatios[ix] = c2_clamp(
-                ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
-        }
-        ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
-        return res;
-    }
+    static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
 
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
-    std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
+        return mIntraRefresh;
+    }
     std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
-    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const { return mBitrateMode; }
-    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
-    std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const { return mLayering; }
-    uint32_t getSyncFramePeriod() const {
-        if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
-            return 0;
-        }
-        double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
-        return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+        return mBitrateMode;
     }
-    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
-        (void)mayBlock;
-        if (me.v.range > C2Color::RANGE_OTHER) {
-                me.set().range = C2Color::RANGE_OTHER;
-        }
-        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
-                me.set().primaries = C2Color::PRIMARIES_OTHER;
-        }
-        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
-                me.set().transfer = C2Color::TRANSFER_OTHER;
-        }
-        if (me.v.matrix > C2Color::MATRIX_OTHER) {
-                me.set().matrix = C2Color::MATRIX_OTHER;
-        }
-        return C2R::Ok();
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+        return mRequestSync;
     }
+    std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const {
+        return mLayering;
+    }
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+        return mCodedColorAspects;
+    }
+    uint32_t getSyncFramePeriod() const;
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
-                                       const C2P<C2StreamColorAspectsInfo::input> &coded) {
-        (void)mayBlock;
-        me.set().range = coded.v.range;
-        me.set().primaries = coded.v.primaries;
-        me.set().transfer = coded.v.transfer;
-        me.set().matrix = coded.v.matrix;
-        return C2R::Ok();
-    }
+                                       const C2P<C2StreamColorAspectsInfo::input> &coded);
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 64999b7..7d5740b 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -11,6 +11,10 @@
     name: "libcodec2_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
     export_include_dirs: ["include"],
 }
 
@@ -18,6 +22,10 @@
     name: "libcodec2",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
     vndk: {
         enabled: true,
     },
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 2cc7ab7..6ff3dbc 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -59,6 +59,8 @@
     enum drc_compression_mode_t : int32_t;  ///< DRC compression mode
     enum drc_effect_type_t : int32_t;       ///< DRC effect type
     enum drc_album_mode_t : int32_t;        ///< DRC album mode
+    enum hdr_dynamic_metadata_type_t : uint32_t;  ///< HDR dynamic metadata type
+    enum hdr_format_t : uint32_t;           ///< HDR format
     enum intra_refresh_mode_t : uint32_t;   ///< intra refresh modes
     enum level_t : uint32_t;                ///< coding level
     enum ordinal_key_t : uint32_t;          ///< work ordering keys
@@ -77,6 +79,7 @@
 
 struct C2PlatformConfig {
     enum encoding_quality_level_t : uint32_t; ///< encoding quality level
+    enum tunnel_peek_mode_t: uint32_t;      ///< tunnel peek mode
 };
 
 namespace {
@@ -189,9 +192,11 @@
 
     kParamIndexPictureTypeMask,
     kParamIndexPictureType,
+    // deprecated
     kParamIndexHdr10PlusMetadata,
-
     kParamIndexPictureQuantization,
+    kParamIndexHdrDynamicMetadata,
+    kParamIndexHdrFormat,
 
     /* ------------------------------------ video components ------------------------------------ */
 
@@ -270,6 +275,15 @@
 
     // encoding quality requirements
     kParamIndexEncodingQualityLevel, // encoders, enum
+
+    // encoding statistics, average block qp of a frame
+    kParamIndexAverageBlockQuantization, // int32
+
+    // channel mask for decoded audio
+    kParamIndexAndroidChannelMask, // uint32
+
+    // allow tunnel peek behavior to be unspecified for app compatibility
+    kParamIndexTunnelPeekMode, // tunnel mode, enum
 };
 
 }
@@ -680,6 +694,9 @@
     LEVEL_DV_MAIN_UHD_30,                       ///< Dolby Vision main tier uhd30
     LEVEL_DV_MAIN_UHD_48,                       ///< Dolby Vision main tier uhd48
     LEVEL_DV_MAIN_UHD_60,                       ///< Dolby Vision main tier uhd60
+    LEVEL_DV_MAIN_UHD_120,                      ///< Dolby Vision main tier uhd120
+    LEVEL_DV_MAIN_8K_30,                        ///< Dolby Vision main tier 8k30
+    LEVEL_DV_MAIN_8K_60,                        ///< Dolby Vision main tier 8k60
 
     LEVEL_DV_HIGH_HD_24 = _C2_PL_DV_BASE + 0x100,  ///< Dolby Vision high tier hd24
     LEVEL_DV_HIGH_HD_30,                        ///< Dolby Vision high tier hd30
@@ -690,6 +707,9 @@
     LEVEL_DV_HIGH_UHD_30,                       ///< Dolby Vision high tier uhd30
     LEVEL_DV_HIGH_UHD_48,                       ///< Dolby Vision high tier uhd48
     LEVEL_DV_HIGH_UHD_60,                       ///< Dolby Vision high tier uhd60
+    LEVEL_DV_HIGH_UHD_120,                      ///< Dolby Vision high tier uhd120
+    LEVEL_DV_HIGH_8K_30,                        ///< Dolby Vision high tier 8k30
+    LEVEL_DV_HIGH_8K_60,                        ///< Dolby Vision high tier 8k60
 
     // AV1 levels
     LEVEL_AV1_2    = _C2_PL_AV1_BASE ,          ///< AV1 Level 2
@@ -1602,16 +1622,82 @@
     C2FIELD(maxFall, "max-fall")
 };
 typedef C2StreamParam<C2Info, C2HdrStaticMetadataStruct, kParamIndexHdrStaticMetadata>
-        C2StreamHdrStaticInfo;
+        C2StreamHdrStaticMetadataInfo;
+typedef C2StreamParam<C2Info, C2HdrStaticMetadataStruct, kParamIndexHdrStaticMetadata>
+        C2StreamHdrStaticInfo;  // deprecated
 constexpr char C2_PARAMKEY_HDR_STATIC_INFO[] = "raw.hdr-static-info";
 
 /**
  * HDR10+ Metadata Info.
+ *
+ * Deprecated. Use C2StreamHdrDynamicMetadataInfo with
+ * HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40
  */
 typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexHdr10PlusMetadata>
-        C2StreamHdr10PlusInfo;
-constexpr char C2_PARAMKEY_INPUT_HDR10_PLUS_INFO[] = "input.hdr10-plus-info";
-constexpr char C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO[] = "output.hdr10-plus-info";
+        C2StreamHdr10PlusInfo;  // deprecated
+constexpr char C2_PARAMKEY_INPUT_HDR10_PLUS_INFO[] = "input.hdr10-plus-info";  // deprecated
+constexpr char C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO[] = "output.hdr10-plus-info";  // deprecated
+
+/**
+ * HDR dynamic metadata types
+ */
+C2ENUM(C2Config::hdr_dynamic_metadata_type_t, uint32_t,
+    HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10,  ///< SMPTE ST 2094-10
+    HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40,  ///< SMPTE ST 2094-40
+)
+
+struct C2HdrDynamicMetadataStruct {
+    inline C2HdrDynamicMetadataStruct() { memset(this, 0, sizeof(*this)); }
+
+    inline C2HdrDynamicMetadataStruct(
+            size_t flexCount, C2Config::hdr_dynamic_metadata_type_t type)
+        : type_(type) {
+        memset(data, 0, flexCount);
+    }
+
+    C2Config::hdr_dynamic_metadata_type_t type_;
+    uint8_t data[];
+
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(HdrDynamicMetadata, data)
+    C2FIELD(type_, "type")
+    C2FIELD(data, "data")
+};
+
+/**
+ * Dynamic HDR Metadata Info.
+ */
+typedef C2StreamParam<C2Info, C2HdrDynamicMetadataStruct, kParamIndexHdrDynamicMetadata>
+        C2StreamHdrDynamicMetadataInfo;
+constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
+constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
+
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+    UNKNOWN,     ///< HDR format not known (default)
+    SDR,         ///< not HDR (SDR)
+    HLG,         ///< HLG
+    HDR10,       ///< HDR10
+    HDR10_PLUS,  ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare for encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+                kParamIndexHdrFormat>
+        C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
 
 /* ------------------------------------ block-based coding ----------------------------------- */
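
C2HdrDynamicMetadataStruct above is a flexible (variable-size) parameter: type_ selects the
SMPTE ST 2094 variant and data carries the raw metadata bytes. A hedged sketch of filling one
for an SMPTE ST 2094-40 (HDR10+) payload; the AllocUnique argument order (flex count, stream
index, type) is an assumption based on how other flexible C2 stream params are allocated:

    // Sketch only: wrap an HDR10+ payload in the new dynamic-metadata param.
    std::vector<uint8_t> payload;  // assumed to hold the SMPTE ST 2094-40 SEI payload
    std::unique_ptr<C2StreamHdrDynamicMetadataInfo::output> info =
            C2StreamHdrDynamicMetadataInfo::output::AllocUnique(
                    payload.size(), 0u /* stream */,
                    C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
    memcpy(info->m.data, payload.data(), payload.size());
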
 
@@ -1673,7 +1759,7 @@
     SYNC_FRAME = (1 << 0),  ///< sync frame, e.g. IDR
     I_FRAME    = (1 << 1),  ///< intra frame that is completely encoded
     P_FRAME    = (1 << 2),  ///< inter predicted frame from previous frames
-    B_FRAME    = (1 << 3),  ///< backward predicted (out-of-order) frame
+    B_FRAME    = (1 << 3),  ///< bidirectional predicted (out-of-order) frame
 )
 
 /**
@@ -1930,6 +2016,14 @@
 constexpr char C2_PARAMKEY_MAX_CODED_CHANNEL_COUNT[] = "coded.max-channel-count";
 
 /**
+ * Audio channel mask. Used by decoder to express audio channel mask of decoded content.
+ * Channel representation is specified according to the Java android.media.AudioFormat
+ * CHANNEL_OUT_* constants.
+ */
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexAndroidChannelMask> C2StreamChannelMaskInfo;
+constexpr char C2_PARAMKEY_CHANNEL_MASK[] = "raw.channel-mask";
+
+/**
  * Audio sample format (PCM encoding)
  */
 C2ENUM(C2Config::pcm_encoding_t, uint32_t,
@@ -2392,6 +2486,28 @@
         C2StreamTunnelStartRender;
 constexpr char C2_PARAMKEY_TUNNEL_START_RENDER[] = "output.tunnel-start-render";
 
+/** Tunnel Peek Mode. */
+C2ENUM(C2PlatformConfig::tunnel_peek_mode_t, uint32_t,
+    UNSPECIFIED_PEEK = 0,
+    SPECIFIED_PEEK = 1
+);
+
+/**
+ * Tunnel Peek Mode Tuning parameter.
+ *
+ * If set to UNSPECIFIED_PEEK, the decoder is free to ignore the
+ * C2StreamTunnelHoldRender and C2StreamTunnelStartRender flags and associated
+ * features. Additionally, it is then up to the decoder whether to display any
+ * frame before receiving synchronization information.
+ *
+ * Note: This parameter allows a decoder to ignore the video peek machinery and
+ * to revert to its preferred behavior.
+ */
+typedef C2StreamParam<C2Tuning, C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>,
+        kParamIndexTunnelPeekMode> C2StreamTunnelPeekModeTuning;
+constexpr char C2_PARAMKEY_TUNNEL_PEEK_MODE[] =
+        "output.tunnel-peek-mode";
+
 /**
  * Encoding quality level signaling.
  *
@@ -2411,6 +2527,17 @@
     S_HANDHELD = 1              // corresponds to VMAF=70
 );
 
+/**
+ * Video Encoding Statistics Export
+ */
+
+/**
+ * Average block QP exported from video encoder.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<int32_t>, kParamIndexAverageBlockQuantization>
+        C2AndroidStreamAverageBlockQuantizationInfo;
+constexpr char C2_PARAMKEY_AVERAGE_QP[] = "coded.average-qp";
+
 /// @}
 
 #endif  // C2CONFIG_H_
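
The tunnel-peek tuning added above lets a client tell a tunneled decoder whether the
hold-render/start-render peek flags must be honored. A hedged sketch of how a client might
build the parameter; instantiating it as ::output is an assumption suggested by the
"output.tunnel-peek-mode" key, and the configParams vector mirrors the fuzzer usage later in
this change:

    // Sketch only: ask the decoder to keep its legacy (unspecified) peek behavior.
    C2StreamTunnelPeekModeTuning::output peekMode(0u, C2PlatformConfig::UNSPECIFIED_PEEK);
    std::vector<C2Param*> configParams;
    configParams.push_back(&peekMode);  // later handed to the component's config entry point
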
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index bd1fac6..147a52e 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -28,47 +28,22 @@
 cc_defaults {
     name: "C2Fuzzer-defaults",
 
+    defaults: [ "libcodec2-static-defaults" ],
+
     srcs: [
         "C2Fuzzer.cpp",
     ],
 
-    static_libs: [
-        "liblog",
-        "libion",
-        "libfmq",
-        "libbase",
-        "libutils",
-        "libcutils",
-        "libcodec2",
-        "libhidlbase",
-        "libdmabufheap",
-        "libcodec2_vndk",
-        "libnativewindow",
-        "libcodec2_soft_common",
-        "libsfplugin_ccodec_utils",
-        "libstagefright_foundation",
-        "libstagefright_bufferpool@2.0.1",
-        "android.hardware.graphics.mapper@2.0",
-        "android.hardware.graphics.mapper@3.0",
-        "android.hardware.media.bufferpool@2.0",
-        "android.hardware.graphics.allocator@2.0",
-        "android.hardware.graphics.allocator@3.0",
-        "android.hardware.graphics.bufferqueue@2.0",
-    ],
-
-    shared_libs: [
-        "libui",
-        "libdl",
-        "libbinder",
-        "libhardware",
-        "libvndksupport",
-        "libprocessgroup",
-    ],
-
     cflags: [
         "-Wall",
         "-Werror",
     ],
+
+    fuzz_config: {
+        cc: [
+            "wonsik@google.com",
+        ],
+    },
 }
 
 cc_fuzz {
diff --git a/media/codec2/fuzzer/C2Fuzzer.cpp b/media/codec2/fuzzer/C2Fuzzer.cpp
index 51e1013..e469d8b 100644
--- a/media/codec2/fuzzer/C2Fuzzer.cpp
+++ b/media/codec2/fuzzer/C2Fuzzer.cpp
@@ -194,12 +194,12 @@
   }
 
   std::vector<C2Param*> configParams;
+  C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
+  C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
+  C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
   if (domain.value == DOMAIN_VIDEO) {
-    C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
     configParams.push_back(&inputSize);
   } else if (domain.value == DOMAIN_AUDIO) {
-    C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
-    C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
     configParams.push_back(&sampleRateInfo);
     configParams.push_back(&channelCountInfo);
   }
@@ -239,17 +239,17 @@
 }
 
 void Codec2Fuzzer::decodeFrames(const uint8_t* data, size_t size) {
-  mBufferSource = new BufferSource(data, size);
-  if (!mBufferSource) {
+  std::unique_ptr<BufferSource> bufferSource = std::make_unique<BufferSource>(data, size);
+  if (!bufferSource) {
     return;
   }
-  mBufferSource->parse();
+  bufferSource->parse();
   c2_status_t status = C2_OK;
   size_t numFrames = 0;
-  while (!mBufferSource->isEos()) {
+  while (!bufferSource->isEos()) {
     uint8_t* frame = nullptr;
     size_t frameSize = 0;
-    FrameData frameData = mBufferSource->getFrame();
+    FrameData frameData = bufferSource->getFrame();
     frame = std::get<0>(frameData);
     frameSize = std::get<1>(frameData);
 
@@ -298,7 +298,6 @@
   mConditionalVariable.wait_for(waitForDecodeComplete, kC2FuzzerTimeOut, [this] { return mEos; });
   std::list<std::unique_ptr<C2Work>> c2flushedWorks;
   mComponent->flush_sm(C2Component::FLUSH_COMPONENT, &c2flushedWorks);
-  delete mBufferSource;
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
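
Both fuzzer changes above are lifetime fixes: the C2Param objects whose addresses go into
configParams are hoisted out of the if/else so they outlive the later config call, and the
raw owning BufferSource pointer becomes a function-local std::unique_ptr so it cannot leak on
an early return. A tiny sketch of the hazard the first fix avoids:

    // Sketch only: the pointee must outlive every use of the pointer vector.
    std::vector<C2Param*> configParams;
    {
        C2StreamSampleRateInfo::output sampleRateInfo(0u, 8000);
        configParams.push_back(&sampleRateInfo);
    }  // sampleRateInfo is destroyed here...
    // ...so passing configParams to config() after this point dereferences a dangling pointer.
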
diff --git a/media/codec2/fuzzer/C2Fuzzer.h b/media/codec2/fuzzer/C2Fuzzer.h
index d5ac81a..da76885 100644
--- a/media/codec2/fuzzer/C2Fuzzer.h
+++ b/media/codec2/fuzzer/C2Fuzzer.h
@@ -104,7 +104,6 @@
     static constexpr size_t kMarkerSuffixSize = 3;
   };
 
-  BufferSource* mBufferSource;
   bool mEos = false;
   C2BlockPool::local_id_t mBlockPoolId;
 
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 122aacd..db7874d 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -57,6 +57,10 @@
     name: "libcodec2_hidl@1.0",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     defaults: ["hidl_defaults"],
 
diff --git a/media/codec2/hidl/1.0/utils/Component.cpp b/media/codec2/hidl/1.0/utils/Component.cpp
index 082c5e3..df30dba 100644
--- a/media/codec2/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hidl/1.0/utils/Component.cpp
@@ -482,6 +482,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : mComponent{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = mComponent.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> mComponent;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
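
The listener death recipient above releases the component when the client process dies, so
codec resources are not pinned by a dead client. A hedged sketch of the matching cleanup such
a recipient typically needs; unlinking is not part of this hunk, and the mListener /
unlinkToDeath usage here is an illustration only:

    // Sketch only: unlink the recipient when the component itself is torn down.
    if (mDeathRecipient != nullptr && mListener != nullptr) {
        mListener->unlinkToDeath(mDeathRecipient);
    }
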
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index 86dccd0..e343655 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -132,6 +132,9 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
 };
 
 }  // namespace utils
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 72f7c43..5c24bd7 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -26,6 +26,7 @@
 #include <C2BlockInternal.h>
 #include <C2Buffer.h>
 #include <C2Component.h>
+#include <C2FenceFactory.h>
 #include <C2Param.h>
 #include <C2ParamInternal.h>
 #include <C2PlatformSupport.h>
@@ -759,17 +760,14 @@
 // Note: File descriptors are not duplicated. The original file descriptor must
 // not be closed before the transaction is complete.
 bool objcpy(hidl_handle* d, const C2Fence& s) {
-    (void)s; // TODO: implement s.fd()
-    int fenceFd = -1;
     d->setTo(nullptr);
-    if (fenceFd >= 0) {
-        native_handle_t *handle = native_handle_create(1, 0);
-        if (!handle) {
-            LOG(ERROR) << "Failed to create a native handle.";
-            return false;
-        }
-        handle->data[0] = fenceFd;
+    native_handle_t* handle = _C2FenceFactory::CreateNativeHandle(s);
+    if (handle) {
         d->setTo(handle, true /* owns */);
+//  } else if (!s.ready()) {
+//      // TODO: we should wait for unmarshallable fences but this may not be
+//      // the best place for it. We can safely ignore here as at this time
+//      // all fences used here are marshallable.
     }
     return true;
 }
@@ -1184,9 +1182,8 @@
 // Note: File descriptors are not duplicated. The original file descriptor must
 // not be closed before the transaction is complete.
 bool objcpy(C2Fence* d, const hidl_handle& s) {
-    // TODO: Implement.
-    (void)s;
-    *d = C2Fence();
+    const native_handle_t* handle = s.getNativeHandle();
+    *d = _C2FenceFactory::CreateFromNativeHandle(handle);
     return true;
 }
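
With _C2FenceFactory the fence now survives the HIDL boundary instead of being silently
dropped. A minimal round-trip sketch using only the two objcpy overloads shown above; `fence`
is an assumed C2Fence taken from a work item:

    // Sketch only: marshal a C2Fence into a hidl_handle and back.
    hidl_handle handle;
    (void)objcpy(&handle, fence);     // wraps the fence's native handle, if it has one
    C2Fence restored;
    (void)objcpy(&restored, handle);  // reconstructs an equivalent C2Fence on the far side
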
 
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
deleted file mode 120000
index 136279c..0000000
--- a/media/codec2/hidl/1.0/vts/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/OWNERS b/media/codec2/hidl/1.0/vts/OWNERS
index dbe89cf..32b11b8 100644
--- a/media/codec2/hidl/1.0/vts/OWNERS
+++ b/media/codec2/hidl/1.0/vts/OWNERS
@@ -1,8 +1,5 @@
+# Bug component: 25690
 # Media team
 lajos@google.com
-pawin@google.com
 taklee@google.com
 wonsik@google.com
-
-# VTS team
-dshi@google.com
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 9e3a823..d47ef67 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -465,6 +465,11 @@
     if (mMime.find("raw") != std::string::npos) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
+    } else if ((mMime.find("g711-alaw") != std::string::npos) ||
+               (mMime.find("g711-mlaw") != std::string::npos)) {
+        // g711 test data is all 1-channel and has no embedded config info.
+        bitStreamInfo[0] = 8000;
+        bitStreamInfo[1] = 1;
     } else {
         ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
     }
diff --git a/media/codec2/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hidl/1.0/vts/functional/common/Android.bp
index 4106be8..be4bafa 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/common/Android.bp
@@ -14,8 +14,8 @@
         "libcodec2-hidl-client-defaults",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/codec2/hidl/client/include",
+    header_libs: [
+        "libcodec2_client_headers",
     ],
 
     srcs: [
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ffec897..275a721 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -61,6 +61,7 @@
   public:
     virtual void SetUp() override {
         getParams();
+        mDisableTest = false;
         mEos = false;
         mClient = android::Codec2Client::CreateFromService(mInstanceName.c_str());
         ASSERT_NE(mClient, nullptr);
@@ -73,6 +74,14 @@
         for (int i = 0; i < MAX_INPUT_BUFFERS; ++i) {
             mWorkQueue.emplace_back(new C2Work);
         }
+
+        C2SecureModeTuning secureModeTuning{};
+        mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
+        if (secureModeTuning.value != C2Config::SM_UNPROTECTED) {
+            mDisableTest = true;
+        }
+
+        if (mDisableTest) std::cout << "[   WARN   ] Test Disabled \n";
     }
 
     virtual void TearDown() override {
@@ -105,6 +114,7 @@
     std::string mInstanceName;
     std::string mComponentName;
     bool mEos;
+    bool mDisableTest;
     std::mutex mQueueLock;
     std::condition_variable mQueueCondition;
     std::list<std::unique_ptr<C2Work>> mWorkQueue;
@@ -324,6 +334,7 @@
 };
 
 TEST_P(Codec2ComponentInputTests, InputBufferTest) {
+    if (mDisableTest) GTEST_SKIP() << "Test is disabled";
     description("Tests for different inputs");
 
     uint32_t flags = std::get<2>(GetParam());
diff --git a/media/codec2/hidl/1.1/utils/Android.bp b/media/codec2/hidl/1.1/utils/Android.bp
index 0eeedb6..ed77a15 100644
--- a/media/codec2/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hidl/1.1/utils/Android.bp
@@ -67,6 +67,11 @@
     name: "libcodec2_hidl@1.1",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
 
     defaults: ["hidl_defaults"],
 
diff --git a/media/codec2/hidl/1.1/utils/Component.cpp b/media/codec2/hidl/1.1/utils/Component.cpp
index 1d7d3d8..2dd922f 100644
--- a/media/codec2/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hidl/1.1/utils/Component.cpp
@@ -489,6 +489,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : component{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = component.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> component;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index 16c81d4..1c8c20c 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -137,6 +137,9 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
 };
 
 } // namespace utils
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
index 8924e6d..7994d32 100644
--- a/media/codec2/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -520,6 +520,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : component{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = component.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> component;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 7937664..d0972ee 100644
--- a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -142,6 +142,10 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
+
 };
 
 } // namespace utils
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hidl/client/Android.bp
index 0e52813..f32711d 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hidl/client/Android.bp
@@ -7,6 +7,24 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_library_headers {
+    name: "libcodec2_client_headers",
+    export_include_dirs: ["include"],
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
 cc_library {
     name: "libcodec2_client",
 
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 42b3c43..0acf7d7 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -1502,6 +1502,7 @@
         igbp = new B2HGraphicBufferProducer2(surface);
     }
 
+    std::scoped_lock lock(mOutputMutex);
     std::shared_ptr<SurfaceSyncObj> syncObj;
 
     if (!surface) {
@@ -1586,6 +1587,24 @@
     mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
 }
 
+void Codec2Client::Component::stopUsingOutputSurface(
+        C2BlockPool::local_id_t blockPoolId) {
+    std::scoped_lock lock(mOutputMutex);
+    mOutputBufferQueue->stop();
+    Return<Status> transStatus = mBase1_0->setOutputSurface(
+            static_cast<uint64_t>(blockPoolId), nullptr);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "setOutputSurface(stopUsingOutputSurface) -- transaction failed.";
+    } else {
+        c2_status_t status =
+                static_cast<c2_status_t>(static_cast<Status>(transStatus));
+        if (status != C2_OK) {
+            LOG(DEBUG) << "setOutputSurface(stopUsingOutputSurface) -- call failed: "
+                       << status << ".";
+        }
+    }
+}
+
 c2_status_t Codec2Client::Component::connectToInputSurface(
         const std::shared_ptr<InputSurface>& inputSurface,
         std::shared_ptr<InputSurfaceConnection>* connection) {
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index 347e58a..49d9b28 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -411,6 +411,10 @@
     // Set max dequeue count for output surface.
     void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
 
+    // Stop using the current output surface.
+    void stopUsingOutputSurface(
+            C2BlockPool::local_id_t blockPoolId);
+
     // Connect to a given InputSurface.
     c2_status_t connectToInputSurface(
             const std::shared_ptr<InputSurface>& inputSurface,
@@ -441,6 +445,11 @@
     struct OutputBufferQueue;
     std::unique_ptr<OutputBufferQueue> mOutputBufferQueue;
 
+    // (b/202903117) Sometimes MediaCodec::setSurface races between a normal
+    // setSurface and a setSurface with ReleaseSurface due to timing issues.
+    // A mutex is added to prevent this race condition.
+    std::mutex mOutputMutex;
+
     static c2_status_t setDeathListener(
             const std::shared_ptr<Component>& component,
             const std::shared_ptr<Listener>& listener);
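
The new mOutputMutex serializes setOutputSurface() and stopUsingOutputSurface() on the client side.
A minimal standalone sketch of that serialization pattern, using plain C++ and hypothetical names
rather than the real Codec2Client API:

    #include <mutex>

    // Hypothetical stand-in for Codec2Client::Component: both surface
    // operations take the same lock, so a concurrent setSurface() /
    // releaseSurface() pair can no longer interleave their updates.
    class SurfaceHolder {
    public:
        void setSurface(int surfaceId) {
            std::scoped_lock lock(mMutex);
            mSurfaceId = surfaceId;   // swap in the new surface
        }
        void stopUsingSurface() {
            std::scoped_lock lock(mMutex);
            mSurfaceId = -1;          // detach; later work sees no surface
        }
    private:
        std::mutex mMutex;
        int mSurfaceId = -1;
    };
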
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
index 877148a..a13edf3 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/output.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -50,6 +50,10 @@
                    int maxDequeueBufferCount,
                    std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
 
+    // Stop using the current output surface. Pending buffer operations will
+    // no longer be performed.
+    void stop();
+
     // Render a graphic block to current surface.
     status_t outputBuffer(
             const C2ConstGraphicBlock& block,
@@ -81,6 +85,7 @@
     sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+    bool mStopped;
 
     bool registerBuffer(const C2ConstGraphicBlock& block);
 };
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index de34c24..f789030 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -169,7 +169,7 @@
 } // unnamed namespace
 
 OutputBufferQueue::OutputBufferQueue()
-      : mGeneration{0}, mBqId{0} {
+      : mGeneration{0}, mBqId{0}, mStopped{false} {
 }
 
 OutputBufferQueue::~OutputBufferQueue() {
@@ -219,6 +219,8 @@
             poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
     {
         std::scoped_lock<std::mutex> l(mMutex);
+        bool stopped = mStopped;
+        mStopped = false;
         if (generation == mGeneration) {
             // case of old BlockPool destruction
             C2SyncVariables *var = mSyncMem ? mSyncMem->mem() : nullptr;
@@ -258,7 +260,7 @@
             return false;
         }
         for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
-            if (mBqId == 0 || !mBuffers[i]) {
+            if (mBqId == 0 || !mBuffers[i] || stopped) {
                 continue;
             }
             std::shared_ptr<_C2BlockPoolData> data = mPoolDatas[i].lock();
@@ -317,6 +319,12 @@
     return true;
 }
 
+void OutputBufferQueue::stop() {
+    std::scoped_lock<std::mutex> l(mMutex);
+    mStopped = true;
+    mOwner.reset(); // the destructor of the block will not trigger IGBP::cancel()
+}
+
 bool OutputBufferQueue::registerBuffer(const C2ConstGraphicBlock& block) {
     std::shared_ptr<_C2BlockPoolData> data =
             _C2BlockFactory::GetGraphicBlockPoolData(block);
@@ -325,7 +333,7 @@
     }
     std::scoped_lock<std::mutex> l(mMutex);
 
-    if (!mIgbp) {
+    if (!mIgbp || mStopped) {
         return false;
     }
 
@@ -371,11 +379,17 @@
 
         std::shared_ptr<C2SurfaceSyncMemory> syncMem;
         mMutex.lock();
+        bool stopped = mStopped;
         sp<IGraphicBufferProducer> outputIgbp = mIgbp;
         uint32_t outputGeneration = mGeneration;
         syncMem = mSyncMem;
         mMutex.unlock();
 
+        if (stopped) {
+            LOG(INFO) << "outputBuffer -- already stopped.";
+            return DEAD_OBJECT;
+        }
+
         status_t status = attachToBufferQueue(
                 block, outputIgbp, outputGeneration, &bqSlot, syncMem);
 
@@ -408,12 +422,18 @@
 
     std::shared_ptr<C2SurfaceSyncMemory> syncMem;
     mMutex.lock();
+    bool stopped = mStopped;
     sp<IGraphicBufferProducer> outputIgbp = mIgbp;
     uint32_t outputGeneration = mGeneration;
     uint64_t outputBqId = mBqId;
     syncMem = mSyncMem;
     mMutex.unlock();
 
+    if (stopped) {
+        LOG(INFO) << "outputBuffer -- already stopped.";
+        return DEAD_OBJECT;
+    }
+
     if (!outputIgbp) {
         LOG(VERBOSE) << "outputBuffer -- output surface is null.";
         return NO_INIT;
@@ -467,7 +487,7 @@
     mMutex.lock();
     mMaxDequeueBufferCount = maxDequeueBufferCount;
     auto syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
-    if (syncVar) {
+    if (syncVar && !mStopped) {
         syncVar->lock();
         syncVar->updateMaxDequeueCountLocked(maxDequeueBufferCount);
         syncVar->unlock();
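
OutputBufferQueue::stop() works by setting a mutex-guarded mStopped flag that later buffer
operations check before touching the IGBP. A self-contained sketch of that guard pattern, with
hypothetical names rather than the real OutputBufferQueue:

    #include <mutex>

    class StoppableQueue {
    public:
        void stop() {
            std::scoped_lock<std::mutex> l(mMutex);
            mStopped = true;          // later outputBuffer() calls fail fast
        }
        // Mirrors the shape of OutputBufferQueue::outputBuffer(): snapshot
        // the state under the lock, then bail out if already stopped.
        int outputBuffer(int /* block */) {
            bool stopped;
            {
                std::scoped_lock<std::mutex> l(mMutex);
                stopped = mStopped;
            }
            if (stopped) {
                return -1;            // analogous to returning DEAD_OBJECT
            }
            return 0;                 // analogous to OK
        }
    private:
        std::mutex mMutex;
        bool mStopped = false;
    };
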
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 70c63f2..d5124fd 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -49,6 +49,11 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -187,7 +192,12 @@
         }
 
         std::vector<C2Param *> stackParamsForIntf;
-        std::copy_n(stackParamsList.begin(), stackParamsList.size(), stackParamsForIntf.begin());
+        for (C2Param *param : stackParamsList) {
+            if (mControlParamTypes.count(param->type()) != 0) {
+                continue;
+            }
+            stackParamsForIntf.push_back(param);
+        }
 
         // Gather heap params that did not get queried from the filter interfaces above.
         // These need to be queried from the decoder interface.
@@ -197,6 +207,9 @@
             if (mTypeToIndexForQuery.find(type) != mTypeToIndexForQuery.end()) {
                 continue;
             }
+            if (mControlParamTypes.count(type) != 0) {
+                continue;
+            }
             heapParamIndicesForIntf.push_back(heapParamIndices[j]);
         }
 
@@ -251,11 +264,14 @@
             std::vector<C2Param *> paramsForFilter;
             for (C2Param* param : params) {
                 auto it = mTypeToIndexForConfig.find(param->type().type());
-                if (it != mTypeToIndexForConfig.end() && it->second != i) {
+                if (it == mTypeToIndexForConfig.end() || it->second != i) {
                     continue;
                 }
                 paramsForFilter.push_back(param);
             }
+            if (paramsForFilter.empty()) {
+                continue;
+            }
             c2_status_t err = filter->config_vb(paramsForFilter, mayBlock, &filterFailures);
             if (err != C2_OK) {
                 LOG(err == C2_BAD_INDEX ? VERBOSE : WARNING)
@@ -356,6 +372,7 @@
     std::weak_ptr<FilterWrapper> mFilterWrapper;
     std::map<uint32_t, size_t> mTypeToIndexForQuery;
     std::map<uint32_t, size_t> mTypeToIndexForConfig;
+    std::set<C2Param::Type> mControlParamTypes;
 
     c2_status_t transferParams_l(
             const std::shared_ptr<C2ComponentInterface> &curr,
@@ -430,6 +447,10 @@
             LOG(DEBUG) << "WrappedDecoderInterface: FilterWrapper not found";
             return C2_OK;
         }
+        if (!filterWrapper->isFilteringEnabled(next)) {
+            LOG(VERBOSE) << "WrappedDecoderInterface: filtering not enabled";
+            return C2_OK;
+        }
         std::vector<std::unique_ptr<C2Param>> params;
         c2_status_t err = filterWrapper->queryParamsForPreviousComponent(next, &params);
         if (err != C2_OK) {
@@ -594,6 +615,8 @@
             }
         }
         mRunningFilters.clear();
+        std::vector<FilterWrapper::Component> filters(mFilters);
+        mIntf->takeFilters(std::move(filters));
         return result;
     }
 
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
index b942be7..5c13b0e 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
@@ -807,7 +807,8 @@
     // affectedParams
     {
         C2StreamHdrStaticInfo::output::PARAM_TYPE,
-        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+        C2StreamHdr10PlusInfo::output::PARAM_TYPE,  // will be deprecated
+        C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
         C2StreamColorAspectsInfo::output::PARAM_TYPE,
     },
 };
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index bb9f51f..b36e80a 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -52,7 +52,7 @@
 
     // minijail is used to protect against unexpected system calls.
     shared_libs: [
-        "libavservices_minijail_vendor",
+        "libavservices_minijail",
         "libbinder",
     ],
     required: ["android.hardware.media.c2@1.2-default-seccomp_policy"],
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
index 03f6e3d..12da593 100644
--- a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
+++ b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
@@ -3,5 +3,5 @@
     user mediacodec
     group camera mediadrm drmrpc
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 2bc748f..5a652a3 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -7,6 +7,12 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_library_headers {
+    name: "libsfplugin_ccodec_internal_headers",
+    export_include_dirs: ["."],
+    // only for internal tests; consider restricting via a visibility clause
+}
+
 cc_library_shared {
     name: "libsfplugin_ccodec",
 
@@ -37,17 +43,23 @@
         "media_ndk_headers",
     ],
 
+    static_libs: [
+        "SurfaceFlingerProperties",
+    ],
+
     shared_libs: [
         "android.hardware.cas.native@1.0",
         "android.hardware.drm@1.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
+        "android.hardware.graphics.mapper@4.0",
         "libbase",
         "libbinder",
         "libcodec2",
         "libcodec2_client",
         "libcodec2_vndk",
         "libcutils",
+        "libgralloctypes",
         "libgui",
         "libhidlallocatorutils",
         "libhidlbase",
@@ -59,6 +71,7 @@
         "libstagefright_codecbase",
         "libstagefright_foundation",
         "libstagefright_omx",
+        "libstagefright_surface_utils",
         "libstagefright_xmlparser",
         "libui",
         "libutils",
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index c049187..ed7d69c 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -42,6 +42,7 @@
 
 #include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
+#include "Codec2Buffer.h"
 
 namespace android {
 
@@ -466,6 +467,18 @@
                 new Buffer2D(block->share(
                         C2Rect(block->width(), block->height()), ::C2Fence())));
         work->input.buffers.push_back(c2Buffer);
+        std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+        GetHdrMetadataFromGralloc4Handle(
+                block->handle(),
+                &staticInfo,
+                &dynamicInfo);
+        if (staticInfo && *staticInfo) {
+            c2Buffer->setInfo(staticInfo);
+        }
+        if (dynamicInfo && *dynamicInfo) {
+            c2Buffer->setInfo(dynamicInfo);
+        }
     }
     work->worklets.clear();
     work->worklets.emplace_back(new C2Worklet);
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 82460c9..296d7ed 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -30,6 +30,7 @@
 #include <android/hardware/media/c2/1.0/IInputSurface.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
 #include <android/hardware/media/omx/1.0/IOmx.h>
+#include <android-base/properties.h>
 #include <android-base/stringprintf.h>
 #include <cutils/properties.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -212,9 +213,8 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        mSource->configure(
-                mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
-        return OK;
+        return GetStatus(mSource->configure(
+                mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace)));
     }
 
     void disconnect() override {
@@ -872,6 +872,11 @@
                         }
                         config->mTunneled = true;
                     }
+
+                    int32_t pushBlankBuffersOnStop = 0;
+                    if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
+                        config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
+                    }
                 }
             }
             setSurface(surface);
@@ -1007,7 +1012,9 @@
             // Query vendor format for Flexible YUV
             std::vector<std::unique_ptr<C2Param>> heapParams;
             C2StoreFlexiblePixelFormatDescriptorsInfo *pixelFormatInfo = nullptr;
-            if (mClient->query(
+            int vendorSdkVersion = base::GetIntProperty(
+                    "ro.vendor.build.version.sdk", android_get_device_api_level());
+            if (vendorSdkVersion >= __ANDROID_API_S__ && mClient->query(
                         {},
                         {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
                         C2_MAY_BLOCK,
@@ -1018,29 +1025,31 @@
             } else {
                 pixelFormatInfo = nullptr;
             }
-            std::optional<uint32_t> flexPixelFormat{};
-            std::optional<uint32_t> flexPlanarPixelFormat{};
-            std::optional<uint32_t> flexSemiPlanarPixelFormat{};
+            // bit depth -> format
+            std::map<uint32_t, uint32_t> flexPixelFormat;
+            std::map<uint32_t, uint32_t> flexPlanarPixelFormat;
+            std::map<uint32_t, uint32_t> flexSemiPlanarPixelFormat;
             if (pixelFormatInfo && *pixelFormatInfo) {
                 for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
                     const C2FlexiblePixelFormatDescriptorStruct &desc =
                         pixelFormatInfo->m.values[i];
-                    if (desc.bitDepth != 8
-                            || desc.subsampling != C2Color::YUV_420
+                    if (desc.subsampling != C2Color::YUV_420
                             // TODO(b/180076105): some device report wrong layout
                             // || desc.layout == C2Color::INTERLEAVED_PACKED
                             // || desc.layout == C2Color::INTERLEAVED_ALIGNED
                             || desc.layout == C2Color::UNKNOWN_LAYOUT) {
                         continue;
                     }
-                    if (!flexPixelFormat) {
-                        flexPixelFormat = desc.pixelFormat;
+                    if (flexPixelFormat.count(desc.bitDepth) == 0) {
+                        flexPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
                     }
-                    if (desc.layout == C2Color::PLANAR_PACKED && !flexPlanarPixelFormat) {
-                        flexPlanarPixelFormat = desc.pixelFormat;
+                    if (desc.layout == C2Color::PLANAR_PACKED
+                            && flexPlanarPixelFormat.count(desc.bitDepth) == 0) {
+                        flexPlanarPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
                     }
-                    if (desc.layout == C2Color::SEMIPLANAR_PACKED && !flexSemiPlanarPixelFormat) {
-                        flexSemiPlanarPixelFormat = desc.pixelFormat;
+                    if (desc.layout == C2Color::SEMIPLANAR_PACKED
+                            && flexSemiPlanarPixelFormat.count(desc.bitDepth) == 0) {
+                        flexSemiPlanarPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
                     }
                 }
             }
@@ -1050,7 +1059,7 @@
                 if (!(config->mDomain & Config::IS_ENCODER)) {
                     if (surface == nullptr) {
                         const char *prefix = "";
-                        if (flexSemiPlanarPixelFormat) {
+                        if (flexSemiPlanarPixelFormat.count(8) != 0) {
                             format = COLOR_FormatYUV420SemiPlanar;
                             prefix = "semi-";
                         } else {
@@ -1065,19 +1074,47 @@
                 }
             } else {
                 if ((config->mDomain & Config::IS_ENCODER) || !surface) {
+                    if (vendorSdkVersion < __ANDROID_API_S__ &&
+                            (format == COLOR_FormatYUV420Flexible ||
+                             format == COLOR_FormatYUV420Planar ||
+                             format == COLOR_FormatYUV420PackedPlanar ||
+                             format == COLOR_FormatYUV420SemiPlanar ||
+                             format == COLOR_FormatYUV420PackedSemiPlanar)) {
+                        // The pre-S framework used to map these color formats to YV12.
+                        // Codecs from older vendor partitions may be relying on
+                        // this assumption.
+                        format = HAL_PIXEL_FORMAT_YV12;
+                    }
                     switch (format) {
                         case COLOR_FormatYUV420Flexible:
-                            format = flexPixelFormat.value_or(COLOR_FormatYUV420Planar);
+                            format = COLOR_FormatYUV420Planar;
+                            if (flexPixelFormat.count(8) != 0) {
+                                format = flexPixelFormat[8];
+                            }
                             break;
                         case COLOR_FormatYUV420Planar:
                         case COLOR_FormatYUV420PackedPlanar:
-                            format = flexPlanarPixelFormat.value_or(
-                                    flexPixelFormat.value_or(format));
+                            if (flexPlanarPixelFormat.count(8) != 0) {
+                                format = flexPlanarPixelFormat[8];
+                            } else if (flexPixelFormat.count(8) != 0) {
+                                format = flexPixelFormat[8];
+                            }
                             break;
                         case COLOR_FormatYUV420SemiPlanar:
                         case COLOR_FormatYUV420PackedSemiPlanar:
-                            format = flexSemiPlanarPixelFormat.value_or(
-                                    flexPixelFormat.value_or(format));
+                            if (flexSemiPlanarPixelFormat.count(8) != 0) {
+                                format = flexSemiPlanarPixelFormat[8];
+                            } else if (flexPixelFormat.count(8) != 0) {
+                                format = flexPixelFormat[8];
+                            }
+                            break;
+                        case COLOR_FormatYUVP010:
+                            format = COLOR_FormatYUVP010;
+                            if (flexSemiPlanarPixelFormat.count(10) != 0) {
+                                format = flexSemiPlanarPixelFormat[10];
+                            } else if (flexPixelFormat.count(10) != 0) {
+                                format = flexPixelFormat[10];
+                            }
                             break;
                         default:
                             // No-op
@@ -1213,11 +1250,25 @@
         std::initializer_list<C2Param::Index> indices {
             colorAspectsRequestIndex.withStream(0u),
         };
-        c2_status_t c2err = comp->query(
-                { &usage, &maxInputSize, &prepend },
-                indices,
-                C2_DONT_BLOCK,
-                &params);
+        int32_t colorTransferRequest = 0;
+        if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
+                && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
+            colorTransferRequest = 0;
+        }
+        c2_status_t c2err = C2_OK;
+        if (colorTransferRequest != 0) {
+            c2err = comp->query(
+                    { &usage, &maxInputSize, &prepend },
+                    indices,
+                    C2_DONT_BLOCK,
+                    &params);
+        } else {
+            c2err = comp->query(
+                    { &usage, &maxInputSize, &prepend },
+                    {},
+                    C2_DONT_BLOCK,
+                    &params);
+        }
         if (c2err != C2_OK && c2err != C2_BAD_INDEX) {
             ALOGE("Failed to query component interface: %d", c2err);
             return UNKNOWN_ERROR;
@@ -1332,8 +1383,8 @@
             }
         }
 
-        // set channel-mask
         if (config->mDomain & Config::IS_AUDIO) {
+            // set channel-mask
             int32_t mask;
             if (msg->findInt32(KEY_CHANNEL_MASK, &mask)) {
                 if (config->mDomain & Config::IS_ENCODER) {
@@ -1342,6 +1393,15 @@
                     config->mOutputFormat->setInt32(KEY_CHANNEL_MASK, mask);
                 }
             }
+
+            // set PCM encoding
+            int32_t pcmEncoding = kAudioEncodingPcm16bit;
+            msg->findInt32(KEY_PCM_ENCODING, &pcmEncoding);
+            if (encoder) {
+                config->mInputFormat->setInt32("android._config-pcm-encoding", pcmEncoding);
+            } else {
+                config->mOutputFormat->setInt32("android._config-pcm-encoding", pcmEncoding);
+            }
         }
 
         std::unique_ptr<C2Param> colorTransferRequestParam;
@@ -1351,11 +1411,6 @@
                 colorTransferRequestParam = std::move(param);
             }
         }
-        int32_t colorTransferRequest = 0;
-        if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
-                && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
-            colorTransferRequest = 0;
-        }
 
         if (colorTransferRequest != 0) {
             if (colorTransferRequestParam && *colorTransferRequestParam) {
@@ -1380,7 +1435,7 @@
                 int64_t blockUsage =
                     usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
                 std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
-                        width, height, pixelFormat, blockUsage, {comp->getName()});
+                        width, height, componentColorFormat, blockUsage, {comp->getName()});
                 sp<GraphicBlockBuffer> buffer;
                 if (block) {
                     buffer = GraphicBlockBuffer::Allocate(
@@ -1421,6 +1476,35 @@
             }
         }
 
+        if (config->mTunneled) {
+            config->mOutputFormat->setInt32("android._tunneled", 1);
+        }
+
+        // Convert an encoding statistics level to corresponding encoding statistics
+        // kinds
+        int32_t encodingStatisticsLevel = VIDEO_ENCODING_STATISTICS_LEVEL_NONE;
+        if ((config->mDomain & Config::IS_ENCODER)
+            && (config->mDomain & Config::IS_VIDEO)
+            && msg->findInt32(KEY_VIDEO_ENCODING_STATISTICS_LEVEL, &encodingStatisticsLevel)) {
+            // Higher levels include all the enc stats belonging to lower levels.
+            switch (encodingStatisticsLevel) {
+                // case VIDEO_ENCODING_STATISTICS_LEVEL_2: // reserved for the future level 2
+                                                           // with more enc stat kinds
+                // Future extended encoding statistics for level 2 should be added here
+                case VIDEO_ENCODING_STATISTICS_LEVEL_1:
+                    config->subscribeToConfigUpdate(
+                            comp,
+                            {
+                                C2AndroidStreamAverageBlockQuantizationInfo::output::PARAM_TYPE,
+                                C2StreamPictureTypeInfo::output::PARAM_TYPE,
+                            });
+                    break;
+                case VIDEO_ENCODING_STATISTICS_LEVEL_NONE:
+                    break;
+            }
+        }
+        ALOGD("encoding statistics level = %d", encodingStatisticsLevel);
+
         ALOGD("setup formats input: %s",
                 config->mInputFormat->debugString().c_str());
         ALOGD("setup formats output: %s",
@@ -1465,6 +1549,9 @@
     using namespace android::hardware::graphics::bufferqueue::V1_0::utils;
     typedef android::hardware::media::omx::V1_0::Status OmxStatus;
     android::sp<IOmx> omx = IOmx::getService();
+    if (omx == nullptr) {
+        return nullptr;
+    }
     typedef android::hardware::graphics::bufferqueue::V1_0::
             IGraphicBufferProducer HGraphicBufferProducer;
     typedef android::hardware::media::omx::V1_0::
@@ -1742,9 +1829,21 @@
     if (tryAndReportOnError(setRunning) != OK) {
         return;
     }
+
+    // Preparation of input buffers may not succeed due to lack of memory;
+    // returning the correct error code (NO_MEMORY) allows MediaCodec to try
+    // to reclaim memory and restart the codec gracefully.
+    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+    err2 = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    if (err2 != OK) {
+        ALOGE("Initial preparation for Input Buffers failed");
+        mCallback->onError(err2, ACTION_CODE_FATAL);
+        return;
+    }
+
     mCallback->onStartCompleted();
 
-    (void)mChannel->requestInitialInputBuffers();
+    mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
 }
 
 void CCodec::initiateShutdown(bool keepComponentAllocated) {
@@ -1770,7 +1869,13 @@
         }
         state->set(STOPPING);
     }
-
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mPushBlankBuffersOnStop) {
+            mChannel->pushBlankBufferToOutputSurface();
+        }
+    }
     mChannel->reset();
     (new AMessage(kWhatStop, this))->post();
 }
@@ -1858,6 +1963,13 @@
             config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mPushBlankBuffersOnStop) {
+            mChannel->pushBlankBufferToOutputSurface();
+        }
+    }
 
     mChannel->reset();
     // thiz holds strong ref to this while the thread is running.
@@ -1896,9 +2008,11 @@
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
+        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+        status_t err = OK;
+
         if (config->mTunneled && config->mSidebandHandle != nullptr) {
-            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
-            status_t err = native_window_set_sideband_stream(
+            err = native_window_set_sideband_stream(
                     nativeWindow.get(),
                     const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
             if (err != OK) {
@@ -1906,6 +2020,15 @@
                         nativeWindow.get(), config->mSidebandHandle->handle(), err);
                 return err;
             }
+        } else {
+            // Explicitly reset the sideband handle of the window for
+            // non-tunneled video in case the window was previously used
+            // for a tunneled video playback.
+            err = native_window_set_sideband_stream(nativeWindow.get(), nullptr);
+            if (err != OK) {
+                ALOGE("native_window_set_sideband_stream(nullptr) failed! (err %d).", err);
+                return err;
+            }
         }
     }
     return mChannel->setSurface(surface);
@@ -2014,7 +2137,14 @@
         state->set(RUNNING);
     }
 
-    (void)mChannel->requestInitialInputBuffers();
+    std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    if (err != OK) {
+        ALOGE("Resume request for Input Buffers failed");
+        mCallback->onError(err, ACTION_CODE_FATAL);
+        return;
+    }
+    mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
 }
 
 void CCodec::signalSetParameters(const sp<AMessage> &msg) {
@@ -2299,7 +2429,8 @@
                         C2StreamColorAspectsInfo::output::PARAM_TYPE,
                         C2StreamDataSpaceInfo::output::PARAM_TYPE,
                         C2StreamHdrStaticInfo::output::PARAM_TYPE,
-                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+                        C2StreamHdr10PlusInfo::output::PARAM_TYPE,  // will be deprecated
+                        C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
                         C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
                         C2StreamSurfaceScalingInfo::output::PARAM_TYPE
                     };
@@ -2546,7 +2677,10 @@
         std::vector<std::unique_ptr<C2Param>> params;
         err = intf->query(
                 {&mApiFeatures},
-                {C2PortAllocatorsTuning::input::PARAM_TYPE},
+                {
+                    C2StreamBufferTypeSetting::input::PARAM_TYPE,
+                    C2PortAllocatorsTuning::input::PARAM_TYPE
+                },
                 C2_MAY_BLOCK,
                 &params);
         if (err != C2_OK && err != C2_BAD_INDEX) {
@@ -2559,7 +2693,10 @@
             if (!param) {
                 continue;
             }
-            if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
+            if (param->type() == C2StreamBufferTypeSetting::input::PARAM_TYPE) {
+                mInputStreamFormat.reset(
+                        C2StreamBufferTypeSetting::input::From(param));
+            } else if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
                 mInputAllocators.reset(
                         C2PortAllocatorsTuning::input::From(param));
             }
@@ -2579,6 +2716,16 @@
         return mApiFeatures;
     }
 
+    const C2StreamBufferTypeSetting::input &getInputStreamFormat() const {
+        static std::unique_ptr<C2StreamBufferTypeSetting::input> sInvalidated = []{
+            std::unique_ptr<C2StreamBufferTypeSetting::input> param;
+            param.reset(new C2StreamBufferTypeSetting::input(0u, C2BufferData::INVALID));
+            param->invalidate();
+            return param;
+        }();
+        return mInputStreamFormat ? *mInputStreamFormat : *sInvalidated;
+    }
+
     const C2PortAllocatorsTuning::input &getInputAllocators() const {
         static std::unique_ptr<C2PortAllocatorsTuning::input> sInvalidated = []{
             std::unique_ptr<C2PortAllocatorsTuning::input> param =
@@ -2594,6 +2741,7 @@
 
     std::vector<C2FieldSupportedValuesQuery> mFields;
     C2ApiFeaturesSetting mApiFeatures;
+    std::unique_ptr<C2StreamBufferTypeSetting::input> mInputStreamFormat;
     std::unique_ptr<C2PortAllocatorsTuning::input> mInputAllocators;
 };
 
@@ -2635,6 +2783,24 @@
         if (intfCache.initCheck() != OK) {
             continue;
         }
+        const C2StreamBufferTypeSetting::input &streamFormat = intfCache.getInputStreamFormat();
+        if (streamFormat) {
+            C2Allocator::type_t allocatorType = C2Allocator::LINEAR;
+            if (streamFormat.value == C2BufferData::GRAPHIC
+                    || streamFormat.value == C2BufferData::GRAPHIC_CHUNKS) {
+                allocatorType = C2Allocator::GRAPHIC;
+            }
+
+            if (type != allocatorType) {
+                // the requested type is not supported by the input allocators
+                ids->clear();
+                ids->insert(defaultAllocatorId);
+                ALOGV("name(%s) does not support type(0x%x) as an input allocator;"
+                        " using default allocator id(%d)", name.c_str(), type, defaultAllocatorId);
+                break;
+            }
+        }
+
         const C2PortAllocatorsTuning::input &allocators = intfCache.getInputAllocators();
         if (firstIteration) {
             firstIteration = false;
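
The optional flexible-format trackers become maps keyed by bit depth so that 8-bit and 10-bit
(P010) requests resolve independently. A standalone sketch of that lookup-with-fallback, with
hypothetical names:

    #include <cstdint>
    #include <map>

    // Prefer the layout-specific table, then the generic one, then the
    // caller's fallback format, matching the precedence the CCodec change
    // applies for COLOR_FormatYUV420SemiPlanar and COLOR_FormatYUVP010.
    uint32_t pickFormat(uint32_t bitDepth,
                        uint32_t fallback,
                        const std::map<uint32_t, uint32_t> &semiPlanar,
                        const std::map<uint32_t, uint32_t> &generic) {
        if (auto it = semiPlanar.find(bitDepth); it != semiPlanar.end()) {
            return it->second;
        }
        if (auto it = generic.find(bitDepth); it != generic.end()) {
            return it->second;
        }
        return fallback;
    }
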
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6f7b7f7..5ecb130 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -15,8 +15,11 @@
  */
 
 //#define LOG_NDEBUG 0
+#include <utils/Errors.h>
 #define LOG_TAG "CCodecBufferChannel"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <algorithm>
 #include <atomic>
@@ -44,9 +47,9 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/SurfaceUtils.h>
 #include <media/MediaCodecBuffer.h>
 #include <mediadrm/ICrypto.h>
 #include <system/window.h>
@@ -254,7 +257,7 @@
                 bool released = input->buffers->releaseBuffer(buffer, nullptr, true);
                 ALOGV("[%s] queueInputBuffer: buffer copied; %sreleased",
                       mName, released ? "" : "not ");
-                buffer.clear();
+                buffer = copy;
             } else {
                 ALOGW("[%s] queueInputBuffer: failed to copy a buffer; this may cause input "
                       "buffer starvation on component.", mName);
@@ -282,6 +285,12 @@
             }
         }
     } else if (eos) {
+        Mutexed<Input>::Locked input(mInput);
+        if (input->frameReassembler) {
+            usesFrameReassembler = true;
+            // drain any pending items with eos
+            input->frameReassembler.process(buffer, &items);
+        }
         flags |= C2FrameData::FLAG_END_OF_STREAM;
     }
     if (usesFrameReassembler) {
@@ -321,6 +330,8 @@
     }
     c2_status_t err = C2_OK;
     if (!items.empty()) {
+        ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+                "CCodecBufferChannel::queue(%s@ts=%lld)", mName, (long long)timeUs).c_str());
         {
             Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
             PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
@@ -341,10 +352,10 @@
     } else {
         Mutexed<Input>::Locked input(mInput);
         bool released = false;
-        if (buffer) {
-            released = input->buffers->releaseBuffer(buffer, nullptr, true);
-        } else if (copy) {
+        if (copy) {
             released = input->extraBuffers.releaseSlot(copy, nullptr, true);
+        } else if (buffer) {
+            released = input->buffers->releaseBuffer(buffer, nullptr, true);
         }
         ALOGV("[%s] queueInputBuffer: buffer%s %sreleased",
               mName, (buffer == nullptr) ? "(copy)" : "", released ? "" : "not ");
@@ -421,6 +432,10 @@
     for (size_t i = 0; i < numSubSamples; ++i) {
         size += subSamples[i].mNumBytesOfClearData + subSamples[i].mNumBytesOfEncryptedData;
     }
+    if (size == 0) {
+        buffer->setRange(0, 0);
+        return OK;
+    }
     std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
     std::shared_ptr<C2LinearBlock> block;
     c2_status_t err = pool->fetchLinearBlock(
@@ -428,6 +443,8 @@
             secure ? kSecureUsage : kDefaultReadWriteUsage,
             &block);
     if (err != C2_OK) {
+        ALOGI("[%s] attachEncryptedBuffer: fetchLinearBlock failed: size = %zu (%s) err = %d",
+              mName, size, secure ? "secure" : "non-secure", err);
         return NO_MEMORY;
     }
     if (!secure) {
@@ -452,18 +469,9 @@
                 key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
                 dst, &errorDetailMsg);
         if (result < 0) {
+            ALOGI("[%s] attachEncryptedBuffer: decrypt failed: result = %zd", mName, result);
             return result;
         }
-        if (dst.type == DrmBufferType::SHARED_MEMORY) {
-            C2WriteView view = block->map().get();
-            if (view.error() != C2_OK) {
-                return false;
-            }
-            if (view.size() < result) {
-                return false;
-            }
-            memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
-        }
     } else {
         // Here we cast CryptoPlugin::SubSample to hardware::cas::native::V1_0::SubSample
         // directly, the structure definitions should match as checked in DescramblerImpl.cpp.
@@ -512,16 +520,22 @@
         }
 
         if (result < codecDataOffset) {
-            ALOGD("invalid codec data offset: %zd, result %zd", codecDataOffset, result);
+            ALOGD("[%s] invalid codec data offset: %zd, result %zd",
+                  mName, codecDataOffset, result);
             return BAD_VALUE;
         }
     }
     if (!secure) {
         C2WriteView view = block->map().get();
         if (view.error() != C2_OK) {
+            ALOGI("[%s] attachEncryptedBuffer: block map error: %d (non-secure)",
+                  mName, view.error());
             return UNKNOWN_ERROR;
         }
         if (view.size() < result) {
+            ALOGI("[%s] attachEncryptedBuffer: block size too small: size=%u result=%zd "
+                  "(non-secure)",
+                  mName, view.size(), result);
             return UNKNOWN_ERROR;
         }
         memcpy(view.data(), mDecryptDestination->unsecurePointer(), result);
@@ -529,6 +543,7 @@
     std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
             block->share(codecDataOffset, result - codecDataOffset, C2Fence{}))};
     if (!buffer->copy(c2Buffer)) {
+        ALOGI("[%s] attachEncryptedBuffer: buffer copy failed", mName);
         return -ENOSYS;
     }
     return OK;
@@ -834,12 +849,54 @@
         hdr10PlusInfo.reset();
     }
 
+    // HDR dynamic info
+    std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> hdrDynamicInfo =
+        std::static_pointer_cast<const C2StreamHdrDynamicMetadataInfo::output>(
+                c2Buffer->getInfo(C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE));
+    // TODO: make this sticky & enable unset
+    if (hdrDynamicInfo && hdrDynamicInfo->flexCount() == 0) {
+        hdrDynamicInfo.reset();
+    }
+
+    if (hdr10PlusInfo) {
+        // C2StreamHdr10PlusInfo is deprecated; components should use
+        // C2StreamHdrDynamicMetadataInfo
+        // TODO: #metric
+        if (hdrDynamicInfo) {
+            // It is unexpected that C2StreamHdr10PlusInfo and
+            // C2StreamHdrDynamicMetadataInfo are both present.
+            // C2StreamHdrDynamicMetadataInfo takes priority.
+            // TODO: #metric
+        } else {
+            std::shared_ptr<C2StreamHdrDynamicMetadataInfo::output> info =
+                    C2StreamHdrDynamicMetadataInfo::output::AllocShared(
+                            hdr10PlusInfo->flexCount(),
+                            0u,
+                            C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+            memcpy(info->m.data, hdr10PlusInfo->m.value, hdr10PlusInfo->flexCount());
+            hdrDynamicInfo = info;
+        }
+    }
+
     std::vector<C2ConstGraphicBlock> blocks = c2Buffer->data().graphicBlocks();
     if (blocks.size() != 1u) {
         ALOGD("[%s] expected 1 graphic block, but got %zu", mName, blocks.size());
         return UNKNOWN_ERROR;
     }
     const C2ConstGraphicBlock &block = blocks.front();
+    C2Fence c2fence = block.fence();
+    sp<Fence> fence = Fence::NO_FENCE;
+    // TODO: it's not sufficient to just check isHW() and then construct android::fence from it.
+    // Once C2Fence::type() is added, check the exact C2Fence type
+    if (c2fence.isHW()) {
+        int fenceFd = c2fence.fd();
+        fence = sp<Fence>::make(fenceFd);
+        if (!fence) {
+            ALOGE("[%s] Failed to allocate a fence", mName);
+            close(fenceFd);
+            return NO_MEMORY;
+        }
+    }
 
     // TODO: revisit this after C2Fence implementation.
     android::IGraphicBufferProducer::QueueBufferInput qbi(
@@ -852,8 +909,8 @@
                  blocks.front().crop().bottom()),
             videoScalingMode,
             transform,
-            Fence::NO_FENCE, 0);
-    if (hdrStaticInfo || hdr10PlusInfo) {
+            fence, 0);
+    if (hdrStaticInfo || hdrDynamicInfo) {
         HdrMetadata hdr;
         if (hdrStaticInfo) {
             // If mastering max and min luminance fields are 0, do not use them.
@@ -889,14 +946,22 @@
                 hdr.validTypes |= HdrMetadata::CTA861_3;
                 hdr.cta8613 = cta861_meta;
             }
+
+            // no valid static metadata was extracted; drop hdrStaticInfo
+            if (!(hdr.validTypes & (HdrMetadata::SMPTE2086 | HdrMetadata::CTA861_3))) {
+                hdrStaticInfo.reset();
+            }
         }
-        if (hdr10PlusInfo) {
+        if (hdrDynamicInfo
+                && hdrDynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
             hdr.validTypes |= HdrMetadata::HDR10PLUS;
             hdr.hdr10plus.assign(
-                    hdr10PlusInfo->m.value,
-                    hdr10PlusInfo->m.value + hdr10PlusInfo->flexCount());
+                    hdrDynamicInfo->m.data,
+                    hdrDynamicInfo->m.data + hdrDynamicInfo->flexCount());
         }
         qbi.setHdrMetadata(hdr);
+
+        SetHdrMetadataToGralloc4Handle(hdrStaticInfo, hdrDynamicInfo, block.handle());
     }
     // we don't have dirty regions
     qbi.setSurfaceDamage(Region::INVALID_REGION);
@@ -951,6 +1016,10 @@
     array->clear();
     Mutexed<Input>::Locked input(mInput);
 
+    if (!input->buffers) {
+        ALOGE("getInputBufferArray: No Input Buffers allocated");
+        return;
+    }
     if (!input->buffers->isArrayMode()) {
         input->buffers = input->buffers->toArrayMode(input->numSlots);
     }
@@ -961,7 +1030,10 @@
 void CCodecBufferChannel::getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
     array->clear();
     Mutexed<Output>::Locked output(mOutput);
-
+    if (!output->buffers) {
+        ALOGE("getOutputBufferArray: No Output Buffers allocated");
+        return;
+    }
     if (!output->buffers->isArrayMode()) {
         output->buffers = output->buffers->toArrayMode(output->numSlots);
     }
@@ -1382,6 +1454,12 @@
                 }
             }
         }
+
+        int32_t tunneled = 0;
+        if (!outputFormat->findInt32("android._tunneled", &tunneled)) {
+            tunneled = 0;
+        }
+        mTunneled = (tunneled != 0);
     }
 
     // Set up pipeline control. This has to be done after mInputBuffers and
@@ -1403,54 +1481,47 @@
     return OK;
 }
 
-status_t CCodecBufferChannel::requestInitialInputBuffers() {
+status_t CCodecBufferChannel::prepareInitialInputBuffers(
+        std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers) {
     if (mInputSurface) {
         return OK;
     }
 
+    size_t numInputSlots = mInput.lock()->numSlots;
+
+    {
+        Mutexed<Input>::Locked input(mInput);
+        while (clientInputBuffers->size() < numInputSlots) {
+            size_t index;
+            sp<MediaCodecBuffer> buffer;
+            if (!input->buffers->requestNewBuffer(&index, &buffer)) {
+                break;
+            }
+            clientInputBuffers->emplace(index, buffer);
+        }
+    }
+    if (clientInputBuffers->empty()) {
+        ALOGW("[%s] start: cannot allocate memory at all", mName);
+        return NO_MEMORY;
+    } else if (clientInputBuffers->size() < numInputSlots) {
+        ALOGD("[%s] start: cannot allocate memory for all slots, "
+              "only %zu buffers allocated",
+              mName, clientInputBuffers->size());
+    } else {
+        ALOGV("[%s] %zu initial input buffers available",
+              mName, clientInputBuffers->size());
+    }
+    return OK;
+}
+
+status_t CCodecBufferChannel::requestInitialInputBuffers(
+        std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers) {
     C2StreamBufferTypeSetting::output oStreamFormat(0u);
     C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
     c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
     if (err != C2_OK && err != C2_BAD_INDEX) {
         return UNKNOWN_ERROR;
     }
-    size_t numInputSlots = mInput.lock()->numSlots;
-
-    struct ClientInputBuffer {
-        size_t index;
-        sp<MediaCodecBuffer> buffer;
-        size_t capacity;
-    };
-    std::list<ClientInputBuffer> clientInputBuffers;
-
-    {
-        Mutexed<Input>::Locked input(mInput);
-        while (clientInputBuffers.size() < numInputSlots) {
-            ClientInputBuffer clientInputBuffer;
-            if (!input->buffers->requestNewBuffer(&clientInputBuffer.index,
-                                                  &clientInputBuffer.buffer)) {
-                break;
-            }
-            clientInputBuffer.capacity = clientInputBuffer.buffer->capacity();
-            clientInputBuffers.emplace_back(std::move(clientInputBuffer));
-        }
-    }
-    if (clientInputBuffers.empty()) {
-        ALOGW("[%s] start: cannot allocate memory at all", mName);
-        return NO_MEMORY;
-    } else if (clientInputBuffers.size() < numInputSlots) {
-        ALOGD("[%s] start: cannot allocate memory for all slots, "
-              "only %zu buffers allocated",
-              mName, clientInputBuffers.size());
-    } else {
-        ALOGV("[%s] %zu initial input buffers available",
-              mName, clientInputBuffers.size());
-    }
-    // Sort input buffers by their capacities in increasing order.
-    clientInputBuffers.sort(
-            [](const ClientInputBuffer& a, const ClientInputBuffer& b) {
-                return a.capacity < b.capacity;
-            });
 
     std::list<std::unique_ptr<C2Work>> flushedConfigs;
     mFlushedConfigs.lock()->swap(flushedConfigs);
@@ -1472,25 +1543,31 @@
         }
     }
     if (oStreamFormat.value == C2BufferData::LINEAR &&
-            (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
-        sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
+            (!prepend || prepend.value == PREPEND_HEADER_TO_NONE) &&
+            !clientInputBuffers.empty()) {
+        size_t minIndex = clientInputBuffers.begin()->first;
+        sp<MediaCodecBuffer> minBuffer = clientInputBuffers.begin()->second;
+        for (const auto &[index, buffer] : clientInputBuffers) {
+            if (minBuffer->capacity() > buffer->capacity()) {
+                minIndex = index;
+                minBuffer = buffer;
+            }
+        }
         // WORKAROUND: Some apps expect CSD available without queueing
         //             any input. Queue an empty buffer to get the CSD.
-        buffer->setRange(0, 0);
-        buffer->meta()->clear();
-        buffer->meta()->setInt64("timeUs", 0);
-        if (queueInputBufferInternal(buffer) != OK) {
+        minBuffer->setRange(0, 0);
+        minBuffer->meta()->clear();
+        minBuffer->meta()->setInt64("timeUs", 0);
+        if (queueInputBufferInternal(minBuffer) != OK) {
             ALOGW("[%s] Error while queueing an empty buffer to get CSD",
                   mName);
             return UNKNOWN_ERROR;
         }
-        clientInputBuffers.pop_front();
+        clientInputBuffers.erase(minIndex);
     }
 
-    for (const ClientInputBuffer& clientInputBuffer: clientInputBuffers) {
-        mCallback->onInputBufferAvailable(
-                clientInputBuffer.index,
-                clientInputBuffer.buffer);
+    for (const auto &[index, buffer] : clientInputBuffers) {
+        mCallback->onInputBufferAvailable(index, buffer);
     }
 
     return OK;
@@ -1516,6 +1593,14 @@
         Mutexed<Output>::Locked output(mOutput);
         output->buffers.reset();
     }
+    if (mOutputSurface.lock()->surface) {
+        C2BlockPool::local_id_t outputPoolId;
+        {
+            Mutexed<BlockPools>::Locked pools(mBlockPools);
+            outputPoolId = pools->outputPoolId;
+        }
+        mComponent->stopUsingOutputSurface(outputPoolId);
+    }
 }
 
 void CCodecBufferChannel::release() {
@@ -1848,7 +1933,7 @@
 
     int32_t flags = 0;
     if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
-        flags |= MediaCodec::BUFFER_FLAG_EOS;
+        flags |= BUFFER_FLAG_END_OF_STREAM;
         ALOGV("[%s] onWorkDone: output EOS", mName);
     }
 
@@ -1865,6 +1950,8 @@
         // When using input surface we need to restore the original input timestamp.
         timestamp = work->input.ordinal.customOrdinal;
     }
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::onWorkDone(%s@ts=%lld)", mName, timestamp.peekll()).c_str());
     ALOGV("[%s] onWorkDone: input %lld, codec %lld => output %lld => %lld",
           mName,
           work->input.ordinal.customOrdinal.peekll(),
@@ -1886,7 +1973,7 @@
         sp<MediaCodecBuffer> outBuffer;
         if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
             outBuffer->meta()->setInt64("timeUs", timestamp.peek());
-            outBuffer->meta()->setInt32("flags", MediaCodec::BUFFER_FLAG_CODECCONFIG);
+            outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
             ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
 
             output.unlock();
@@ -1899,10 +1986,21 @@
         }
     }
 
+    bool drop = false;
+    if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+        ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+        drop = true;
+    }
+
     if (notifyClient && !buffer && !flags) {
-        ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
-              mName, work->input.ordinal.frameIndex.peekull());
-        notifyClient = false;
+        if (mTunneled && drop && outputFormat) {
+            ALOGV("[%s] onWorkDone: Keep tunneled, drop frame with format change (%lld)",
+                  mName, work->input.ordinal.frameIndex.peekull());
+        } else {
+            ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
+                  mName, work->input.ordinal.frameIndex.peekull());
+            notifyClient = false;
+        }
     }
 
     if (buffer) {
@@ -1911,7 +2009,7 @@
             switch (info->coreIndex().coreIndex()) {
                 case C2StreamPictureTypeMaskInfo::CORE_INDEX:
                     if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2Config::SYNC_FRAME) {
-                        flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+                        flags |= BUFFER_FLAG_KEY_FRAME;
                     }
                     break;
                 default:
@@ -1943,6 +2041,9 @@
     sp<MediaCodecBuffer> outBuffer;
     std::shared_ptr<C2Buffer> c2Buffer;
 
+    constexpr int kMaxReallocTry = 5;
+    int reallocTryNum = 0;
+
     while (true) {
         Mutexed<Output>::Locked output(mOutput);
         if (!output->buffers) {
@@ -1950,6 +2051,9 @@
         }
         action = output->buffers->popFromStashAndRegister(
                 &c2Buffer, &index, &outBuffer);
+        if (action != OutputBuffers::REALLOCATE) {
+            reallocTryNum = 0;
+        }
         switch (action) {
         case OutputBuffers::SKIP:
             return;
@@ -1960,6 +2064,13 @@
             mCallback->onOutputBufferAvailable(index, outBuffer);
             break;
         case OutputBuffers::REALLOCATE:
+            if (++reallocTryNum > kMaxReallocTry) {
+                output.unlock();
+                ALOGE("[%s] sendOutputBuffers: tried %d reallocations and failed",
+                      mName, kMaxReallocTry);
+                mCCodecCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                return;
+            }
             if (!output->buffers->isArrayMode()) {
                 output->buffers =
                     output->buffers->toArrayMode(output->numSlots);
@@ -2031,12 +2142,13 @@
 }
 
 PipelineWatcher::Clock::duration CCodecBufferChannel::elapsed() {
-    // When client pushed EOS, we want all the work to be done quickly.
     // Otherwise, component may have stalled work due to input starvation up to
     // the sum of the delay in the pipeline.
+    // TODO(b/231253301): When the client has pushed EOS, the pipeline could
+    //                    have fewer frames.
     size_t n = 0;
-    if (!mInputMetEos) {
-        size_t outputDelay = mOutput.lock()->outputDelay;
+    size_t outputDelay = mOutput.lock()->outputDelay;
+    {
         Mutexed<Input>::Locked input(mInput);
         n = input->inputDelay + input->pipelineDelay + outputDelay;
     }
@@ -2115,4 +2227,13 @@
     }
 }
 
+status_t CCodecBufferChannel::pushBlankBufferToOutputSurface() {
+    Mutexed<OutputSurface>::Locked output(mOutputSurface);
+    sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(output->surface.get());
+    if (nativeWindow == nullptr) {
+        return INVALID_OPERATION;
+    }
+    return pushBlankBuffersToNativeWindow(nativeWindow.get());
+}
+
 }  // namespace android
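
sendOutputBuffers() now caps consecutive REALLOCATE outcomes at kMaxReallocTry instead of looping
indefinitely. A minimal sketch of that bounded-retry guard, with a hypothetical step() callback
standing in for popFromStashAndRegister():

    #include <cstdio>

    // step() returns 0 when a buffer was handled and 1 when reallocation is
    // needed; repeated reallocation requests eventually become a hard error.
    bool drainWithBoundedRealloc(int (*step)()) {
        constexpr int kMaxReallocTry = 5;
        int reallocTryNum = 0;
        while (true) {
            int action = step();
            if (action != 1) {
                return true;          // done, or a non-realloc action
            }
            if (++reallocTryNum > kMaxReallocTry) {
                std::fprintf(stderr, "gave up after %d realloc attempts\n",
                             kMaxReallocTry);
                return false;         // analogous to onError(..., ACTION_CODE_FATAL)
            }
            // grow buffers here and retry
        }
    }
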
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 4db69cb..f29a225 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -130,9 +130,23 @@
             bool buffersBoundToCodec);
 
     /**
-     * Request initial input buffers to be filled by client.
+     * Prepare initial input buffers to be filled by the client.
+     *
+     * \param[out] clientInputBuffers  pointer to a slot index -> buffer map.
+     *                                 On success, it contains the prepared
+     *                                 initial input buffers.
      */
-    status_t requestInitialInputBuffers();
+    status_t prepareInitialInputBuffers(
+            std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers);
+
+    /**
+     * Request initial input buffers as prepared in clientInputBuffers.
+     *
+     * \param[in] clientInputBuffers  slot index -> buffer map with the prepared
+     *                                initial input buffers.
+     */
+    status_t requestInitialInputBuffers(
+            std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers);
 
     /**
      * Stop queueing buffers to the component. This object should never queue
@@ -181,6 +195,11 @@
 
     void setMetaMode(MetaMode mode);
 
+    /**
+     * Push a blank buffer to the configured native output surface.
+     */
+    status_t pushBlankBufferToOutputSurface();
+
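This pairs with the mPushBlankBuffersOnStop flag added to CCodecConfig later in this patch; the code that consumes both is outside this excerpt, but the intended shape is roughly the following (hypothetical sketch, not the actual CCodec code):

    if (config->mPushBlankBuffersOnStop) {
        status_t err = mChannel->pushBlankBufferToOutputSurface();
        ALOGD_IF(err != OK, "pushBlankBufferToOutputSurface failed: %d", err);
    }

i.e. when the client set KEY_PUSH_BLANK_BUFFERS_ON_STOP, a blank frame is pushed so the surface does not keep showing the last decoded frame after stop.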
 private:
     class QueueGuard;
 
@@ -324,6 +343,8 @@
         return mCrypto != nullptr || mDescrambler != nullptr;
     }
     std::atomic_bool mSendEncryptedInfoBuffer;
+
+    std::atomic_bool mTunneled;
 };
 
 // Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 333a2ca..0f4a8d8 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -21,7 +21,7 @@
 #include <C2PlatformSupport.h>
 
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <mediadrm/ICrypto.h>
@@ -33,7 +33,9 @@
 
 namespace {
 
-sp<GraphicBlockBuffer> AllocateGraphicBuffer(
+constexpr uint32_t PIXEL_FORMAT_UNKNOWN = 0;
+
+sp<GraphicBlockBuffer> AllocateInputGraphicBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const sp<AMessage> &format,
         uint32_t pixelFormat,
@@ -45,9 +47,13 @@
         return nullptr;
     }
 
+    int64_t usageValue = 0;
+    (void)format->findInt64("android._C2MemoryUsage", &usageValue);
+    C2MemoryUsage fullUsage{usageValue | usage.expected};
+
     std::shared_ptr<C2GraphicBlock> block;
     c2_status_t err = pool->fetchGraphicBlock(
-            width, height, pixelFormat, usage, &block);
+            width, height, pixelFormat, fullUsage, &block);
     if (err != C2_OK) {
         ALOGD("fetch graphic block failed: %d", err);
         return nullptr;
@@ -132,6 +138,7 @@
     if (!copy->copy(c2buffer)) {
         return nullptr;
     }
+    copy->meta()->extend(buffer->meta());
     return copy;
 }
 
@@ -199,6 +206,61 @@
     mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
 }
 
+bool OutputBuffers::convert(
+        const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst) {
+    if (src && src->data().type() != C2BufferData::LINEAR) {
+        return false;
+    }
+    int32_t configEncoding = kAudioEncodingPcm16bit;
+    int32_t codecEncoding = kAudioEncodingPcm16bit;
+    if (mFormat->findInt32("android._codec-pcm-encoding", &codecEncoding)
+            && mFormat->findInt32("android._config-pcm-encoding", &configEncoding)) {
+        if (mSrcEncoding != codecEncoding || mDstEncoding != configEncoding) {
+            if (codecEncoding != configEncoding) {
+                mDataConverter = AudioConverter::Create(
+                        (AudioEncoding)codecEncoding, (AudioEncoding)configEncoding);
+                ALOGD_IF(mDataConverter, "[%s] Converter created from %d to %d",
+                         mName, codecEncoding, configEncoding);
+                mFormatWithConverter = mFormat->dup();
+                mFormatWithConverter->setInt32(KEY_PCM_ENCODING, configEncoding);
+            } else {
+                mDataConverter = nullptr;
+                mFormatWithConverter = nullptr;
+            }
+            mSrcEncoding = codecEncoding;
+            mDstEncoding = configEncoding;
+        }
+        if (int encoding; !mFormat->findInt32(KEY_PCM_ENCODING, &encoding)
+                || encoding != mDstEncoding) {
+            // keep the advertised encoding in sync with the configured encoding
+            mFormat->setInt32(KEY_PCM_ENCODING, configEncoding);
+        }
+    }
+    if (!mDataConverter) {
+        return false;
+    }
+    sp<MediaCodecBuffer> srcBuffer;
+    if (src) {
+        srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+    } else {
+        srcBuffer = new MediaCodecBuffer(mFormat, new ABuffer(0));
+    }
+    if (!srcBuffer) {
+        return false;
+    }
+    if (!*dst) {
+        *dst = new Codec2Buffer(
+                mFormat,
+                new ABuffer(mDataConverter->targetSize(srcBuffer->size())));
+    }
+    sp<MediaCodecBuffer> dstBuffer = *dst;
+    status_t err = mDataConverter->convert(srcBuffer, dstBuffer);
+    if (err != OK) {
+        ALOGD("[%s] buffer conversion failed: %d", mName, err);
+        return false;
+    }
+    dstBuffer->setFormat(mFormatWithConverter);
+    return true;
+}
+
 void OutputBuffers::clearStash() {
     mPending.clear();
     mReorderStash.clear();
@@ -236,7 +298,7 @@
         int32_t flags,
         const sp<AMessage>& format,
         const C2WorkOrdinalStruct& ordinal) {
-    bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
+    bool eos = flags & BUFFER_FLAG_END_OF_STREAM;
     if (!buffer && eos) {
         // TRICKY: we may be violating ordering of the stash here. Because we
         // don't expect any more emplace() calls after this, the ordering should
@@ -244,7 +306,7 @@
         mReorderStash.emplace_back(
                 buffer, notify, timestamp, flags, format, ordinal);
     } else {
-        flags = flags & ~MediaCodec::BUFFER_FLAG_EOS;
+        flags = flags & ~BUFFER_FLAG_END_OF_STREAM;
         auto it = mReorderStash.begin();
         for (; it != mReorderStash.end(); ++it) {
             if (less(ordinal, it->ordinal)) {
@@ -255,7 +317,7 @@
                 buffer, notify, timestamp, flags, format, ordinal);
         if (eos) {
             mReorderStash.back().flags =
-                mReorderStash.back().flags | MediaCodec::BUFFER_FLAG_EOS;
+                mReorderStash.back().flags | BUFFER_FLAG_END_OF_STREAM;
         }
     }
     while (!mReorderStash.empty() && mReorderStash.size() > mDepth) {
@@ -292,7 +354,7 @@
 
     // Flushing mReorderStash because no other buffers should come after output
     // EOS.
-    if (entry.flags & MediaCodec::BUFFER_FLAG_EOS) {
+    if (entry.flags & BUFFER_FLAG_END_OF_STREAM) {
         // Flush reorder stash
         setReorderDepth(0);
     }
@@ -887,6 +949,10 @@
         return nullptr;
     }
 
+    int64_t usageValue = 0;
+    (void)format->findInt64("android._C2MemoryUsage", &usageValue);
+    usage = C2MemoryUsage(usage.expected | usageValue);
+
     std::shared_ptr<C2LinearBlock> block;
     c2_status_t err = pool->fetchLinearBlock(capacity, usage, &block);
     if (err != C2_OK || block == nullptr) {
@@ -1031,7 +1097,7 @@
             [pool = mPool, format = mFormat, lbp = mLocalBufferPool, pixelFormat]()
                     -> sp<Codec2Buffer> {
                 C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
-                return AllocateGraphicBuffer(
+                return AllocateInputGraphicBuffer(
                         pool, format, pixelFormat, usage, lbp);
             });
     return std::move(array);
@@ -1042,10 +1108,8 @@
 }
 
 sp<Codec2Buffer> GraphicInputBuffers::createNewBuffer() {
-    int64_t usageValue = 0;
-    (void)mFormat->findInt64("android._C2MemoryUsage", &usageValue);
-    C2MemoryUsage usage{usageValue | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE};
-    return AllocateGraphicBuffer(
+    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+    return AllocateInputGraphicBuffer(
             mPool, mFormat, extractPixelFormat(mFormat), usage, mLocalBufferPool);
 }
 
@@ -1078,7 +1142,7 @@
         return err;
     }
     c2Buffer->setFormat(mFormat);
-    if (!c2Buffer->copy(buffer)) {
+    if (!convert(buffer, &c2Buffer) && !c2Buffer->copy(buffer)) {
         ALOGD("[%s] copy buffer failed", mName);
         return WOULD_BLOCK;
     }
@@ -1194,11 +1258,14 @@
         const std::shared_ptr<C2Buffer> &buffer,
         size_t *index,
         sp<MediaCodecBuffer> *clientBuffer) {
-    sp<Codec2Buffer> newBuffer = wrap(buffer);
-    if (newBuffer == nullptr) {
-        return NO_MEMORY;
+    sp<Codec2Buffer> newBuffer;
+    if (!convert(buffer, &newBuffer)) {
+        newBuffer = wrap(buffer);
+        if (newBuffer == nullptr) {
+            return NO_MEMORY;
+        }
+        newBuffer->setFormat(mFormat);
     }
-    newBuffer->setFormat(mFormat);
     *index = mImpl.assignSlot(newBuffer);
     handleImageData(newBuffer);
     *clientBuffer = newBuffer;
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 995d3a4..c8e9930 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -18,9 +18,11 @@
 
 #define CCODEC_BUFFERS_H_
 
+#include <optional>
 #include <string>
 
 #include <C2Config.h>
+#include <DataConverter.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/MediaCodecBuffer.h>
 
@@ -382,6 +384,14 @@
      */
     void submit(const sp<MediaCodecBuffer> &buffer);
 
+    /**
+     * Apply DataConverter from |src| to |*dst| if needed. If |*dst| is nullptr,
+     * a new buffer is allocated.
+     *
+     * Returns true if conversion was needed and executed; false otherwise.
+     */
+    bool convert(const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst);
+
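The call pattern, condensed from the CCodecBuffers.cpp changes in this same patch (not the literal framework code):

    sp<Codec2Buffer> clientBuffer;              // may be a pre-allocated slot or null
    if (!convert(c2Buffer, &clientBuffer)) {    // no PCM conversion needed or possible
        clientBuffer = wrap(c2Buffer);          // fall back to exposing the C2 buffer directly
        clientBuffer->setFormat(mFormat);
    }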
 private:
     // SkipCutBuffer
     int32_t mDelay;
@@ -391,6 +401,12 @@
 
     void setSkipCutBuffer(int32_t skip, int32_t cut);
 
+    // DataConverter
+    sp<DataConverter> mDataConverter;
+    sp<AMessage> mFormatWithConverter;
+    std::optional<int32_t> mSrcEncoding;
+    std::optional<int32_t> mDstEncoding;
+
     // Output stash
 
     // Struct for an entry in the output stash (mPending and mReorderStash)
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 09e24be..5208be6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -16,10 +16,15 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "CCodecConfig"
+
+#include <initializer_list>
+
 #include <cutils/properties.h>
 #include <log/log.h>
 #include <utils/NativeHandle.h>
 
+#include <android-base/properties.h>
+
 #include <C2Component.h>
 #include <C2Param.h>
 #include <util/C2InterfaceHelper.h>
@@ -324,7 +329,8 @@
     : mInputFormat(new AMessage),
       mOutputFormat(new AMessage),
       mUsingSurface(false),
-      mTunneled(false) { }
+      mTunneled(false),
+      mPushBlankBuffersOnStop(false) { }
 
 void CCodecConfig::initializeStandardParams() {
     typedef Domain D;
@@ -397,10 +403,10 @@
     // Rotation
     // Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
     add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
-        .limitTo(D::VIDEO & D::CODED)
+        .limitTo((D::VIDEO | D::IMAGE) & D::CODED)
         .withMappers(negate, negate));
     add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_ROTATION, "value")
-        .limitTo(D::VIDEO & D::RAW)
+        .limitTo((D::VIDEO | D::IMAGE) & D::RAW)
         .withMappers(negate, negate));
 
     // android 'video-scaling'
@@ -510,6 +516,9 @@
     add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
         .limitTo((D::VIDEO | D::IMAGE) & D::RAW));
 
+    add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+        .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
     add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
                      C2_PARAMKEY_SECURE_MODE, "value"));
 
@@ -609,10 +618,30 @@
     add(ConfigMapper("csd-0",           C2_PARAMKEY_INIT_DATA,       "value")
         .limitTo(D::OUTPUT & D::READ));
 
-    add(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
+    deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
         .limitTo(D::VIDEO & D::PARAM & D::INPUT));
 
-    add(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
+    deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
+        .limitTo(D::VIDEO & D::OUTPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".type",
+            C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "type")
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".data",
+            C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "data")
+        .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".type",
+            C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "type")
+        .limitTo(D::VIDEO & D::OUTPUT));
+
+    add(ConfigMapper(
+            std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".data",
+            C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "data")
         .limitTo(D::VIDEO & D::OUTPUT));
 
     add(ConfigMapper(C2_PARAMKEY_TEMPORAL_LAYERING, C2_PARAMKEY_TEMPORAL_LAYERING, "")
@@ -657,24 +686,29 @@
     add(ConfigMapper(KEY_SAMPLE_RATE,   C2_PARAMKEY_CODED_SAMPLE_RATE,  "value")
         .limitTo(D::AUDIO & D::CODED));
 
-    add(ConfigMapper(KEY_PCM_ENCODING,  C2_PARAMKEY_PCM_ENCODING,       "value")
+    auto pcmEncodingMapper = [](C2Value v) -> C2Value {
+        int32_t value;
+        C2Config::pcm_encoding_t to;
+        if (v.get(&value) && C2Mapper::map(value, &to)) {
+            return to;
+        }
+        return C2Value();
+    };
+    auto pcmEncodingReverse = [](C2Value v) -> C2Value {
+        C2Config::pcm_encoding_t value;
+        int32_t to;
+        using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(value)>::type;
+        if (v.get((C2ValueType*)&value) && C2Mapper::map(value, &to)) {
+            return to;
+        }
+        return C2Value();
+    };
+    add(ConfigMapper(KEY_PCM_ENCODING,              C2_PARAMKEY_PCM_ENCODING, "value")
         .limitTo(D::AUDIO)
-        .withMappers([](C2Value v) -> C2Value {
-            int32_t value;
-            C2Config::pcm_encoding_t to;
-            if (v.get(&value) && C2Mapper::map(value, &to)) {
-                return to;
-            }
-            return C2Value();
-        }, [](C2Value v) -> C2Value {
-            C2Config::pcm_encoding_t value;
-            int32_t to;
-            using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(value)>::type;
-            if (v.get((C2ValueType*)&value) && C2Mapper::map(value, &to)) {
-                return to;
-            }
-            return C2Value();
-        }));
+        .withMappers(pcmEncodingMapper, pcmEncodingReverse));
+    add(ConfigMapper("android._codec-pcm-encoding", C2_PARAMKEY_PCM_ENCODING, "value")
+        .limitTo(D::AUDIO & D::READ)
+        .withMappers(pcmEncodingMapper, pcmEncodingReverse));
 
     add(ConfigMapper(KEY_IS_ADTS, C2_PARAMKEY_AAC_PACKAGING, "value")
         .limitTo(D::AUDIO & D::CODED)
@@ -897,6 +931,9 @@
     add(ConfigMapper(KEY_MAX_OUTPUT_CHANNEL_COUNT, C2_PARAMKEY_MAX_CHANNEL_COUNT, "value")
         .limitTo(D::AUDIO & (D::CONFIG | D::PARAM | D::READ)));
 
+    add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
+        .limitTo(D::AUDIO & D::DECODER & D::READ));
+
     add(ConfigMapper(KEY_AAC_SBR_MODE, C2_PARAMKEY_AAC_SBR_MODE, "value")
         .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM | D::READ))
         .withMapper([](C2Value v) -> C2Value {
@@ -951,9 +988,39 @@
             return value == 0 ? C2_FALSE : C2_TRUE;
         }));
 
-    /* still to do
-    constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
+    add(ConfigMapper("android._tunnel-peek-set-legacy", C2_PARAMKEY_TUNNEL_PEEK_MODE, "value")
+        .limitTo(D::PARAM & D::VIDEO & D::DECODER)
+        .withMapper([](C2Value v) -> C2Value {
+          int32_t value = 0;
+          (void)v.get(&value);
+          return value == 0
+              ? C2Value(C2PlatformConfig::SPECIFIED_PEEK)
+              : C2Value(C2PlatformConfig::UNSPECIFIED_PEEK);
+        }));
 
+    add(ConfigMapper(KEY_VIDEO_QP_AVERAGE, C2_PARAMKEY_AVERAGE_QP, "value")
+        .limitTo(D::ENCODER & D::VIDEO & D::READ));
+
+    add(ConfigMapper(KEY_PICTURE_TYPE, C2_PARAMKEY_PICTURE_TYPE, "value")
+        .limitTo(D::ENCODER & D::VIDEO & D::READ)
+        .withMappers([](C2Value v) -> C2Value {
+            int32_t sdk;
+            C2Config::picture_type_t c2;
+            if (v.get(&sdk) && C2Mapper::map(sdk, &c2)) {
+                return C2Value(c2);
+            }
+            return C2Value();
+        }, [](C2Value v) -> C2Value {
+            C2Config::picture_type_t c2;
+            int32_t sdk = PICTURE_TYPE_UNKNOWN;
+            using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+            if (v.get((C2ValueType*)&c2) && C2Mapper::map(c2, &sdk)) {
+                return sdk;
+            }
+            return C2Value();
+        }));
+
+    /* still to do
        not yet used by MediaCodec, but defined as MediaFormat
     KEY_AUDIO_SESSION_ID // we use "audio-hw-sync"
     KEY_OUTPUT_REORDER_DEPTH
@@ -1060,6 +1127,13 @@
                     C2_PARAMKEY_SURFACE_SCALING_MODE);
         } else {
             addLocalParam(new C2StreamColorAspectsInfo::input(0u), C2_PARAMKEY_COLOR_ASPECTS);
+
+            if (domain.value == C2Component::DOMAIN_VIDEO) {
+                addLocalParam(new C2AndroidStreamAverageBlockQuantizationInfo::output(0u, 0),
+                              C2_PARAMKEY_AVERAGE_QP);
+                addLocalParam(new C2StreamPictureTypeInfo::output(0u, 0),
+                              C2_PARAMKEY_PICTURE_TYPE);
+            }
         }
     }
 
@@ -1094,6 +1168,17 @@
         }
     }
 
+    // Parameters that are not subscribed initially, but can be subscribed
+    // upon explicit request.
+    static const std::initializer_list<C2Param::Index> kOptionalParams = {
+        C2AndroidStreamAverageBlockQuantizationInfo::output::PARAM_TYPE,
+        C2StreamPictureTypeInfo::output::PARAM_TYPE,
+    };
+    for (const C2Param::Index &index : kOptionalParams) {
+        mSubscribedIndices.erase(index);
+    }
+    subscribeToConfigUpdate(configurable, {}, C2_MAY_BLOCK);
+
     return OK;
 }
 
@@ -1101,15 +1186,21 @@
         const std::shared_ptr<Codec2Client::Configurable> &configurable,
         const std::vector<C2Param::Index> &indices,
         c2_blocking_t blocking) {
+    static const int32_t kProductFirstApiLevel =
+        base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+    static const int32_t kBoardApiLevel =
+        base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+    static const int32_t kFirstApiLevel =
+        (kBoardApiLevel != 0) ? kBoardApiLevel : kProductFirstApiLevel;
     mSubscribedIndices.insert(indices.begin(), indices.end());
-    // TODO: enable this when components no longer crash on this config
-    if (mSubscribedIndices.size() != mSubscribedIndicesSize && false) {
-        std::vector<uint32_t> indices;
+    if (mSubscribedIndices.size() != mSubscribedIndicesSize
+            && kFirstApiLevel >= __ANDROID_API_T__) {
+        std::vector<uint32_t> indicesVector;
         for (C2Param::Index ix : mSubscribedIndices) {
-            indices.push_back(ix);
+            indicesVector.push_back(ix);
         }
         std::unique_ptr<C2SubscribedParamIndicesTuning> subscribeTuning =
-            C2SubscribedParamIndicesTuning::AllocUnique(indices);
+            C2SubscribedParamIndicesTuning::AllocUnique(indicesVector);
         std::vector<std::unique_ptr<C2SettingResult>> results;
         c2_status_t c2Err = configurable->config({ subscribeTuning.get() }, blocking, &results);
         if (c2Err != C2_OK && c2Err != C2_BAD_INDEX) {
@@ -1119,6 +1210,20 @@
         ALOGV("Subscribed to %zu params", mSubscribedIndices.size());
         mSubscribedIndicesSize = mSubscribedIndices.size();
     }
+#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+    ALOGV("subscribed to %zu params:", mSubscribedIndices.size());
+    std::stringstream ss;
+    for (const C2Param::Index &index : mSubscribedIndices) {
+        ss << index << " ";
+        if (ss.str().length() > 70) {
+            ALOGV("%s", ss.str().c_str());
+            std::stringstream().swap(ss);
+        }
+    }
+    if (!ss.str().empty()) {
+        ALOGV("%s", ss.str().c_str());
+    }
+#endif
     return OK;
 }
 
@@ -1143,6 +1248,12 @@
     bool changed = false;
     for (std::unique_ptr<C2Param> &p : configUpdate) {
         if (p && *p) {
+            // Allow unsubscribed vendor parameters to go through --- it may be
+            // later handled by the format shaper.
+            if (!p->isVendor() && mSubscribedIndices.count(p->index()) == 0) {
+                ALOGV("updateConfiguration: skipped unsubscribed param %08x", p->index());
+                continue;
+            }
             auto insertion = mCurrentConfig.emplace(p->index(), nullptr);
             if (insertion.second || *insertion.first->second != *p) {
                 if (mSupportedIndices.count(p->index()) || mLocalParams.count(p->index())) {
@@ -1513,6 +1624,22 @@
             msg->removeEntryAt(msg->findEntryByName("cta861.max-cll"));
             msg->removeEntryAt(msg->findEntryByName("cta861.max-fall"));
         }
+
+        // HDR dynamic info
+        std::string keyPrefix = input ? C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO
+                                      : C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO;
+        std::string typeKey = keyPrefix + ".type";
+        std::string dataKey = keyPrefix + ".data";
+        int32_t type;
+        sp<ABuffer> data;
+        if (msg->findInt32(typeKey.c_str(), &type)
+                && msg->findBuffer(dataKey.c_str(), &data)) {
+            if (type == HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
+                msg->setBuffer(KEY_HDR10_PLUS_INFO, data);
+                msg->removeEntryAt(msg->findEntryByName(typeKey.c_str()));
+                msg->removeEntryAt(msg->findEntryByName(dataKey.c_str()));
+            }
+        }
     }
 
     ALOGV("converted to SDK values as %s", msg->debugString().c_str());
@@ -1596,6 +1723,27 @@
                 params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
             }
         }
+
+        // add HDR format for video encoding
+        if (configDomain == IS_CONFIG) {
+            // don't assume here that transfer is set for HDR, only require it for HLG
+            int transfer = 0;
+            params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+            int profile;
+            if (params->findInt32(KEY_PROFILE, &profile)) {
+                std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+                    C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+                C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+                if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+                    if (c2 == C2Config::hdr_format_t::HLG &&
+                        transfer != COLOR_TRANSFER_HLG) {
+                        c2 = C2Config::hdr_format_t::UNKNOWN;
+                    }
+                    params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+                }
+            }
+        }
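Concretely, given the mapping above: an HDR10 or HDR10+ capable profile is reported to the component as the corresponding C2 hdr_format regardless of KEY_COLOR_TRANSFER, while a profile that maps to HLG is only reported as HLG when the client also configured COLOR_TRANSFER_HLG; otherwise the value is downgraded to UNKNOWN.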
     }
 
     {   // reflect temporal layering into a binary blob
@@ -1693,6 +1841,16 @@
                 params->setFloat("cta861.max-fall", meta->sType1.mMaxFrameAverageLightLevel);
             }
         }
+
+        sp<ABuffer> hdrDynamicInfo;
+        if (params->findBuffer(KEY_HDR10_PLUS_INFO, &hdrDynamicInfo)) {
+            for (const std::string &prefix : { C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO,
+                                               C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO }) {
+                params->setInt32((prefix + ".type").c_str(),
+                                 HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+                params->setBuffer((prefix + ".data").c_str(), hdrDynamicInfo);
+            }
+        }
     }
 
     // this is to verify that we set proper signedness for standard parameters
@@ -1885,7 +2043,9 @@
     names->clear();
     // TODO: expand to standard params
     for (const auto &[key, desc] : mVendorParams) {
-        names->push_back(key);
+        if (desc->isVisible()) {
+            names->push_back(key);
+        }
     }
     return OK;
 }
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 417b773..2e7b866 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -148,6 +148,8 @@
     bool mTunneled;
     sp<NativeHandle> mSidebandHandle;
 
+    bool mPushBlankBuffersOnStop;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
@@ -363,11 +365,6 @@
             const std::vector<std::string> &names,
             c2_blocking_t blocking = C2_DONT_BLOCK);
 
-private:
-
-    /// initializes the standard MediaCodec to Codec 2.0 params mapping
-    void initializeStandardParams();
-
     /// Adds indices to the subscribed indices, and updated subscription to component
     /// \param blocking blocking mode to use with the component
     status_t subscribeToConfigUpdate(
@@ -375,6 +372,11 @@
             const std::vector<C2Param::Index> &indices,
             c2_blocking_t blocking = C2_DONT_BLOCK);
 
+private:
+
+    /// initializes the standard MediaCodec to Codec 2.0 params mapping
+    void initializeStandardParams();
+
     /// Gets SDK format from codec 2.0 reflected configuration
     /// \param domain input/output bitmask
     sp<AMessage> getFormatForDomain(
@@ -396,4 +398,3 @@
 }  // namespace android
 
 #endif  // C_CODEC_H_
-
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 4070478..cde4c72 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -16,11 +16,18 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2Buffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
+#include <aidl/android/hardware/graphics/common/Cta861_3.h>
+#include <aidl/android/hardware/graphics/common/Smpte2086.h>
 #include <android-base/properties.h>
 #include <android/hardware/cas/native/1.0/types.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android/hardware/graphics/common/1.2/types.h>
+#include <android/hardware/graphics/mapper/4.0/IMapper.h>
+#include <gralloctypes/Gralloc4.h>
 #include <hidlmemory/FrameworkUtils.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/CodecBase.h>
@@ -224,6 +231,7 @@
           mAllocatedDepth(0),
           mBackBufferSize(0),
           mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+        ATRACE_CALL();
         if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
             mClientColorFormat = COLOR_FormatYUV420Flexible;
         }
@@ -260,6 +268,39 @@
                     mInitCheck = BAD_VALUE;
                     return;
                 }
+                std::optional<int> clientBitDepth = {};
+                switch (mClientColorFormat) {
+                    case COLOR_FormatYUVP010:
+                        clientBitDepth = 10;
+                        break;
+                    case COLOR_FormatYUV411PackedPlanar:
+                    case COLOR_FormatYUV411Planar:
+                    case COLOR_FormatYUV420Flexible:
+                    case COLOR_FormatYUV420PackedPlanar:
+                    case COLOR_FormatYUV420PackedSemiPlanar:
+                    case COLOR_FormatYUV420Planar:
+                    case COLOR_FormatYUV420SemiPlanar:
+                    case COLOR_FormatYUV422Flexible:
+                    case COLOR_FormatYUV422PackedPlanar:
+                    case COLOR_FormatYUV422PackedSemiPlanar:
+                    case COLOR_FormatYUV422Planar:
+                    case COLOR_FormatYUV422SemiPlanar:
+                    case COLOR_FormatYUV444Flexible:
+                    case COLOR_FormatYUV444Interleaved:
+                        clientBitDepth = 8;
+                        break;
+                    default:
+                        // leave clientBitDepth unset; the bit-depth check below is skipped
+                        break;
+                }
+                // conversion fails if the client bit depth and the component bit depth differ
+                if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
+                    ALOGD("Bit depth of client (%d) and component (%d) differ",
+                          *clientBitDepth, bitDepth);
+                    mInitCheck = BAD_VALUE;
+                    return;
+                }
                 C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
                 C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
                 C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
@@ -358,21 +399,22 @@
                         break;
 
                     case COLOR_FormatYUVP010:
+                        // stride is in bytes
                         mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                         mediaImage->mPlane[mediaImage->Y].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                         mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
 
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride * 2;
+                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                         mediaImage->mPlane[mediaImage->U].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                         mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
 
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 2 + 2;
+                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
                         mediaImage->mPlane[mediaImage->V].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                         mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
                         if (tryWrapping) {
@@ -533,8 +575,8 @@
                 mInitCheck = BAD_VALUE;
                 return;
             }
-            bufferSize += stride * vStride
-                    / plane.rowSampling / plane.colSampling * divUp(mAllocatedDepth, 8u);
+            // stride is in bytes
+            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
         }
 
         mBackBufferSize = bufferSize;
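Both "stride is in bytes" fixes above follow the same convention. As a purely illustrative example, a 1920x1080 P010 buffer with 2 bytes per luma sample and a row alignment of 4096 reports stride = 4096 (bytes, not pixels); the interleaved UV plane therefore starts at stride * vStride rather than stride * vStride * 2, and the per-plane size term above likewise no longer multiplies by the byte depth.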
@@ -575,6 +617,7 @@
      * Copy C2GraphicView to MediaImage2.
      */
     status_t copyToMediaImage() {
+        ATRACE_CALL();
         if (mInitCheck != OK) {
             return mInitCheck;
         }
@@ -613,7 +656,9 @@
         const sp<AMessage> &format,
         const std::shared_ptr<C2GraphicBlock> &block,
         std::function<sp<ABuffer>(size_t)> alloc) {
+    ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
     C2GraphicView view(block->map().get());
+    ATRACE_END();
     if (view.error() != C2_OK) {
         ALOGD("C2GraphicBlock::map failed: %d", view.error());
         return nullptr;
@@ -658,6 +703,7 @@
 }
 
 std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
+    ATRACE_CALL();
     uint32_t width = mView.width();
     uint32_t height = mView.height();
     if (!mWrapped) {
@@ -746,8 +792,10 @@
         ALOGD("C2Buffer precond fail");
         return nullptr;
     }
+    ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
     std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
             buffer->data().graphicBlocks()[0].map().get()));
+    ATRACE_END();
     std::unique_ptr<const C2GraphicView> holder;
 
     GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
@@ -787,8 +835,14 @@
         ALOGD("format had no width / height");
         return nullptr;
     }
-    // NOTE: we currently only support YUV420 formats for byte-buffer mode.
-    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * 3 / 2));
+    int32_t colorFormat = COLOR_FormatYUV420Flexible;
+    int32_t bpp = 12;  // 8(Y) + 2(U) + 2(V)
+    if (format->findInt32(KEY_COLOR_FORMAT, &colorFormat)) {
+        if (colorFormat == COLOR_FormatYUVP010) {
+            bpp = 24;  // 16(Y) + 4(U) + 4(V)
+        }
+    }
+    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
     return new ConstGraphicBlockBuffer(
             format,
             aBuffer,
@@ -842,11 +896,13 @@
         return false;
     }
 
+    ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
     GraphicView2MediaImageConverter converter(
             buffer->data().graphicBlocks()[0].map().get(),
             // FIXME: format() is not const, but we cannot change it, so do a const cast here
             const_cast<ConstGraphicBlockBuffer *>(this)->format(),
             true /* copy */);
+    ATRACE_END();
     if (converter.initCheck() != OK) {
         ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
         return false;
@@ -941,4 +997,250 @@
     return const_cast<native_handle_t *>(mBlock->handle());
 }
 
+using ::aidl::android::hardware::graphics::common::Cta861_3;
+using ::aidl::android::hardware::graphics::common::Smpte2086;
+
+using ::android::gralloc4::MetadataType_Cta861_3;
+using ::android::gralloc4::MetadataType_Smpte2086;
+using ::android::gralloc4::MetadataType_Smpte2094_40;
+
+using ::android::hardware::Return;
+using ::android::hardware::hidl_vec;
+
+using Error4 = ::android::hardware::graphics::mapper::V4_0::Error;
+using IMapper4 = ::android::hardware::graphics::mapper::V4_0::IMapper;
+
+namespace {
+
+sp<IMapper4> GetMapper4() {
+    static sp<IMapper4> sMapper = IMapper4::getService();
+    return sMapper;
+}
+
+class Gralloc4Buffer {
+public:
+    Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
+        sp<IMapper4> mapper = GetMapper4();
+        if (!mapper) {
+            return;
+        }
+        // Unwrap raw buffer handle from the C2Handle
+        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
+        if (!nh) {
+            return;
+        }
+        // Import the raw handle so IMapper can use the buffer. The imported
+        // handle must be freed when the client is done with the buffer.
+        mapper->importBuffer(
+                hardware::hidl_handle(nh),
+                [&](const Error4 &error, void *buffer) {
+                    if (error == Error4::NONE) {
+                        mBuffer = buffer;
+                    }
+                });
+
+        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+        //         does not clone the fds. Thus we need to delete the handle
+        //         without closing it.
+        native_handle_delete(nh);
+    }
+
+    ~Gralloc4Buffer() {
+        sp<IMapper4> mapper = GetMapper4();
+        if (mapper && mBuffer) {
+            // Free the imported buffer handle. This does not release the
+            // underlying buffer itself.
+            mapper->freeBuffer(mBuffer);
+        }
+    }
+
+    void *get() const { return mBuffer; }
+    operator bool() const { return (mBuffer != nullptr); }
+private:
+    void *mBuffer;
+};
+
+}  // namespace
+
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+        const C2Handle *const handle,
+        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
+    c2_status_t err = C2_OK;
+    sp<IMapper4> mapper = GetMapper4();
+    Gralloc4Buffer buffer(handle);
+    if (!mapper || !buffer) {
+        // Gralloc4 not supported; nothing to do
+        return err;
+    }
+    Error4 mapperErr = Error4::NONE;
+    if (staticInfo) {
+        ALOGV("Grabbing static HDR info from gralloc4 metadata");
+        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
+        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
+        (*staticInfo)->maxCll = 0;
+        (*staticInfo)->maxFall = 0;
+        IMapper4::get_cb cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+            mapperErr = err;
+            if (err != Error4::NONE) {
+                return;
+            }
+
+            std::optional<Smpte2086> smpte2086;
+            gralloc4::decodeSmpte2086(vec, &smpte2086);
+            if (smpte2086) {
+                (*staticInfo)->mastering.red.x    = smpte2086->primaryRed.x;
+                (*staticInfo)->mastering.red.y    = smpte2086->primaryRed.y;
+                (*staticInfo)->mastering.green.x  = smpte2086->primaryGreen.x;
+                (*staticInfo)->mastering.green.y  = smpte2086->primaryGreen.y;
+                (*staticInfo)->mastering.blue.x   = smpte2086->primaryBlue.x;
+                (*staticInfo)->mastering.blue.y   = smpte2086->primaryBlue.y;
+                (*staticInfo)->mastering.white.x  = smpte2086->whitePoint.x;
+                (*staticInfo)->mastering.white.y  = smpte2086->whitePoint.y;
+
+                (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
+                (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
+            } else {
+                mapperErr = Error4::BAD_VALUE;
+            }
+        };
+        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
+        if (!ret.isOk()) {
+            err = C2_REFUSED;
+        } else if (mapperErr != Error4::NONE) {
+            err = C2_CORRUPTED;
+        }
+        cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+            mapperErr = err;
+            if (err != Error4::NONE) {
+                return;
+            }
+
+            std::optional<Cta861_3> cta861_3;
+            gralloc4::decodeCta861_3(vec, &cta861_3);
+            if (cta861_3) {
+                (*staticInfo)->maxCll   = cta861_3->maxContentLightLevel;
+                (*staticInfo)->maxFall  = cta861_3->maxFrameAverageLightLevel;
+            } else {
+                mapperErr = Error4::BAD_VALUE;
+            }
+        };
+        ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
+        if (!ret.isOk()) {
+            err = C2_REFUSED;
+        } else if (mapperErr != Error4::NONE) {
+            err = C2_CORRUPTED;
+        }
+    }
+    if (dynamicInfo) {
+        ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
+        dynamicInfo->reset();
+        IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+            mapperErr = err;
+            if (err != Error4::NONE) {
+                return;
+            }
+            if (!dynamicInfo) {
+                return;
+            }
+            *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
+                    vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+            memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
+        };
+        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
+        if (!ret.isOk() || mapperErr != Error4::NONE) {
+            dynamicInfo->reset();
+        }
+    }
+
+    return err;
+}
+
+c2_status_t SetHdrMetadataToGralloc4Handle(
+        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+        const C2Handle *const handle) {
+    c2_status_t err = C2_OK;
+    sp<IMapper4> mapper = GetMapper4();
+    Gralloc4Buffer buffer(handle);
+    if (!mapper || !buffer) {
+        // Gralloc4 not supported; nothing to do
+        return err;
+    }
+    if (staticInfo && *staticInfo) {
+        ALOGV("Setting static HDR info as gralloc4 metadata");
+        std::optional<Smpte2086> smpte2086 = Smpte2086{
+            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
+            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
+            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
+            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
+            staticInfo->mastering.maxLuminance,
+            staticInfo->mastering.minLuminance,
+        };
+        hidl_vec<uint8_t> vec;
+        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
+                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
+                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
+                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
+                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
+                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
+                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
+                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
+                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
+                && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
+            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
+            if (!ret.isOk()) {
+                err = C2_REFUSED;
+            } else if (ret != Error4::NONE) {
+                err = C2_CORRUPTED;
+            }
+        }
+        std::optional<Cta861_3> cta861_3 = Cta861_3{
+            staticInfo->maxCll,
+            staticInfo->maxFall,
+        };
+        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
+                && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
+            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
+            if (!ret.isOk()) {
+                err = C2_REFUSED;
+            } else if (ret != Error4::NONE) {
+                err = C2_CORRUPTED;
+            }
+        }
+    }
+    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
+        ALOGV("Setting dynamic HDR info as gralloc4 metadata");
+        std::optional<IMapper4::MetadataType> metadataType;
+        switch (dynamicInfo->m.type_) {
+        case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
+            // TODO
+            break;
+        case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40:
+            metadataType = MetadataType_Smpte2094_40;
+            break;
+        }
+
+        if (metadataType) {
+            std::vector<uint8_t> smpte2094_40;
+            smpte2094_40.resize(dynamicInfo->flexCount());
+            memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+
+            hidl_vec<uint8_t> vec;
+            if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
+                Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
+                if (!ret.isOk()) {
+                    err = C2_REFUSED;
+                } else if (ret != Error4::NONE) {
+                    err = C2_CORRUPTED;
+                }
+            }
+        } else {
+            err = C2_BAD_VALUE;
+        }
+    }
+
+    return err;
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index dc788cd..b02b042 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -19,6 +19,7 @@
 #define CODEC2_BUFFER_H_
 
 #include <C2Buffer.h>
+#include <C2Config.h>
 
 #include <binder/IMemory.h>
 #include <media/hardware/VideoAPI.h>
@@ -391,6 +392,36 @@
     int32_t mHeapSeqNum;
 };
 
+/**
+ * Get HDR metadata from Gralloc4 handle.
+ *
+ * \param[in]   handle      handle of the allocation
+ * \param[out]  staticInfo  HDR static info to be filled. Ignored if null;
+ *                          if |handle| is invalid or does not contain the metadata,
+ *                          the shared_ptr is reset.
+ * \param[out]  dynamicInfo HDR dynamic info to be filled. Ignored if null;
+ *                          if |handle| is invalid or does not contain the metadata,
+ *                          the shared_ptr is reset.
+ * \return C2_OK if successful
+ */
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+        const C2Handle *const handle,
+        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
+
+/**
+ * Set HDR metadata to Gralloc4 handle.
+ *
+ * \param[in]   staticInfo  HDR static info to set. Ignored if null or invalid.
+ * \param[in]   dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
+ * \param[out]  handle      handle of the allocation.
+ * \return C2_OK if successful
+ */
+c2_status_t SetHdrMetadataToGralloc4Handle(
+        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+        const C2Handle *const handle);
+
 }  // namespace android
 
 #endif  // CODEC2_BUFFER_H_
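A sketch of how GetHdrMetadataFromGralloc4Handle() might be used on the input path (variable names are illustrative; the actual call sites are outside this excerpt):

    std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> staticInfo;
    std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
    if (GetHdrMetadataFromGralloc4Handle(block->handle(), &staticInfo, &dynamicInfo) == C2_OK) {
        // attach whichever infos were populated to the queued work so the
        // component sees the HDR metadata carried by the input buffer
    }

SetHdrMetadataToGralloc4Handle() goes the other way, stamping component-reported static and dynamic HDR info onto an output block's handle before it is displayed.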
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 7c4bfb6..453a0d2 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -33,6 +33,7 @@
 #include <OMX_Video.h>
 #include <OMX_VideoExt.h>
 #include <OMX_AsString.h>
+#include <SurfaceFlingerProperties.sysprop.h>
 
 #include <android/hardware/media/omx/1.0/IOmx.h>
 #include <android/hardware/media/omx/1.0/IOmxObserver.h>
@@ -54,6 +55,9 @@
 
 using Traits = C2Component::Traits;
 
+// HAL pixel format -> framework color format
+typedef std::map<uint32_t, int32_t> PixelFormatMap;
+
 namespace /* unnamed */ {
 
 bool hasPrefix(const std::string& s, const char* prefix) {
@@ -67,6 +71,26 @@
             s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
 }
 
+std::optional<int32_t> findFrameworkColorFormat(
+        const C2FlexiblePixelFormatDescriptorStruct &desc) {
+    switch (desc.bitDepth) {
+        case 8u:
+            if (desc.layout == C2Color::PLANAR_PACKED
+                    || desc.layout == C2Color::SEMIPLANAR_PACKED) {
+                return COLOR_FormatYUV420Flexible;
+            }
+            break;
+        case 10u:
+            if (desc.layout == C2Color::SEMIPLANAR_PACKED) {
+                return COLOR_FormatYUVP010;
+            }
+            break;
+        default:
+            break;
+    }
+    return std::nullopt;
+}
+
 // returns true if component advertised supported profile level(s)
 bool addSupportedProfileLevels(
         std::shared_ptr<Codec2Client::Interface> intf,
@@ -96,9 +120,12 @@
         return false;
     }
 
-    // determine if codec supports HDR
+    // determine if codec supports HDR; imply 10-bit support
     bool supportsHdr = false;
+    // determine if codec supports HDR10Plus; imply 10-bit support
     bool supportsHdr10Plus = false;
+    // determine if codec supports 10-bit format
+    bool supports10Bit = false;
 
     std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
     c2_status_t err1 = intf->querySupportedParams(&paramDescs);
@@ -110,7 +137,9 @@
                 continue;
             }
             switch (type.coreIndex()) {
-            case C2StreamHdr10PlusInfo::CORE_INDEX:
+            case C2StreamHdrDynamicMetadataInfo::CORE_INDEX:
+                [[fallthrough]];
+            case C2StreamHdr10PlusInfo::CORE_INDEX:  // will be deprecated
                 supportsHdr10Plus = true;
                 break;
             case C2StreamHdrStaticInfo::CORE_INDEX:
@@ -122,9 +151,21 @@
         }
     }
 
-    // For VP9/AV1, the static info is always propagated by framework.
+    // VP9 does not support HDR metadata in the bitstream and static metadata
+    // can always be carried by the framework. (The framework does not propagate
+    // dynamic metadata as that needs to be frame accurate.)
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
-    supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
+
+    // HDR support implies 10-bit support. AV1 codecs are also required to
+    // support 10-bit per CDD.
+    // TODO: directly check this from the component interface
+    supports10Bit = (supportsHdr || supportsHdr10Plus) || (mediaType == MIMETYPE_VIDEO_AV1);
+
+    // If the device doesn't support HDR display, then no codec on the device
+    // can advertise support for HDR profiles.
+    // Default to true to maintain backward compatibility
+    auto ret = sysprop::SurfaceFlingerProperties::has_HDR_display();
+    bool hasHDRDisplay = ret.has_value() ? *ret : true;
 
     bool added = false;
 
@@ -151,8 +192,8 @@
         if (mapper && mapper->mapProfile(pl.profile, &sdkProfile)
                 && mapper->mapLevel(pl.level, &sdkLevel)) {
             caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
-            // also list HDR profiles if component supports HDR
-            if (supportsHdr) {
+            // also list HDR profiles if component supports HDR and device has HDR display
+            if (supportsHdr && hasHDRDisplay) {
                 auto hdrMapper = C2Mapper::GetHdrProfileLevelMapper(trait.mediaType);
                 if (hdrMapper && hdrMapper->mapProfile(pl.profile, &sdkProfile)) {
                     caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
@@ -165,6 +206,12 @@
                     }
                 }
             }
+            if (supports10Bit) {
+                auto bitnessMapper = C2Mapper::GetBitDepthProfileLevelMapper(trait.mediaType, 10);
+                if (bitnessMapper && bitnessMapper->mapProfile(pl.profile, &sdkProfile)) {
+                    caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
+                }
+            }
         } else if (!mapper) {
             caps->addProfileLevel(pl.profile, pl.level);
         }
@@ -198,27 +245,78 @@
 void addSupportedColorFormats(
         std::shared_ptr<Codec2Client::Interface> intf,
         MediaCodecInfo::CapabilitiesWriter *caps,
-        const Traits& trait, const std::string &mediaType) {
-    (void)intf;
-
+        const Traits& trait, const std::string &mediaType,
+        const PixelFormatMap &pixelFormatMap) {
     // TODO: get this from intf() as well, but how do we map them to
     // MediaCodec color formats?
     bool encoder = trait.kind == C2Component::KIND_ENCODER;
     if (mediaType.find("video") != std::string::npos
             || mediaType.find("image") != std::string::npos) {
+
+        std::vector<C2FieldSupportedValuesQuery> query;
+        if (encoder) {
+            C2StreamPixelFormatInfo::input pixelFormat;
+            query.push_back(C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(pixelFormat, pixelFormat.value)));
+        } else {
+            C2StreamPixelFormatInfo::output pixelFormat;
+            query.push_back(C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(pixelFormat, pixelFormat.value)));
+        }
+        std::list<int32_t> supportedColorFormats;
+        if (intf->querySupportedValues(query, C2_DONT_BLOCK) == C2_OK) {
+            if (query[0].status == C2_OK) {
+                const C2FieldSupportedValues &fsv = query[0].values;
+                if (fsv.type == C2FieldSupportedValues::VALUES) {
+                    for (C2Value::Primitive value : fsv.values) {
+                        auto it = pixelFormatMap.find(value.u32);
+                        if (it != pixelFormatMap.end()) {
+                            auto it2 = std::find(
+                                    supportedColorFormats.begin(),
+                                    supportedColorFormats.end(),
+                                    it->second);
+                            if (it2 == supportedColorFormats.end()) {
+                                supportedColorFormats.push_back(it->second);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        auto addDefaultColorFormat = [caps, &supportedColorFormats](int32_t colorFormat) {
+            caps->addColorFormat(colorFormat);
+            auto it = std::find(
+                    supportedColorFormats.begin(), supportedColorFormats.end(), colorFormat);
+            if (it != supportedColorFormats.end()) {
+                supportedColorFormats.erase(it);
+            }
+        };
+
+        // The color formats are ordered by preference. The intention here is to advertise:
+        //   c2.android.* codecs: YUV420s, Surface, <the rest>
+        //   all other codecs:    Surface, YUV420s, <the rest>
+        // TODO: get this preference via Codec2 API
+
         // vendor video codecs prefer opaque format
         if (trait.name.find("android") == std::string::npos) {
-            caps->addColorFormat(COLOR_FormatSurface);
+            addDefaultColorFormat(COLOR_FormatSurface);
         }
-        caps->addColorFormat(COLOR_FormatYUV420Flexible);
-        caps->addColorFormat(COLOR_FormatYUV420Planar);
-        caps->addColorFormat(COLOR_FormatYUV420SemiPlanar);
-        caps->addColorFormat(COLOR_FormatYUV420PackedPlanar);
-        caps->addColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
-        // framework video encoders must support surface format, though it is unclear
-        // that they will be able to map it if it is opaque
-        if (encoder && trait.name.find("android") != std::string::npos) {
-            caps->addColorFormat(COLOR_FormatSurface);
+        addDefaultColorFormat(COLOR_FormatYUV420Flexible);
+        addDefaultColorFormat(COLOR_FormatYUV420Planar);
+        addDefaultColorFormat(COLOR_FormatYUV420SemiPlanar);
+        addDefaultColorFormat(COLOR_FormatYUV420PackedPlanar);
+        addDefaultColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
+        // Android video codecs prefer CPU-readable formats
+        if (trait.name.find("android") != std::string::npos) {
+            addDefaultColorFormat(COLOR_FormatSurface);
+        }
+
+        static const int kVendorSdkVersion = ::android::base::GetIntProperty(
+                "ro.vendor.build.version.sdk", android_get_device_api_level());
+        if (kVendorSdkVersion >= __ANDROID_API_T__) {
+            for (int32_t colorFormat : supportedColorFormats) {
+                caps->addColorFormat(colorFormat);
+            }
         }
     }
 }
@@ -410,6 +508,7 @@
         }
     }
 
+    std::map<std::string, PixelFormatMap> nameToPixelFormatMap;
     for (const Traits& trait : traits) {
         C2Component::rank_t rank = trait.rank;
 
@@ -423,8 +522,9 @@
         nameAndAliases.insert(nameAndAliases.begin(), trait.name);
         for (const std::string &nameOrAlias : nameAndAliases) {
             bool isAlias = trait.name != nameOrAlias;
+            std::shared_ptr<Codec2Client> client;
             std::shared_ptr<Codec2Client::Interface> intf =
-                Codec2Client::CreateInterfaceByName(nameOrAlias.c_str());
+                Codec2Client::CreateInterfaceByName(nameOrAlias.c_str(), &client);
             if (!intf) {
                 ALOGD("could not create interface for %s'%s'",
                         isAlias ? "alias " : "",
@@ -618,7 +718,40 @@
                         caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
                     }
                 }
-                addSupportedColorFormats(intf, caps.get(), trait, mediaType);
+
+                auto it = nameToPixelFormatMap.find(client->getServiceName());
+                if (it == nameToPixelFormatMap.end()) {
+                    it = nameToPixelFormatMap.try_emplace(client->getServiceName()).first;
+                    PixelFormatMap &pixelFormatMap = it->second;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_420_888] = COLOR_FormatYUV420Flexible;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_P010]    = COLOR_FormatYUVP010;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_1010102]  = COLOR_Format32bitABGR2101010;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_FP16]     = COLOR_Format64bitABGRFloat;
+
+                    std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
+                    std::vector<std::unique_ptr<C2Param>> heapParams;
+                    if (client->query(
+                                {},
+                                {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
+                                C2_MAY_BLOCK,
+                                &heapParams) == C2_OK
+                            && heapParams.size() == 1u) {
+                        pixelFormatInfo.reset(C2StoreFlexiblePixelFormatDescriptorsInfo::From(
+                                heapParams[0].release()));
+                    }
+                    if (pixelFormatInfo && *pixelFormatInfo) {
+                        for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
+                            C2FlexiblePixelFormatDescriptorStruct &desc =
+                                pixelFormatInfo->m.values[i];
+                            std::optional<int32_t> colorFormat = findFrameworkColorFormat(desc);
+                            if (colorFormat) {
+                                pixelFormatMap[desc.pixelFormat] = *colorFormat;
+                            }
+                        }
+                    }
+                }
+                addSupportedColorFormats(
+                        intf, caps.get(), trait, mediaType, it->second);
             }
         }
     }
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index af054c7..cb8b6ab 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -88,8 +88,7 @@
         const sp<MediaCodecBuffer> &buffer,
         std::list<std::unique_ptr<C2Work>> *items) {
     int64_t timeUs;
-    if (buffer->size() == 0u
-            || !buffer->meta()->findInt64("timeUs", &timeUs)) {
+    if (!buffer->meta()->findInt64("timeUs", &timeUs)) {
         return C2_BAD_VALUE;
     }
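// With the size() == 0 early return gone, an empty buffer that carries only a
// timestamp and the EOS flag is accepted; the separateEos cases added to
// FrameReassembler_test.cpp below exercise exactly that. A minimal sketch of
// such a buffer (illustrative only, not from this change):
//     sp<MediaCodecBuffer> eosOnly =
//             new MediaCodecBuffer(new AMessage, new ABuffer(0));
//     eosOnly->setRange(0, 0);
//     eosOnly->meta()->setInt64("timeUs", 0);  // still required by process()
//     eosOnly->meta()->setInt32("eos", 1);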
 
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 92f3754..246e563 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -23,8 +23,8 @@
         "libcodec2-internal-defaults",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/codec2/sfplugin",
+    header_libs: [
+        "libsfplugin_ccodec_internal_headers",
     ],
 
     shared_libs: [
@@ -60,13 +60,10 @@
         "MediaCodec_sanity_test.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/codec2/sfplugin",
-    ],
-
     header_libs: [
         "libmediadrm_headers",
         "libmediametrics_headers",
+        "libsfplugin_ccodec_internal_headers",
     ],
 
     shared_libs: [
diff --git a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
index 41e4fff..a471291 100644
--- a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
@@ -861,4 +861,57 @@
                     + std::to_string(std::get<3>(info.param));
         });
 
+TEST(LinearOutputBuffersTest, PcmConvertFormat) {
+    // Prepare LinearOutputBuffers
+    std::shared_ptr<LinearOutputBuffers> buffers =
+        std::make_shared<LinearOutputBuffers>("test");
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_CHANNEL_COUNT, 1);
+    format->setInt32(KEY_SAMPLE_RATE, 8000);
+    format->setInt32(KEY_PCM_ENCODING, kAudioEncodingPcmFloat);
+    format->setInt32("android._config-pcm-encoding", kAudioEncodingPcm16bit);
+    format->setInt32("android._codec-pcm-encoding", kAudioEncodingPcmFloat);
+    buffers->setFormat(format);
+
+    // Prepare a linear C2Buffer
+    std::shared_ptr<C2BlockPool> pool;
+    ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool));
+
+    std::shared_ptr<C2LinearBlock> block;
+    ASSERT_EQ(OK, pool->fetchLinearBlock(
+            1024, C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block));
+    std::shared_ptr<C2Buffer> c2Buffer =
+        C2Buffer::CreateLinearBuffer(block->share(0, 1024, C2Fence()));
+
+    // Test regular buffer convert
+    size_t index;
+    sp<MediaCodecBuffer> clientBuffer;
+    ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+    int32_t pcmEncoding = 0;
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+    // Test null buffer convert
+    ASSERT_EQ(OK, buffers->registerBuffer(nullptr, &index, &clientBuffer));
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+    // Do the same test in the array mode
+    std::shared_ptr<OutputBuffersArray> array = buffers->toArrayMode(8);
+
+    // Test regular buffer convert
+    ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+    // Test null buffer convert
+    ASSERT_EQ(OK, buffers->registerBuffer(nullptr, &index, &clientBuffer));
+    ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+    EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+    ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+}
+
 } // namespace android
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 7c660dc..3615289 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -224,6 +224,17 @@
                                 Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
                                 mInputBitrate)
                             .build());
+
+                    addParameter(
+                            DefineParam(mOutputProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                            .withDefault(new C2StreamProfileLevelInfo::output(
+                                    0u, PROFILE_UNUSED, LEVEL_UNUSED))
+                            .withFields({
+                                C2F(mOutputProfileLevel, profile).any(),
+                                C2F(mOutputProfileLevel, level).any(),
+                            })
+                            .withSetter(Setter<C2StreamProfileLevelInfo::output>)
+                            .build());
                 }
 
                 // TODO: more SDK params
@@ -241,6 +252,8 @@
             std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
             std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
             std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
+            std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
+            std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
@@ -576,4 +589,51 @@
             << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
 }
 
+typedef std::tuple<std::string, C2Config::profile_t, int32_t> HdrProfilesParams;
+
+class HdrProfilesTest
+    : public CCodecConfigTest,
+      public ::testing::WithParamInterface<HdrProfilesParams> {
+};
+
+TEST_P(HdrProfilesTest, SetFromSdk) {
+    HdrProfilesParams params = GetParam();
+    std::string mediaType = std::get<0>(params);
+    C2Config::profile_t c2Profile = std::get<1>(params);
+    int32_t sdkProfile = std::get<2>(params);
+
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, mediaType.c_str());
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_PROFILE, sdkProfile);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+
+    ASSERT_EQ(1u, configUpdate.size());
+    C2StreamProfileLevelInfo::input *pl =
+        FindParam<std::remove_pointer<decltype(pl)>::type>(configUpdate);
+    ASSERT_NE(nullptr, pl);
+    ASSERT_EQ(c2Profile, pl->profile);
+}
+
+HdrProfilesParams kHdrProfilesParams[] = {
+    std::make_tuple(MIMETYPE_VIDEO_HEVC, PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10),
+    std::make_tuple(MIMETYPE_VIDEO_HEVC, PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_2,        VP9Profile2HDR),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_2,        VP9Profile2HDR10Plus),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_3,        VP9Profile3HDR),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_3,        VP9Profile3HDR10Plus),
+    std::make_tuple(MIMETYPE_VIDEO_AV1,  PROFILE_AV1_0,        AV1ProfileMain10HDR10),
+    std::make_tuple(MIMETYPE_VIDEO_AV1,  PROFILE_AV1_0,        AV1ProfileMain10HDR10Plus),
+};
+
+INSTANTIATE_TEST_SUITE_P(
+        CCodecConfig,
+        HdrProfilesTest,
+        ::testing::ValuesIn(kHdrProfilesParams));
+
 } // namespace android
diff --git a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
index 6738ee7..0be934a 100644
--- a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
+++ b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
@@ -53,7 +53,8 @@
             C2Config::pcm_encoding_t encoding,
             size_t inputFrameSizeInBytes,
             size_t count,
-            size_t expectedOutputSize) {
+            size_t expectedOutputSize,
+            bool separateEos) {
         FrameReassembler frameReassembler;
         frameReassembler.init(
                 mPool,
@@ -67,7 +68,7 @@
 
         size_t inputIndex = 0, outputIndex = 0;
         size_t expectCount = 0;
-        for (size_t i = 0; i < count; ++i) {
+        for (size_t i = 0; i < count + (separateEos ? 1 : 0); ++i) {
             sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(
                     new AMessage, new ABuffer(inputFrameSizeInBytes));
             buffer->setRange(0, inputFrameSizeInBytes);
@@ -77,8 +78,12 @@
             if (i == count - 1) {
                 buffer->meta()->setInt32("eos", 1);
             }
-            for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
-                buffer->base()[j] = (inputIndex & 0xFF);
+            if (i == count && separateEos) {
+                buffer->setRange(0, 0);
+            } else {
+                for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
+                    buffer->base()[j] = (inputIndex & 0xFF);
+                }
             }
             std::list<std::unique_ptr<C2Work>> items;
             ASSERT_EQ(C2_OK, frameReassembler.process(buffer, &items));
@@ -105,7 +110,8 @@
                 ASSERT_EQ(encoderFrameSize * BytesPerSample(encoding), view.capacity());
                 for (size_t j = 0; j < view.capacity(); ++j, ++outputIndex) {
                     ASSERT_TRUE(outputIndex < inputIndex
-                             || inputIndex == inputFrameSizeInBytes * count);
+                             || inputIndex == inputFrameSizeInBytes * count)
+                        << "inputIndex = " << inputIndex << " outputIndex = " << outputIndex;
                     uint8_t expected = outputIndex < inputIndex ? (outputIndex & 0xFF) : 0;
                     if (expectCount < 10) {
                         ++expectCount;
@@ -137,204 +143,239 @@
 // Push frames with exactly the same size as the encoder requested.
 TEST_F(FrameReassemblerTest, PushExactFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
-            10 /* count */,
-            10240 /* expected output size = 10 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
-            10 /* count */,
-            20480 /* expected output size = 10 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
-            10 /* count */,
-            40960 /* expected output size = 10 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 10 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 10 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 10 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with half the size that the encoder requested.
 TEST_F(FrameReassemblerTest, PushHalfFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            5120 /* expected output size = 5 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            10240 /* expected output size = 5 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 5 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                5120 /* expected output size = 5 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 5 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 5 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with twice the size that the encoder requested.
 TEST_F(FrameReassemblerTest, PushDoubleFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 20 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            40960 /* expected output size = 20 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            81920 /* expected output size = 20 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 20 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 20 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                81920 /* expected output size = 20 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames a little larger (+5 samples) than the requested size.
 TEST_F(FrameReassemblerTest, PushLittleLargerFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            11264 /* expected output size = 11 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            22528 /* expected output size = 11 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            45056 /* expected output size = 11 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                11264 /* expected output size = 11 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                22528 /* expected output size = 11 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                45056 /* expected output size = 11 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames a little smaller (-5 samples) than the requested size.
 TEST_F(FrameReassemblerTest, PushLittleSmallerFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            10240 /* expected output size = 10 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 10 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            40960 /* expected output size = 10 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 10 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 10 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 10 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push single-byte frames
 TEST_F(FrameReassemblerTest, PushSingleByte) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            100352 /* expected output size = 98 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            100352 /* expected output size = 49 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            102400 /* expected output size = 25 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                100352 /* expected output size = 98 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                100352 /* expected output size = 49 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                102400 /* expected output size = 25 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push one big chunk.
 TEST_F(FrameReassemblerTest, PushBigChunk) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            100352 /* expected output size = 98 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            100352 /* expected output size = 49 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            102400 /* expected output size = 25 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                100352 /* expected output size = 98 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                100352 /* expected output size = 49 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                102400 /* expected output size = 25 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 2f4d6b1..54a6fb1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -7,14 +7,28 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_library_headers {
+    name: "libsfplugin_ccodec_utils_headers",
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [ "//apex_available:platform", "com.android.media.swcodec", ],
+
+    export_include_dirs: [
+        ".",
+    ],
+}
+
 cc_library {
     name: "libsfplugin_ccodec_utils",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [ "//apex_available:platform", "com.android.media.swcodec", ],
+
     double_loadable: true,
 
     srcs: [
         "Codec2BufferUtils.cpp",
+        "Codec2CommonUtils.cpp",
         "Codec2Mapper.cpp",
     ],
 
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 5f87c66..807841e 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <libyuv.h>
 
@@ -36,8 +38,8 @@
 namespace {
 
 /**
- * A flippable, optimizable memcpy. Constructs such as (from ? src : dst) do not work as the results are
- * always const.
+ * A flippable, optimizable memcpy. Constructs such as (from ? src : dst)
+ * do not work as the results are always const.
  */
 template<bool ToA, size_t S>
 struct MemCopier {
@@ -88,7 +90,7 @@
         uint32_t planeW = img->mWidth / plane.colSampling;
         uint32_t planeH = img->mHeight / plane.rowSampling;
 
-        bool canCopyByRow = (plane.colInc == 1) && (img->mPlane[i].mColInc == 1);
+        bool canCopyByRow = (plane.colInc == bpp) && (img->mPlane[i].mColInc == bpp);
         bool canCopyByPlane = canCopyByRow && (plane.rowInc == img->mPlane[i].mRowInc);
         if (canCopyByPlane) {
             MemCopier<ToMediaImage, 0>::copy(imgRow, viewRow, plane.rowInc * planeH);
@@ -119,7 +121,10 @@
 }  // namespace
 
 status_t ImageCopy(uint8_t *imgBase, const MediaImage2 *img, const C2GraphicView &view) {
-    if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
+    if (img == nullptr
+        || imgBase == nullptr
+        || view.crop().width != img->mWidth
+        || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
     const uint8_t* src_y = view.data()[0];
@@ -139,15 +144,18 @@
 
     if (IsNV12(view)) {
         if (IsNV12(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
             return OK;
         } else if (IsNV21(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -155,15 +163,18 @@
         }
     } else if (IsNV21(view)) {
         if (IsNV12(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
             return OK;
         } else if (IsI420(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
             if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -171,27 +182,34 @@
         }
     } else if (IsI420(view)) {
         if (IsNV12(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
             if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
             return OK;
         }
     }
+    ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
     return _ImageCopy<true>(view, img, imgBase);
 }
 
 status_t ImageCopy(C2GraphicView &view, const uint8_t *imgBase, const MediaImage2 *img) {
-    if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
+    if (img == nullptr
+        || imgBase == nullptr
+        || view.crop().width != img->mWidth
+        || view.crop().height != img->mHeight) {
         return BAD_VALUE;
     }
     const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
@@ -210,15 +228,18 @@
     int height = view.crop().height;
     if (IsNV12(img)) {
         if (IsNV12(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
             return OK;
         } else if (IsNV21(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -226,15 +247,18 @@
         }
     } else if (IsNV21(img)) {
         if (IsNV12(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
             return OK;
         } else if (IsI420(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
             if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -242,22 +266,26 @@
         }
     } else if (IsI420(img)) {
         if (IsNV12(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
             if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
             return OK;
         }
     }
+    ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
     return _ImageCopy<false>(view, img, imgBase);
 }
 
@@ -510,12 +538,14 @@
 // Matrix coefficient to convert RGB to Planar YUV data.
 // Each sub-array represents the 3X3 coeff used with R, G and B
 static const int16_t bt601Matrix[2][3][3] = {
-    { { 76, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+    { { 77, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
     { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } },  /* RANGE_LIMITED */
 };
 
 static const int16_t bt709Matrix[2][3][3] = {
-    { { 54, 183, 18 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    // TRICKY: 18 is adjusted to 19 so that sum of row 1 is 256
+    { { 54, 183, 19 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+    // TRICKY: -87 is adjusted to -86 so that sum of row 2 is 0
     { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
 };
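// Quick check of the row-sum invariants the TRICKY comments describe
// (illustrative sketch, assuming the Q8 coefficients are combined with a
// >> 8 shift): a full-range luma row must sum to 256 and a chroma row to 0.
//     static_assert(77 + 150 + 29 == 256, "bt601 full-range luma row");
//     static_assert(-43 - 85 + 128 == 0,  "bt601 full-range Cb row");
//     static_assert(54 + 183 + 19 == 256, "bt709 full-range luma row");
//     static_assert(-26 - 86 + 112 == 0,  "bt709 limited-range Cb row");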
 
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
new file mode 100644
index 0000000..ef5800d
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
+#include <utils/Log.h>
+
+#include <android/hardware_buffer.h>
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <media/hardware/HardwareAPI.h>
+#include <system/graphics.h>
+
+#include <C2Debug.h>
+
+#include "Codec2CommonUtils.h"
+
+namespace android {
+
+bool isAtLeastT() {
+    char deviceCodeName[PROP_VALUE_MAX];
+    __system_property_get("ro.build.version.codename", deviceCodeName);
+    return android_get_device_api_level() >= __ANDROID_API_T__ ||
+           !strcmp(deviceCodeName, "Tiramisu");
+}
+
+bool isVendorApiOrFirstApiAtLeastT() {
+    // The first SDK the device shipped with.
+    static const int32_t kProductFirstApiLevel =
+        base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+
+    // GRF devices (introduced in Android 11) list the first and possibly the current api levels
+    // to signal which VSR requirements they conform to even if the first device SDK was higher.
+    static const int32_t kBoardFirstApiLevel =
+        base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+    static const int32_t kBoardApiLevel =
+        base::GetIntProperty<int32_t>("ro.board.api_level", 0);
+
+    // For non-GRF devices, use the first SDK version shipped by the product.
+    static const int32_t kFirstApiLevel =
+        kBoardApiLevel != 0 ? kBoardApiLevel :
+        kBoardFirstApiLevel != 0 ? kBoardFirstApiLevel :
+        kProductFirstApiLevel;
+
+    return kFirstApiLevel >= __ANDROID_API_T__;
+}
+
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
+    // HAL_PIXEL_FORMAT_YCBCR_P010 requirement was added in T VSR, although it could have been
+    // supported prior to this.
+    //
+    // Unfortunately, we cannot detect if P010 is properly supported using AHardwareBuffer
+    // API alone. For now, limit P010 to devices that launched with Android T or are known to
+    // conform to the Android T VSR (as opposed to simply limiting to a T vendor image).
+    if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 &&
+            !isVendorApiOrFirstApiAtLeastT()) {
+        return false;
+    }
+
+    const AHardwareBuffer_Desc desc = {
+            .width = 320,
+            .height = 240,
+            .format = format,
+            .layers = 1,
+            .usage = AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
+                     AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
+                     AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
+                     AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY,
+            .stride = 0,
+            .rfu0 = 0,
+            .rfu1 = 0,
+    };
+
+    return AHardwareBuffer_isSupported(&desc);
+}
+
+}  // namespace android
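As a usage illustration only (the wrapper below is hypothetical, not part of this change), a component can gate 10-bit support on the new helper:

    #include <android/hardware_buffer.h>
    #include <system/graphics.h>
    #include "Codec2CommonUtils.h"

    // Only advertise P010 when the device launched with Android T or declares
    // conformance to the T VSR through its board/product API level properties.
    static bool mayAdvertiseP010() {
        return android::isHalPixelFormatSupported(
                (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010);
    }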
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
new file mode 100644
index 0000000..98dd65b
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2022, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_UTILS_H_
+#define CODEC2_COMMON_UTILS_H_
+
+#include <android/hardware_buffer.h>
+
+namespace android {
+
+bool isAtLeastT();
+
+bool isVendorApiOrFirstApiAtLeastT();
+
+/**
+ * Check if a given pixel format is supported.
+ * Enums listed in android_pixel_format_t, android_pixel_format_v1_1_t,
+ * and so on can be passed, as these enums have equivalent definitions in
+ * AHardwareBuffer_Format.
+ */
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format);
+
+} // namespace android
+
+#endif  // CODEC2_COMMON_UTILS_H_
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 4d939fa..c606d6f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -18,6 +18,9 @@
 #define LOG_TAG "Codec2Mapper"
 #include <utils/Log.h>
 
+#include <map>
+#include <optional>
+
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/stagefright/foundation/ALookup.h>
@@ -167,6 +170,9 @@
     { C2Config::LEVEL_DV_MAIN_UHD_30, DolbyVisionLevelUhd30 },
     { C2Config::LEVEL_DV_MAIN_UHD_48, DolbyVisionLevelUhd48 },
     { C2Config::LEVEL_DV_MAIN_UHD_60, DolbyVisionLevelUhd60 },
+    { C2Config::LEVEL_DV_MAIN_UHD_120, DolbyVisionLevelUhd120 },
+    { C2Config::LEVEL_DV_MAIN_8K_30,  DolbyVisionLevel8k30 },
+    { C2Config::LEVEL_DV_MAIN_8K_60,  DolbyVisionLevel8k60 },
 
     // high tiers are not yet supported on android, for now map them to main tier
     { C2Config::LEVEL_DV_HIGH_HD_24,  DolbyVisionLevelHd24 },
@@ -178,6 +184,9 @@
     { C2Config::LEVEL_DV_HIGH_UHD_30, DolbyVisionLevelUhd30 },
     { C2Config::LEVEL_DV_HIGH_UHD_48, DolbyVisionLevelUhd48 },
     { C2Config::LEVEL_DV_HIGH_UHD_60, DolbyVisionLevelUhd60 },
+    { C2Config::LEVEL_DV_HIGH_UHD_120, DolbyVisionLevelUhd120 },
+    { C2Config::LEVEL_DV_HIGH_8K_30,  DolbyVisionLevel8k30 },
+    { C2Config::LEVEL_DV_HIGH_8K_60,  DolbyVisionLevel8k60 },
 };
 
 ALookup<C2Config::profile_t, int32_t> sDolbyVisionProfiles = {
@@ -255,6 +264,8 @@
     { C2Config::PROFILE_HEVC_MAIN_STILL, HEVCProfileMainStill },
     { C2Config::PROFILE_HEVC_MAIN_INTRA, HEVCProfileMain },
     { C2Config::PROFILE_HEVC_MAIN_10_INTRA, HEVCProfileMain10 },
+    { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10 },
+    { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
 };
 
 ALookup<C2Config::profile_t, int32_t> sHevcHdrProfiles = {
@@ -265,6 +276,13 @@
     { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+    { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+    { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+    { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
 ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
     { C2Config::LEVEL_MP2V_LOW,         MPEG2LevelLL },
     { C2Config::LEVEL_MP2V_MAIN,        MPEG2LevelML },
@@ -354,6 +372,17 @@
     { C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+    { C2Config::hdr_format_t::SDR, VP9Profile0 },
+    { C2Config::hdr_format_t::SDR, VP9Profile1 },
+    { C2Config::hdr_format_t::HLG, VP9Profile2 },
+    { C2Config::hdr_format_t::HLG, VP9Profile3 },
+    { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+    { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+    { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+    { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
 ALookup<C2Config::level_t, int32_t> sAv1Levels = {
     { C2Config::LEVEL_AV1_2,    AV1Level2  },
     { C2Config::LEVEL_AV1_2_1,  AV1Level21 },
@@ -381,15 +410,17 @@
     { C2Config::LEVEL_AV1_7_3,  AV1Level73 },
 };
 
-
 ALookup<C2Config::profile_t, int32_t> sAv1Profiles = {
-    // TODO: will need to disambiguate between Main8 and Main10
     { C2Config::PROFILE_AV1_0, AV1ProfileMain8 },
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::profile_t, int32_t> sAv1TenbitProfiles = {
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
+};
+
 ALookup<C2Config::profile_t, int32_t> sAv1HdrProfiles = {
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
 };
@@ -398,6 +429,44 @@
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+    { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+    { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+    { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+    { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
+// HAL_PIXEL_FORMAT_* -> COLOR_Format*
+ALookup<uint32_t, int32_t> sPixelFormats = {
+    { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
+
+    // YCBCR_420_888 maps to YUV420Flexible and vice versa
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420Flexible },
+
+    // Fallback matches for YCBCR_420_888
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420Planar },
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420SemiPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420PackedPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420PackedSemiPlanar },
+
+    // Fallback matches for YUV420Flexible
+    { HAL_PIXEL_FORMAT_YCRCB_420_SP,           COLOR_FormatYUV420Flexible },
+    { HAL_PIXEL_FORMAT_YV12,                   COLOR_FormatYUV420Flexible },
+
+    { HAL_PIXEL_FORMAT_YCBCR_422_SP,           COLOR_FormatYUV422PackedSemiPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_422_I,            COLOR_FormatYUV422PackedPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_P010,             COLOR_FormatYUVP010 },
+    { HAL_PIXEL_FORMAT_RGBA_1010102,           COLOR_Format32bitABGR2101010 },
+    { HAL_PIXEL_FORMAT_RGBA_FP16,              COLOR_Format64bitABGRFloat },
+};
+
+ALookup<C2Config::picture_type_t, int32_t> sPictureType = {
+    { C2Config::picture_type_t::SYNC_FRAME,     PICTURE_TYPE_I },
+    { C2Config::picture_type_t::I_FRAME,        PICTURE_TYPE_I },
+    { C2Config::picture_type_t::P_FRAME,        PICTURE_TYPE_P },
+    { C2Config::picture_type_t::B_FRAME,        PICTURE_TYPE_B },
+};
+
 /**
  * A helper that passes through vendor extension profile and level values.
  */
@@ -450,6 +519,10 @@
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
         return sAacProfiles.map(from, to);
     }
+    // AAC does not have HDR format
+    virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+        return false;
+    }
 };
 
 struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -480,6 +553,12 @@
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
         return sDolbyVisionProfiles.map(from, to);
     }
+    // Dolby Vision is always HDR and the profile is fully expressive so use unknown
+    // HDR format
+    virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+        *to = C2Config::hdr_format_t::UNKNOWN;
+        return true;
+    }
 };
 
 struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -518,6 +597,9 @@
                      mIsHdr ? sHevcHdrProfiles.map(from, to) :
                               sHevcProfiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sHevcHdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -596,6 +678,9 @@
                      mIsHdr ? sVp9HdrProfiles.map(from, to) :
                               sVp9Profiles.map(from, to);
     }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sVp9HdrFormats.map(from, to);
+    }
 
 private:
     bool mIsHdr;
@@ -603,9 +688,9 @@
 };
 
 struct Av1ProfileLevelMapper : ProfileLevelMapperHelper {
-    Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+    Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false, int32_t bitDepth = 8) :
         ProfileLevelMapperHelper(),
-        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus), mBitDepth(bitDepth) {}
 
     virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
         return sAv1Levels.map(from, to);
@@ -614,23 +699,36 @@
         return sAv1Levels.map(from, to);
     }
     virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
-        return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
-                     mIsHdr ? sAv1HdrProfiles.map(from, to) :
-                              sAv1Profiles.map(from, to);
+        return (mBitDepth == 10) ? sAv1TenbitProfiles.map(from, to) :
+                    mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+                          mIsHdr ? sAv1HdrProfiles.map(from, to) :
+                                   sAv1Profiles.map(from, to);
     }
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
-        return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
-                     mIsHdr ? sAv1HdrProfiles.map(from, to) :
-                              sAv1Profiles.map(from, to);
+        return (mBitDepth == 10) ? sAv1TenbitProfiles.map(from, to) :
+                    mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+                          mIsHdr ? sAv1HdrProfiles.map(from, to) :
+                                   sAv1Profiles.map(from, to);
+    }
+    virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+        return sAv1HdrFormats.map(from, to);
     }
 
 private:
     bool mIsHdr;
     bool mIsHdr10Plus;
+    int32_t mBitDepth;
 };
 
 } // namespace
 
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+    // by default map all (including vendor) profiles to SDR
+    *to = C2Config::hdr_format_t::SDR;
+    return true;
+}
+
 // static
 std::shared_ptr<C2Mapper::ProfileLevelMapper>
 C2Mapper::GetProfileLevelMapper(std::string mediaType) {
@@ -674,6 +772,18 @@
 }
 
 // static
+std::shared_ptr<C2Mapper::ProfileLevelMapper>
+C2Mapper::GetBitDepthProfileLevelMapper(std::string mediaType, int32_t bitDepth) {
+    std::transform(mediaType.begin(), mediaType.end(), mediaType.begin(), ::tolower);
+    if (bitDepth == 8) {
+        return GetProfileLevelMapper(mediaType);
+    } else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
+        return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+    }
+    return nullptr;
+}
+
+// static
 bool C2Mapper::map(C2Config::bitrate_mode_t from, int32_t *to) {
     return sBitrateModes.map(from, to);
 }
@@ -956,41 +1066,29 @@
 // static
 bool C2Mapper::mapPixelFormatFrameworkToCodec(
         int32_t frameworkValue, uint32_t *c2Value) {
-    switch (frameworkValue) {
-        case COLOR_FormatSurface:
-            *c2Value = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-            return true;
-        case COLOR_FormatYUV420Flexible:
-        case COLOR_FormatYUV420Planar:
-        case COLOR_FormatYUV420SemiPlanar:
-        case COLOR_FormatYUV420PackedPlanar:
-        case COLOR_FormatYUV420PackedSemiPlanar:
-            *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
-            return true;
-        default:
-            // Passthrough
-            *c2Value = uint32_t(frameworkValue);
-            return true;
+    if (!sPixelFormats.map(frameworkValue, c2Value)) {
+        // passthrough if not mapped
+        *c2Value = uint32_t(frameworkValue);
     }
+    return true;
 }
 
 // static
 bool C2Mapper::mapPixelFormatCodecToFramework(
         uint32_t c2Value, int32_t *frameworkValue) {
-    switch (c2Value) {
-        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-            *frameworkValue = COLOR_FormatSurface;
-            return true;
-        case HAL_PIXEL_FORMAT_YCBCR_422_SP:
-        case HAL_PIXEL_FORMAT_YCRCB_420_SP:
-        case HAL_PIXEL_FORMAT_YCBCR_422_I:
-        case HAL_PIXEL_FORMAT_YCBCR_420_888:
-        case HAL_PIXEL_FORMAT_YV12:
-            *frameworkValue = COLOR_FormatYUV420Flexible;
-            return true;
-        default:
-            // Passthrough
-            *frameworkValue = int32_t(c2Value);
-            return true;
+    if (!sPixelFormats.map(c2Value, frameworkValue)) {
+        // passthrough if not mapped
+        *frameworkValue = int32_t(c2Value);
     }
+    return true;
+}
+
+// static
+bool C2Mapper::map(C2Config::picture_type_t from, int32_t *to) {
+    return sPictureType.map(from, to);
+}
+
+// static
+bool C2Mapper::map(int32_t from, C2Config::picture_type_t *to) {
+    return sPictureType.map(from, to);
 }
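Editor's note — an illustrative usage sketch (not part of the patch) for the new bit-depth aware mapper and the mapHdrFormat default added above, assuming the Codec2Mapper and MediaCodec constants headers are in scope; the SDK profile value below is a hypothetical placeholder, and a real caller also checks the transfer function before treating a 10-bit profile as HLG.

std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
        C2Mapper::GetBitDepthProfileLevelMapper(MIMETYPE_VIDEO_AV1, /* bitDepth */ 10);
if (mapper) {
    C2Config::profile_t c2Profile;
    C2Config::hdr_format_t hdrFormat;
    int32_t sdkProfile = 2;  // hypothetical MediaCodec AV1 10-bit profile constant
    if (mapper->mapProfile(sdkProfile, &c2Profile)
            && mapper->mapHdrFormat(sdkProfile, &hdrFormat)) {
        // hdrFormat reports HLG for 10-bit profiles; verify the transfer function separately.
    }
}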
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 797c8a8..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
             virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
             virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
             virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+            /**
+             * Mapper method that maps a MediaCodec profile to the supported
+             * HDR format for that profile. Since 10-bit profiles are used for
+             * HLG, this method will return HLG for all 10-bit profiles, but
+             * the caller should also verify that the transfer function is
+             * indeed HLG.
+             */
+            // not an abstract method as we have a default implementation for SDR
+            virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
             virtual ~ProfileLevelMapper() = default;
         };
 
@@ -43,6 +53,9 @@
         static std::shared_ptr<ProfileLevelMapper>
         GetHdrProfileLevelMapper(std::string mediaType, bool isHdr10Plus = false);
 
+        static std::shared_ptr<ProfileLevelMapper>
+        GetBitDepthProfileLevelMapper(std::string mediaType, int32_t bitDepth = 8);
+
         // convert between bitrates
         static bool map(C2Config::bitrate_mode_t, int32_t*);
         static bool map(int32_t, C2Config::bitrate_mode_t*);
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index b858fa5..9c3ba4d 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -15,12 +15,9 @@
         "C2Param_test.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/codec2/vndk/include",
-    ],
-
     header_libs: [
         "libcodec2_headers",
+        "libcodec2_vndk_headers",
     ],
 
     // param tests must not depend on any codec2 libraries as all params should be templated
@@ -47,9 +44,6 @@
         "vndk/C2BufferTest.cpp",
     ],
 
-    include_dirs: [
-    ],
-
     shared_libs: [
         "libcodec2",
         "libcodec2_vndk",
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index be81c84..1d8aea3 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -18,6 +18,25 @@
     vendor_available: true,
 
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
+
+cc_library_headers {
+    name: "libcodec2_vndk_headers",
+    vendor_available: true,
+    min_sdk_version: "29",
+
+    export_include_dirs: [
+        "include",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 }
 
 // !!!DO NOT DEPEND ON THIS SHARED LIBRARY DIRECTLY!!!
@@ -28,6 +47,11 @@
     min_sdk_version: "29",
     // TODO: b/147147883
     double_loadable: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
 
     srcs: [
         "C2AllocatorBlob.cpp",
@@ -54,6 +78,7 @@
 
     export_shared_lib_headers: [
         "libbase",
+        "libdmabufheap",
         "android.hardware.media.bufferpool@2.0",
     ],
 
@@ -73,11 +98,11 @@
         "libbase",
         "libcutils",
         "libdl",
-        "libhardware",
-        "libhidlbase",
-        "libion",
         "libdmabufheap",
         "libfmq",
+        "libgralloctypes",
+        "libhidlbase",
+        "libion",
         "liblog",
         "libnativewindow",
         "libstagefright_foundation",
@@ -92,6 +117,43 @@
     ],
 }
 
+// public dependency for statically linking to libcodec2_vndk for unit tests
+cc_defaults {
+    name: "libcodec2-static-defaults",
+
+    static_libs: [
+        "liblog",
+        "libion",
+        "libfmq",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcodec2",
+        "libhidlbase",
+        "libdmabufheap",
+        "libcodec2_vndk",
+        "libnativewindow",
+        "libcodec2_soft_common",
+        "libsfplugin_ccodec_utils",
+        "libstagefright_foundation",
+        "libstagefright_bufferpool@2.0.1",
+        "libgralloctypes",
+        "android.hardware.graphics.mapper@2.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.allocator@3.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+    ],
+
+    shared_libs: [
+        "libui",
+        "libdl",
+        "libvndksupport",
+        "libprocessgroup",
+    ],
+}
+
 // public dependency for implementing Codec 2 components
 cc_defaults {
     name: "libcodec2-impl-defaults",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 6a7f19c..bc4053d 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -20,8 +20,10 @@
 
 #include <mutex>
 
+#include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
 #include <android/hardware/graphics/common/1.2/types.h>
 #include <cutils/native_handle.h>
+#include <gralloctypes/Gralloc4.h>
 #include <hardware/gralloc.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
@@ -29,6 +31,7 @@
 
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
+#include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
 using ::android::hardware::hidl_handle;
@@ -230,8 +233,90 @@
     }
 };
 
+static
+c2_status_t Gralloc4Mapper_lock(native_handle_t *handle, uint64_t usage, const Rect& bounds,
+        C2PlanarLayout *layout, uint8_t **addr) {
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    std::vector<ui::PlaneLayout> planes;
+    // this method is only supported on Gralloc 4 or later
+    status_t err = mapper.getPlaneLayouts(handle, &planes);
+    if (err != NO_ERROR || planes.empty()) {
+        return C2_CANNOT_DO;
+    }
+
+    uint8_t *pointer = nullptr;
+    err = mapper.lock(handle, usage, bounds, (void **)&pointer, nullptr, nullptr);
+    if (err != NO_ERROR || pointer == nullptr) {
+        return C2_CORRUPTED;
+    }
+
+    using aidl::android::hardware::graphics::common::PlaneLayoutComponentType;
+    using aidl::android::hardware::graphics::common::PlaneLayoutComponent;
+
+    layout->type = C2PlanarLayout::TYPE_YUV;
+    layout->numPlanes = 0;
+    layout->rootPlanes = 0;
+
+    for (const ui::PlaneLayout &plane : planes) {
+        layout->rootPlanes++;
+        uint32_t lastOffsetInBits = 0;
+        uint32_t rootIx = layout->numPlanes;
+
+        for (const PlaneLayoutComponent &component : plane.components) {
+            if (!gralloc4::isStandardPlaneLayoutComponentType(component.type)) {
+                mapper.unlock(handle);
+                return C2_CANNOT_DO;
+            }
+
+            uint32_t rightShiftBits = component.offsetInBits - lastOffsetInBits;
+            uint32_t allocatedDepthInBits = component.sizeInBits + rightShiftBits;
+            C2PlanarLayout::plane_index_t planeId;
+            C2PlaneInfo::channel_t channel;
+
+            switch (static_cast<PlaneLayoutComponentType>(component.type.value)) {
+                case PlaneLayoutComponentType::Y:
+                    planeId = C2PlanarLayout::PLANE_Y;
+                    channel = C2PlaneInfo::CHANNEL_Y;
+                    break;
+                case PlaneLayoutComponentType::CB:
+                    planeId = C2PlanarLayout::PLANE_U;
+                    channel = C2PlaneInfo::CHANNEL_CB;
+                    break;
+                case PlaneLayoutComponentType::CR:
+                    planeId = C2PlanarLayout::PLANE_V;
+                    channel = C2PlaneInfo::CHANNEL_CR;
+                    break;
+                default:
+                    mapper.unlock(handle);
+                    return C2_CORRUPTED;
+            }
+
+            addr[planeId] = pointer + plane.offsetInBytes + (component.offsetInBits / 8);
+            layout->planes[planeId] = {
+                channel,                                                // channel
+                static_cast<int32_t>(plane.sampleIncrementInBits / 8),  // colInc
+                static_cast<int32_t>(plane.strideInBytes),              // rowInc
+                static_cast<uint32_t>(plane.horizontalSubsampling),     // mColSampling
+                static_cast<uint32_t>(plane.verticalSubsampling),       // mRowSampling
+                allocatedDepthInBits,                                   // allocatedDepth (bits)
+                static_cast<uint32_t>(component.sizeInBits),            // bitDepth (bits)
+                rightShiftBits,                                         // rightShift (bits)
+                C2PlaneInfo::NATIVE,                                    // endianness
+                rootIx,                                                 // rootIx
+                static_cast<uint32_t>(component.offsetInBits / 8),      // offset (bytes)
+            };
+
+            layout->numPlanes++;
+            lastOffsetInBits = component.offsetInBits + component.sizeInBits;
+        }
+    }
+    return C2_OK;
+}
+
 } // unnamed namespace
 
+
 native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
     return C2HandleGralloc::UnwrapNativeHandle(handle);
 }
@@ -385,6 +470,10 @@
                 mBuffer, mWidth, mHeight, mFormat, mGrallocUsage,
                 mStride, generation, igbp_id, igbp_slot);
     }
+
+    // 'NATIVE' on Android means LITTLE_ENDIAN
+    constexpr C2PlaneInfo::endianness_t kEndianness = C2PlaneInfo::NATIVE;
+
     switch (mFormat) {
         case static_cast<uint32_t>(PixelFormat4::RGBA_1010102): {
             // TRICKY: this is used for media as YUV444 in the case when it is queued directly to a
@@ -609,17 +698,6 @@
                 C2PlanarLayout::PLANE_V,          // rootIx
                 0,                                // offset
             };
-            // handle interleaved formats
-            intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
-            if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
-                layout->rootPlanes = 2;
-                layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
-                layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
-            } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
-                layout->rootPlanes = 2;
-                layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
-                layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
-            }
             break;
         }
 
@@ -646,7 +724,7 @@
                 16,                             // allocatedDepth
                 10,                             // bitDepth
                 6,                              // rightShift
-                C2PlaneInfo::LITTLE_END,        // endianness
+                kEndianness,                    // endianness
                 C2PlanarLayout::PLANE_Y,        // rootIx
                 0,                              // offset
             };
@@ -659,7 +737,7 @@
                 16,                             // allocatedDepth
                 10,                             // bitDepth
                 6,                              // rightShift
-                C2PlaneInfo::LITTLE_END,        // endianness
+                kEndianness,                    // endianness
                 C2PlanarLayout::PLANE_U,        // rootIx
                 0,                              // offset
             };
@@ -672,7 +750,7 @@
                 16,                             // allocatedDepth
                 10,                             // bitDepth
                 6,                              // rightShift
-                C2PlaneInfo::LITTLE_END,        // endianness
+                kEndianness,                    // endianness
                 C2PlanarLayout::PLANE_U,        // rootIx
                 2,                              // offset
             };
@@ -680,9 +758,15 @@
         }
 
         default: {
-            // We don't know what it is, but let's try to lock it.
+            // We don't know what it is; let's try to lock it with gralloc4
             android_ycbcr ycbcrLayout;
+            c2_status_t status = Gralloc4Mapper_lock(
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+            if (status == C2_OK) {
+                break;
+            }
 
+            // fall back to lockYCbCr
             status_t err = GraphicBufferMapper::get().lockYCbCr(
                     const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
             if (err == OK && ycbcrLayout.y && ycbcrLayout.cb && ycbcrLayout.cr
@@ -734,17 +818,6 @@
                     C2PlanarLayout::PLANE_V,          // rootIx
                     0,                                // offset
                 };
-                // handle interleaved formats
-                intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
-                if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
-                    layout->rootPlanes = 2;
-                    layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
-                    layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
-                } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
-                    layout->rootPlanes = 2;
-                    layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
-                    layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
-                }
                 break;
             }
 
@@ -790,6 +863,29 @@
     }
     mLocked = true;
 
+    // handle interleaved formats
+    if (layout->type == C2PlanarLayout::TYPE_YUV && layout->rootPlanes == 3) {
+        intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
+        intptr_t uvColInc = layout->planes[C2PlanarLayout::PLANE_U].colInc;
+        if (uvOffset > 0 && uvOffset < uvColInc) {
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
+            layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
+        } else if (uvOffset < 0 && uvOffset > -uvColInc) {
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
+            layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
+        }
+    }
+
+    ALOGV("C2AllocationGralloc::map: layout: type=%d numPlanes=%d rootPlanes=%d",
+          layout->type, layout->numPlanes, layout->rootPlanes);
+    for (int i = 0; i < layout->numPlanes; ++i) {
+        const C2PlaneInfo &plane = layout->planes[i];
+        ALOGV("C2AllocationGralloc::map: plane[%d]: colInc=%d rowInc=%d rootIx=%u offset=%u",
+              i, plane.colInc, plane.rowInc, plane.rootIx, plane.offset);
+    }
+
     return C2_OK;
 }
 
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 9c5183e..0b556aa 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -16,13 +16,24 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2FenceFactory"
+#include <cutils/native_handle.h>
 #include <utils/Log.h>
+#include <ui/Fence.h>
 
 #include <C2FenceFactory.h>
 #include <C2SurfaceSyncObj.h>
 
+#define MAX_FENCE_FDS 1
+
 class C2Fence::Impl {
 public:
+    enum type_t : uint32_t {
+        INVALID_FENCE,
+        NULL_FENCE,
+        SURFACE_FENCE,
+        SYNC_FENCE,
+    };
+
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
 
     virtual bool valid() const = 0;
@@ -33,9 +44,26 @@
 
     virtual bool isHW() const = 0;
 
+    virtual type_t type() const = 0;
+
+    /**
+     * Create a native handle for the fence so it can be marshalled.
+     * The native handle must store the fence type in the first integer.
+     *
+     * \return a valid native handle if the fence can be marshalled, otherwise return null.
+     */
+    virtual native_handle_t *createNativeHandle() const = 0;
+
     virtual ~Impl() = default;
 
     Impl() = default;
+
+    static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
+        if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
+            return static_cast<type_t>(nh->data[nh->numFds]);
+        }
+        return INVALID_FENCE;
+    }
 };
 
 c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
@@ -115,6 +143,15 @@
         return false;
     }
 
+    virtual type_t type() const {
+        return SURFACE_FENCE;
+    }
+
+    virtual native_handle_t *createNativeHandle() const {
+        ALOGD("Cannot create native handle from surface fence");
+        return nullptr;
+    }
+
     virtual ~SurfaceFenceImpl() {};
 
     SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
@@ -143,3 +180,120 @@
     }
     return C2Fence();
 }
+
+using namespace android;
+
+class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
+public:
+    virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+        c2_nsecs_t timeoutMs = timeoutNs / 1000000;
+        if (timeoutMs > INT_MAX) {
+            timeoutMs = INT_MAX;
+        }
+
+        switch (mFence->wait((int)timeoutMs)) {
+            case NO_ERROR:
+                return C2_OK;
+            case -ETIME:
+                return C2_TIMED_OUT;
+            default:
+                return C2_CORRUPTED;
+        }
+    }
+
+    virtual bool valid() const {
+        return mFence->getStatus() != Fence::Status::Invalid;
+    }
+
+    virtual bool ready() const {
+        return mFence->getStatus() == Fence::Status::Signaled;
+    }
+
+    virtual int fd() const {
+        return mFence->dup();
+    }
+
+    virtual bool isHW() const {
+        return true;
+    }
+
+    virtual type_t type() const {
+        return SYNC_FENCE;
+    }
+
+    virtual native_handle_t *createNativeHandle() const {
+        native_handle_t* nh = native_handle_create(1, 1);
+        if (!nh) {
+            ALOGE("Failed to allocate native handle for sync fence");
+            return nullptr;
+        }
+        nh->data[0] = fd();
+        nh->data[1] = type();
+        return nh;
+    }
+
+    virtual ~SyncFenceImpl() {};
+
+    SyncFenceImpl(int fenceFd) :
+            mFence(sp<Fence>::make(fenceFd)) {}
+
+    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
+        if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+            ALOGE("Invalid handle for sync fence");
+            return nullptr;
+        }
+        int fd = dup(nh->data[0]);
+        std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl");
+            close(fd);
+        }
+        return p;
+    }
+
+private:
+    const sp<Fence> mFence;
+};
+
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+    std::shared_ptr<C2Fence::Impl> p;
+    if (fenceFd >= 0) {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
+        if (!p) {
+            ALOGE("Failed to allocate sync fence impl");
+            close(fenceFd);
+        } else if (!p->valid()) {
+            p.reset();
+        }
+    } else {
+        ALOGE("Create sync fence from invalid fd");
+    }
+    return C2Fence(p);
+}
+
+native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
+    return fence.mImpl ? fence.mImpl->createNativeHandle() : nullptr;
+}
+
+C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+    if (!handle) {
+        return C2Fence();
+    }
+    C2Fence::Impl::type_t type = C2Fence::Impl::GetTypeFromNativeHandle(handle);
+    std::shared_ptr<C2Fence::Impl> p;
+    switch (type) {
+        case C2Fence::Impl::SYNC_FENCE:
+            p = SyncFenceImpl::CreateFromNativeHandle(handle);
+            break;
+        default:
+            ALOGD("Unsupported fence type %d", type);
+            // return a null-fence in this case
+            break;
+    }
+    if (p && !p->valid()) {
+        p.reset();
+    }
+    return C2Fence(p);
+}
+
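Editor's note — a brief sketch (not part of the patch) of how the new sync-fence marshalling path fits together, assuming C2FenceFactory.h and cutils/native_handle.h are available; fenceFd and the received handle are hypothetical inputs obtained elsewhere.

#include <cutils/native_handle.h>
#include <C2FenceFactory.h>

// Sender: wrap a sync fd and package it for the wire; the factory duplicates the fd,
// so the returned handle must be closed and deleted by the caller after marshalling.
static native_handle_t *marshalSyncFence(int fenceFd) {
    C2Fence fence = _C2FenceFactory::CreateSyncFence(fenceFd);  // takes ownership of fenceFd
    return _C2FenceFactory::CreateNativeHandle(fence);
}

// Receiver: rebuild a fence from the received handle; the fd inside is duplicated,
// so the caller still owns (and must release) the handle it was given.
static C2Fence unmarshalSyncFence(const native_handle_t *nh) {
    return _C2FenceFactory::CreateFromNativeHandle(nh);
}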
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index b2636e9..29aad5e 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -101,6 +101,8 @@
             uint32_t generationId,
             uint64_t consumerUsage);
 
+    virtual void getConsumerUsage(uint64_t *consumerUsage);
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
@@ -127,8 +129,9 @@
     // Create a local BlockPoolData.
     C2BufferQueueBlockPoolData(
             uint32_t generation, uint64_t bqId, int32_t bqSlot,
+            const std::shared_ptr<int> &owner,
             const android::sp<HGraphicBufferProducer>& producer,
-            std::shared_ptr<C2SurfaceSyncMemory>, int noUse);
+            std::shared_ptr<C2SurfaceSyncMemory>);
 
     virtual ~C2BufferQueueBlockPoolData() override;
 
@@ -138,7 +141,6 @@
                 uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
                 android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
                 std::shared_ptr<C2SurfaceSyncMemory> syncMem);
-
 private:
     friend struct _C2BlockFactory;
 
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index d4bed26..4944115 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -28,6 +28,7 @@
 struct _C2FenceFactory {
 
     class SurfaceFenceImpl;
+    class SyncFenceImpl;
 
     /*
      * Create C2Fence for BufferQueueBased blockpool.
@@ -38,6 +39,30 @@
     static C2Fence CreateSurfaceFence(
             std::shared_ptr<C2SurfaceSyncMemory> syncMem,
             uint32_t waitId);
+
+    /*
+     * Create C2Fence from a fence file fd.
+     *
+     * \param fenceFd           Fence file descriptor.
+     *                          It will be owned and closed by the returned fence object.
+     */
+    static C2Fence CreateSyncFence(int fenceFd);
+
+    /**
+     * Create a native handle from a fence for marshalling.
+     *
+     * \return a non-null pointer if the fence can be marshalled, otherwise return nullptr
+     */
+    static native_handle_t* CreateNativeHandle(const C2Fence& fence);
+
+    /*
+     * Create C2Fence from a native handle.
+     *
+     * \param handle           A native handle representing a fence.
+     *                         The fd in the native handle will be duplicated, so the caller will
+     *                         still own the handle and must close it.
+     */
+    static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
 };
 
 
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 169de0c..e67e42f 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2BqBuffer"
+#include <android/hardware_buffer.h>
 #include <utils/Log.h>
 
 #include <ui/BufferQueueDefs.h>
@@ -171,6 +172,91 @@
     return stamp;
 }
 
+// Do not rely on the AHardwareBuffer module for GraphicBuffer handling, since the AHardwareBuffer
+// module is linked to the framework, which could have a different implementation of GraphicBuffer
+// than the mainline/vndk implementation. (See b/203347494.)
+//
+// b2h/h2b between HardwareBuffer and GraphicBuffer cannot be used. (b2h/h2b depend on the
+// AHardwareBuffer module for the conversion between HardwareBuffer and GraphicBuffer.)
+// hgbp_-prefixed methods are added to be used instead of b2h/h2b.
+//
+// TODO: Remove the dependency on the existing AHwB module. Also clean up conversions (the
+// conversions here and the h2b/b2h conversions).
+const GraphicBuffer* hgbp_AHBuffer_to_GraphicBuffer(const AHardwareBuffer* buffer) {
+    return GraphicBuffer::fromAHardwareBuffer(buffer);
+}
+
+int hgbp_createFromHandle(const AHardwareBuffer_Desc* desc,
+                                     const native_handle_t* handle,
+                                     sp<GraphicBuffer> *outBuffer) {
+
+    if (!desc || !handle || !outBuffer) return ::android::BAD_VALUE;
+    if (desc->rfu0 != 0 || desc->rfu1 != 0) return ::android::BAD_VALUE;
+    if (desc->format == AHARDWAREBUFFER_FORMAT_BLOB && desc->height != 1)
+        return ::android::BAD_VALUE;
+
+    const int format = uint32_t(desc->format);
+    const uint64_t usage = uint64_t(desc->usage);
+    sp<GraphicBuffer> gbuffer(new GraphicBuffer(handle,
+                                                GraphicBuffer::HandleWrapMethod::CLONE_HANDLE,
+                                                desc->width, desc->height,
+                                                format, desc->layers, usage, desc->stride));
+    status_t err = gbuffer->initCheck();
+    if (err != 0 || gbuffer->handle == 0) return err;
+
+    *outBuffer = gbuffer;
+
+    return ::android::NO_ERROR;
+}
+
+void hgbp_describe(const AHardwareBuffer* buffer,
+        AHardwareBuffer_Desc* outDesc) {
+    if (!buffer || !outDesc) return;
+
+    const GraphicBuffer* gbuffer = hgbp_AHBuffer_to_GraphicBuffer(buffer);
+
+    outDesc->width = gbuffer->getWidth();
+    outDesc->height = gbuffer->getHeight();
+    outDesc->layers = gbuffer->getLayerCount();
+    outDesc->format = uint32_t(gbuffer->getPixelFormat());
+    outDesc->usage = uint64_t(gbuffer->getUsage());
+    outDesc->stride = gbuffer->getStride();
+    outDesc->rfu0 = 0;
+    outDesc->rfu1 = 0;
+}
+
+
+bool hgbp_h2b(HBuffer const& from, sp<GraphicBuffer>* to) {
+    AHardwareBuffer_Desc const* desc =
+            reinterpret_cast<AHardwareBuffer_Desc const*>(
+            from.description.data());
+    native_handle_t const* handle = from.nativeHandle;
+    if (hgbp_createFromHandle(desc, handle, to) != ::android::OK) {
+        return false;
+    }
+    return true;
+}
+
+bool hgbp_b2h(sp<GraphicBuffer> const& from, HBuffer* to,
+         uint32_t* toGenerationNumber) {
+    if (!from) {
+        return false;
+    }
+    AHardwareBuffer* hwBuffer = from->toAHardwareBuffer();
+    to->nativeHandle.setTo(
+          const_cast<native_handle_t*>(from->handle),
+          false);
+    hgbp_describe(
+            hwBuffer,
+            reinterpret_cast<AHardwareBuffer_Desc*>(to->description.data()));
+    if (toGenerationNumber) {
+        *toGenerationNumber = from->getGenerationNumber();
+    }
+    return true;
+}
+
+// End of hgbp methods for GraphicBuffer creation.
+
 bool getGenerationNumberAndUsage(const sp<HGraphicBufferProducer> &producer,
                                  uint32_t *generation, uint64_t *usage) {
     status_t status{};
@@ -211,7 +297,7 @@
                     HBuffer const& hBuffer,
                     uint32_t generationNumber){
                 if (h2b(hStatus, &status) &&
-                        h2b(hBuffer, &slotBuffer) &&
+                        hgbp_h2b(hBuffer, &slotBuffer) &&
                         slotBuffer) {
                     *generation = generationNumber;
                     *usage = slotBuffer->getUsage();
@@ -402,7 +488,7 @@
                             HBuffer const& hBuffer,
                             uint32_t generationNumber){
                         if (h2b(hStatus, &status) &&
-                                h2b(hBuffer, &slotBuffer) &&
+                                hgbp_h2b(hBuffer, &slotBuffer) &&
                                 slotBuffer) {
                             slotBuffer->setGenerationNumber(generationNumber);
                             outGeneration = generationNumber;
@@ -456,7 +542,7 @@
                         std::make_shared<C2BufferQueueBlockPoolData>(
                                 slotBuffer->getGenerationNumber(),
                                 mProducerId, slot,
-                                mProducer, mSyncMem, 0);
+                                mIgbpValidityToken, mProducer, mSyncMem);
                 mPoolDatas[slot] = poolData;
                 *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
                 return C2_OK;
@@ -485,19 +571,13 @@
 public:
     Impl(const std::shared_ptr<C2Allocator> &allocator)
         : mInit(C2_OK), mProducerId(0), mGeneration(0),
-          mDqFailure(0), mLastDqTs(0), mLastDqLogTs(0),
-          mAllocator(allocator) {
+          mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
+          mLastDqLogTs(0), mAllocator(allocator), mIgbpValidityToken(std::make_shared<int>(0)) {
     }
 
     ~Impl() {
-        bool noInit = false;
+        mIgbpValidityToken.reset();
         for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
-            if (!noInit && mProducer) {
-                Return<HStatus> transResult =
-                        mProducer->detachBuffer(static_cast<int32_t>(i));
-                noInit = !transResult.isOk() ||
-                         static_cast<HStatus>(transResult) == HStatus::NO_INIT;
-            }
             mBuffers[i].clear();
         }
     }
@@ -539,7 +619,7 @@
             }
             std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                     std::make_shared<C2BufferQueueBlockPoolData>(
-                            0, (uint64_t)0, ~0, nullptr, nullptr, 0);
+                            0, (uint64_t)0, ~0, nullptr, nullptr, nullptr);
             *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
             ALOGV("allocated a buffer successfully");
 
@@ -606,15 +686,6 @@
         {
             sp<GraphicBuffer> buffers[NUM_BUFFER_SLOTS];
             std::scoped_lock<std::mutex> lock(mMutex);
-            bool noInit = false;
-            for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
-                if (!noInit && mProducer) {
-                    Return<HStatus> transResult =
-                            mProducer->detachBuffer(static_cast<int32_t>(i));
-                    noInit = !transResult.isOk() ||
-                             static_cast<HStatus>(transResult) == HStatus::NO_INIT;
-                }
-            }
             int32_t oldGeneration = mGeneration;
             if (producer) {
                 mProducer = producer;
@@ -624,8 +695,7 @@
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
-                ALOGW("invalid producer producer(%d), generation(%d)",
-                      (bool)producer, bqInformation);
+                ALOGD("configuring null producer: igbp_information(%d)", bqInformation);
             }
             oldMem = mSyncMem; // prevent destruction while locked.
             mSyncMem = c2SyncMem;
@@ -650,6 +720,10 @@
                         }
                     }
                 }
+            } else {
+                // old buffers should not be cancelled since the associated IGBP
+                // is no longer valid.
+                mIgbpValidityToken = std::make_shared<int>(0);
             }
             for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
                 mBuffers[i] = buffers[i];
@@ -661,6 +735,11 @@
                   "bqId: %llu migrated buffers # %d",
                   generation, (unsigned long long)producerId, migrated);
         }
+        mConsumerUsage = usage;
+    }
+
+    void getConsumerUsage(uint64_t *consumeUsage) {
+        *consumeUsage = mConsumerUsage;
     }
 
 private:
@@ -669,6 +748,7 @@
     c2_status_t mInit;
     uint64_t mProducerId;
     uint32_t mGeneration;
+    uint64_t mConsumerUsage;
     OnRenderCallback mRenderCallback;
 
     size_t mDqFailure;
@@ -685,6 +765,20 @@
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
 
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+
+    // IGBP invalidation notification token.
+    // The buffers (C2BufferQueueBlockPoolData) hold a reference to the IGBP they
+    // belong to, in order to call IGBP::cancelBuffer() when they are of no more use.
+    //
+    // In certain cases the IGBP is no longer used by this class (actually MediaCodec)
+    // and the situation needs to be addressed quickly. To achieve this, a
+    // std::shared_ptr<> is used as a token for quick IGBP invalidation
+    // notification from the buffers.
+    //
+    // The buffer side holds a reference to the token as a std::weak_ptr<>.
+    // If the token has expired, the buffers will not call IGBP::cancelBuffer()
+    // when they are no longer used.
+    std::shared_ptr<int> mIgbpValidityToken;
 };
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
@@ -700,14 +794,14 @@
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
         uint32_t generation, uint64_t bqId, int32_t bqSlot,
+        const std::shared_ptr<int>& owner,
         const android::sp<HGraphicBufferProducer>& producer,
-        std::shared_ptr<C2SurfaceSyncMemory> syncMem, int noUse) :
+        std::shared_ptr<C2SurfaceSyncMemory> syncMem) :
         mLocal(true), mHeld(true),
         mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
         mCurrentGeneration(generation), mCurrentBqId(bqId),
         mTransfer(false), mAttach(false), mDisplay(false),
-        mIgbp(producer), mSyncMem(syncMem) {
-            (void)noUse;
+        mOwner(owner), mIgbp(producer), mSyncMem(syncMem) {
 }
 
 C2BufferQueueBlockPoolData::~C2BufferQueueBlockPoolData() {
@@ -716,7 +810,7 @@
     }
 
     if (mLocal) {
-        if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId) {
+        if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId && !mOwner.expired()) {
             C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
             if (syncVar) {
                 syncVar->lock();
@@ -804,7 +898,7 @@
 
     HBuffer hBuffer{};
     uint32_t hGenerationNumber{};
-    if (!b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
+    if (!hgbp_b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
         ALOGD("I to O conversion failed");
         return -1;
     }
@@ -1000,3 +1094,10 @@
         mImpl->setRenderCallback(renderCallback);
     }
 }
+
+void C2BufferQueueBlockPool::getConsumerUsage(uint64_t *consumeUsage) {
+    if (mImpl) {
+        mImpl->getConsumerUsage(consumeUsage);
+    }
+}
+
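Editor's note — a minimal, standalone sketch (not from the patch) of the validity-token pattern behind mIgbpValidityToken above: the pool owns a shared_ptr token, each buffer keeps a weak_ptr to it, and resetting the token tells all outstanding buffers to skip IGBP::cancelBuffer().

#include <cstdio>
#include <memory>

struct Buffer {
    std::weak_ptr<int> owner;  // token observed by the buffer
    void release() {
        if (!owner.expired()) {
            std::printf("IGBP still valid: cancelBuffer()\n");
        } else {
            std::printf("IGBP invalidated: skip cancelBuffer()\n");
        }
    }
};

int main() {
    auto token = std::make_shared<int>(0);  // owned by the pool
    Buffer buffer{token};
    buffer.release();   // prints "IGBP still valid: cancelBuffer()"
    token.reset();      // pool drops or replaces the IGBP
    buffer.release();   // prints "IGBP invalidated: skip cancelBuffer()"
    return 0;
}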
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index e55bdc0..2115cc3 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -228,10 +228,10 @@
     tv.tv_nsec = timeoutNs % 1000000000;
 
     int ret =  syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
-    if (ret == 0 || ret == EAGAIN) {
+    if (ret == 0 || errno == EAGAIN) {
         return C2_OK;
     }
-    if (ret == EINTR || ret == ETIMEDOUT) {
+    if (errno == EINTR || errno == ETIMEDOUT) {
         return C2_TIMED_OUT;
     }
     return C2_BAD_VALUE;
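Editor's note — a short, Linux-specific sketch (not from the patch) of why the wait result is now checked against errno: the raw futex syscall reports failures by returning -1 and setting errno, rather than returning the error code directly.

#include <cerrno>
#include <cstdint>
#include <cstdio>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

int main() {
    uint32_t word = 1;
    // FUTEX_WAIT expects the word to still hold 0; it holds 1, so the call
    // returns -1 immediately and sets errno to EAGAIN.
    long ret = syscall(__NR_futex, &word, FUTEX_WAIT, 0, nullptr, nullptr, 0);
    if (ret == -1 && errno == EAGAIN) {
        std::printf("futex word changed before waiting (EAGAIN)\n");
    }
    return 0;
}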
diff --git a/media/codecs/amrnb/common/Android.bp b/media/codecs/amrnb/common/Android.bp
index bae65f3..0bc6ed2 100644
--- a/media/codecs/amrnb/common/Android.bp
+++ b/media/codecs/amrnb/common/Android.bp
@@ -22,6 +22,10 @@
     vendor_available: true,
     host_supported: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     srcs: [
         "src/add.cpp",
diff --git a/media/codecs/amrnb/dec/Android.bp b/media/codecs/amrnb/dec/Android.bp
index 1083b82..70741d2 100644
--- a/media/codecs/amrnb/dec/Android.bp
+++ b/media/codecs/amrnb/dec/Android.bp
@@ -35,6 +35,10 @@
     vendor_available: true,
     host_supported: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     srcs: [
         "src/a_refl.cpp",
diff --git a/media/codecs/amrnb/enc/Android.bp b/media/codecs/amrnb/enc/Android.bp
index 9e947e9..3c6566e 100644
--- a/media/codecs/amrnb/enc/Android.bp
+++ b/media/codecs/amrnb/enc/Android.bp
@@ -34,6 +34,10 @@
     name: "libstagefright_amrnbenc",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     srcs: [
         "src/amrencode.cpp",
diff --git a/media/codecs/amrwb/dec/Android.bp b/media/codecs/amrwb/dec/Android.bp
index 228ea80..f16b0fe 100644
--- a/media/codecs/amrwb/dec/Android.bp
+++ b/media/codecs/amrwb/dec/Android.bp
@@ -35,6 +35,10 @@
     vendor_available: true,
     host_supported: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     srcs: [
         "src/agc2_amr_wb.cpp",
diff --git a/media/codecs/amrwb/enc/Android.bp b/media/codecs/amrwb/enc/Android.bp
index cc72eb7..8780136 100644
--- a/media/codecs/amrwb/enc/Android.bp
+++ b/media/codecs/amrwb/enc/Android.bp
@@ -21,6 +21,10 @@
     name: "libstagefright_amrwbenc",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     srcs: [
         "src/autocorr.c",
@@ -139,11 +143,6 @@
         },
     },
 
-    include_dirs: [
-        "frameworks/av/include",
-        "frameworks/av/media/libstagefright/include",
-    ],
-
     local_include_dirs: ["src"],
     export_include_dirs: ["inc"],
 
diff --git a/media/codecs/m4v_h263/dec/src/vop.cpp b/media/codecs/m4v_h263/dec/src/vop.cpp
index 7b32498..abc0861 100644
--- a/media/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/codecs/m4v_h263/dec/src/vop.cpp
@@ -107,26 +107,57 @@
 #ifndef PV_TOLERATE_VOL_ERRORS
         if (layer)                                                      /*    */
         {
-            /* support SSPL0-2  */
-            if (tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
-                    tmpvar != 0xA1 && tmpvar != 0xA2  && tmpvar != 0xA3/* Core SP@L1-L3 */)
-                return PV_FAIL;
+            switch (tmpvar)
+            {
+                /* Simple Scalable Profile Levels */
+                case 0x10:
+                case 0x11:
+                case 0x12:
+                /* Core Scalable Profile Levels */
+                case 0xA1:
+                case 0xA2:
+                case 0xA3:
+                    // Do nothing; the cases listed above are supported values
+                    break;
+                default:
+                    // Unsupported profile level
+                    return PV_FAIL;
+            }
         }
         else
         {
-            /* support SPL0-3 & SSPL0-2   */
-            if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
-                    /* While not technically supported, try to decode SPL4&SPL5 files as well. */
-                    /* We'll fail later if the size is too large.  This is to allow playback of */
-                    /* some <=CIF files generated by other encoders. */
-                    tmpvar != 0x04 && tmpvar != 0x05 &&
-                    tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
-                    tmpvar != 0x21 && tmpvar != 0x22 &&  /* Core Profile Levels */
-                    tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
-                    tmpvar != 0xF0 && tmpvar != 0xF1 && /* Advanced Simple Profile Levels*/
-                    tmpvar != 0xF2 && tmpvar != 0xF3 &&
-                    tmpvar != 0xF4 && tmpvar != 0xF5)
-                return PV_FAIL;
+            switch (tmpvar)
+            {
+                /* Simple Profile Levels */
+                case 0x01:
+                case 0x02:
+                case 0x03:
+                case 0x04:
+                case 0x05:
+                case 0x06:
+                case 0x08:
+                case 0x10:
+                case 0x11:
+                case 0x12:
+                /* Core Profile Levels */
+                case 0x21:
+                case 0x22:
+                case 0xA1:
+                case 0xA2:
+                case 0xA3:
+                /* Advanced Simple Profile Levels*/
+                case 0xF0:
+                case 0xF1:
+                case 0xF2:
+                case 0xF3:
+                case 0xF4:
+                case 0xF5:
+                    // Do nothing; the cases listed above are supported values
+                    break;
+                default:
+                    // Unsupported profile level
+                    return PV_FAIL;
+            }
         }
 #else
         profile = tmpvar;
diff --git a/media/codecs/m4v_h263/enc/src/fastidct.cpp b/media/codecs/m4v_h263/enc/src/fastidct.cpp
index 688effc..ec1b28f 100644
--- a/media/codecs/m4v_h263/enc/src/fastidct.cpp
+++ b/media/codecs/m4v_h263/enc/src/fastidct.cpp
@@ -76,6 +76,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col2(Short *blk)
 {
     int32 x0, x1, x3, x5, x7;//, x8;
@@ -102,6 +104,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col3(Short *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -137,6 +141,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col4(Short *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -180,6 +186,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col0x40(Short *blk)
 {
     int32 x1, x3, x5, x7;//, x8;
@@ -230,6 +238,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col0x10(Short *blk)
 {
     int32 x1, x3, x5,  x7;
@@ -256,6 +266,8 @@
 
 #endif /* SMALL_DCT */
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col(Short *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -368,6 +380,8 @@
     return;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row2Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x4, x5;
@@ -427,6 +441,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row3Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -497,6 +513,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row4Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -573,6 +591,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x40Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x1, x2, x4, x5;
@@ -686,6 +706,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x10Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x1, x3, x5, x7;
@@ -741,6 +763,8 @@
 
 #endif /* SMALL_DCT */
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_rowInter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -864,6 +888,8 @@
     return;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row2Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x4, x5;
@@ -919,6 +945,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row3Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -985,6 +1013,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row4Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1058,6 +1088,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x40Intra(Short *blk, UChar *rec, Int lx)
 {
     int32  x1, x2, x4, x5;
@@ -1166,6 +1198,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x10Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x1, x3, x5, x7;
@@ -1218,6 +1252,8 @@
 }
 
 #endif /* SMALL_DCT */
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_rowIntra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1364,6 +1400,8 @@
     return;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row2zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x4, x5;
@@ -1424,6 +1462,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row3zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1495,6 +1535,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row4zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1572,6 +1614,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x40zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x1, x2, x4, x5;
@@ -1687,6 +1731,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x10zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x1, x3, x5, x7;
@@ -1743,6 +1789,8 @@
 
 #endif /* SMALL_DCT */
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_rowzmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index 00b2ab6..b295258 100644
--- a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -501,13 +501,16 @@
     /* check frame rate */
     for (i = 0; i < encParams->nLayers; i++)
     {
+        if (encOption->encFrameRate[i] <= 0. || encOption->encFrameRate[i] > 120)
+        {
+            goto CLEAN_UP;
+        }
         encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
     }
 
     if (encParams->nLayers > 1)
     {
-        if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
-                encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
+        if (encOption->encFrameRate[0] == encOption->encFrameRate[1])
             goto CLEAN_UP;
     }
     /* set max frame rate */
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
index 912c821..5e613d9 100644
--- a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
@@ -50,7 +50,7 @@
 
  private:
   tagvideoDecControls *mDecHandle = nullptr;
-  uint8_t *mOutputBuffer[kNumOutputBuffers];
+  uint8_t *mOutputBuffer[kNumOutputBuffers] = {};
   bool mInitialized = false;
   bool mFramesConfigured = false;
 #ifdef MPEG4
diff --git a/media/codecs/mp3dec/Android.bp b/media/codecs/mp3dec/Android.bp
index 015b8b6..6659ea5 100644
--- a/media/codecs/mp3dec/Android.bp
+++ b/media/codecs/mp3dec/Android.bp
@@ -47,6 +47,10 @@
     name: "libstagefright_mp3dec",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     host_supported:true,
     srcs: [
@@ -108,8 +112,6 @@
         cfi: true,
     },
 
-    include_dirs: ["frameworks/av/media/libstagefright/include"],
-
     header_libs: ["libstagefright_mp3dec_headers"],
     export_header_lib_headers: ["libstagefright_mp3dec_headers"],
 
diff --git a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
index 4338c43..b3ecc77 100644
--- a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
+++ b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
@@ -219,6 +219,10 @@
 ; FUNCTION CODE
 ----------------------------------------------------------------------------*/
 
+// deliberately plays near overflow points of int32
+#if __has_attribute(no_sanitize)
+__attribute__((no_sanitize("integer")))
+#endif
 void pvmp3_st_intensity(int32 xr[SUBBANDS_NUMBER*FILTERBANK_BANDS],
                         int32 xl[SUBBANDS_NUMBER*FILTERBANK_BANDS],
                         int32 is_pos,
diff --git a/media/extractors/Android.bp b/media/extractors/Android.bp
index 7513cb1..f654ecd 100644
--- a/media/extractors/Android.bp
+++ b/media/extractors/Android.bp
@@ -24,21 +24,19 @@
 cc_defaults {
     name: "extractor-defaults",
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
-    ],
-
     shared_libs: [
         "liblog",
     ],
 
-    // extractors are supposed to work on Q(29)
+    // extractors are expected to run on Q(29)
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
 
     relative_install_path: "extractors",
 
-    compile_multilib: "first",
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/extractors/TEST_MAPPING b/media/extractors/TEST_MAPPING
index 4984b8f..a7c2cfe 100644
--- a/media/extractors/TEST_MAPPING
+++ b/media/extractors/TEST_MAPPING
@@ -1,6 +1,9 @@
 {
   "presubmit": [
 
+        {
+            "name": "CtsMediaTranscodingTestCases"
+        }
     // TODO(b/153661591) enable test once the bug is fixed
     // This tests the extractor path
     // {
diff --git a/media/extractors/aac/Android.bp b/media/extractors/aac/Android.bp
index 7bf3a13..a926422 100644
--- a/media/extractors/aac/Android.bp
+++ b/media/extractors/aac/Android.bp
@@ -21,6 +21,10 @@
 
     srcs: ["AACExtractor.cpp"],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     static_libs: [
         "libstagefright_foundation",
         "libstagefright_metadatautils",
diff --git a/media/extractors/aac/AACExtractor.h b/media/extractors/aac/include/AACExtractor.h
similarity index 100%
rename from media/extractors/aac/AACExtractor.h
rename to media/extractors/aac/include/AACExtractor.h
diff --git a/media/extractors/amr/Android.bp b/media/extractors/amr/Android.bp
index 712360d..121b7a3 100644
--- a/media/extractors/amr/Android.bp
+++ b/media/extractors/amr/Android.bp
@@ -21,6 +21,10 @@
 
     srcs: ["AMRExtractor.cpp"],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     static_libs: [
         "libstagefright_foundation",
     ],
diff --git a/media/extractors/amr/AMRExtractor.h b/media/extractors/amr/include/AMRExtractor.h
similarity index 100%
rename from media/extractors/amr/AMRExtractor.h
rename to media/extractors/amr/include/AMRExtractor.h
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 9a2a76b..834f4ad 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -19,12 +19,12 @@
 
 cc_library {
     name: "libflacextractor",
-    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
+    defaults: ["extractor-defaults"],
 
     srcs: ["FLACExtractor.cpp"],
 
-    include_dirs: [
-        "external/flac/include",
+    export_include_dirs: [
+        "include",
     ],
 
     shared_libs: [
diff --git a/media/extractors/flac/FLACExtractor.h b/media/extractors/flac/include/FLACExtractor.h
similarity index 100%
rename from media/extractors/flac/FLACExtractor.h
rename to media/extractors/flac/include/FLACExtractor.h
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
index 0e54b58..490e195 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/extractors/fuzzers/Android.bp
@@ -80,11 +80,6 @@
     defaults: ["extractor-fuzzer-defaults"],
     host_supported: true,
 
-    include_dirs: [
-        "frameworks/av/media/extractors/mpeg2",
-        "frameworks/av/media/libstagefright",
-    ],
-
     static_libs: [
         "libstagefright_foundation_without_imemory",
         "libstagefright_mpeg2support",
@@ -124,14 +119,6 @@
         "mp4_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/mp4",
-    ],
-
-    header_libs: [
-        "libaudioclient_headers",
-    ],
-
     static_libs: [
         "libstagefright_id3",
         "libstagefright_esds",
@@ -150,10 +137,6 @@
         "wav_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/wav",
-    ],
-
     static_libs: [
         "libfifo",
         "libwavextractor",
@@ -173,10 +156,6 @@
         "amr_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/amr",
-    ],
-
     static_libs: [
         "libamrextractor",
     ],
@@ -193,10 +172,6 @@
         "mkv_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/mkv",
-    ],
-
     static_libs: [
         "libwebm",
         "libstagefright_flacdec",
@@ -217,9 +192,6 @@
         "ogg_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/ogg",
-    ],
 
     static_libs: [
         "libstagefright_metadatautils",
@@ -265,10 +237,6 @@
         "mp3_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/mp3",
-    ],
-
     static_libs: [
         "libfifo",
         "libmp3extractor",
@@ -285,10 +253,6 @@
         "aac_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/aac",
-    ],
-
     static_libs: [
         "libaacextractor",
         "libstagefright_metadatautils",
@@ -304,10 +268,6 @@
         "flac_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/flac",
-    ],
-
     static_libs: [
         "libstagefright_metadatautils",
         "libFLAC",
@@ -329,10 +289,6 @@
         "midi_extractor_fuzzer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/midi",
-    ],
-
     static_libs: [
         "libsonivox",
         "libmedia_midiiowrapper",
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index 08a6fa0..feabf9e 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -23,6 +23,10 @@
 
     srcs: ["MidiExtractor.cpp"],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     header_libs: [
         "libmedia_datasource_headers",
     ],
diff --git a/media/extractors/midi/MidiExtractor.h b/media/extractors/midi/include/MidiExtractor.h
similarity index 100%
rename from media/extractors/midi/MidiExtractor.h
rename to media/extractors/midi/include/MidiExtractor.h
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 54c5b27..98ce305 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -21,10 +21,8 @@
 
     srcs: ["MatroskaExtractor.cpp"],
 
-    include_dirs: [
-        "external/flac/include",
-        "external/libvpx/libwebm",
-        "frameworks/av/media/libstagefright/flac/dec",
+    export_include_dirs: [
+        "include",
     ],
 
     shared_libs: [
diff --git a/media/extractors/mkv/MatroskaExtractor.h b/media/extractors/mkv/include/MatroskaExtractor.h
similarity index 100%
rename from media/extractors/mkv/MatroskaExtractor.h
rename to media/extractors/mkv/include/MatroskaExtractor.h
diff --git a/media/extractors/mp3/Android.bp b/media/extractors/mp3/Android.bp
index 75b9b7b..396a13a 100644
--- a/media/extractors/mp3/Android.bp
+++ b/media/extractors/mp3/Android.bp
@@ -16,6 +16,10 @@
             "XINGSeeker.cpp",
     ],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     static_libs: [
         "libutils",
         "libstagefright_id3",
diff --git a/media/extractors/mp3/MP3Extractor.h b/media/extractors/mp3/include/MP3Extractor.h
similarity index 100%
rename from media/extractors/mp3/MP3Extractor.h
rename to media/extractors/mp3/include/MP3Extractor.h
diff --git a/media/extractors/mp3/MP3Seeker.h b/media/extractors/mp3/include/MP3Seeker.h
similarity index 100%
rename from media/extractors/mp3/MP3Seeker.h
rename to media/extractors/mp3/include/MP3Seeker.h
diff --git a/media/extractors/mp3/VBRISeeker.h b/media/extractors/mp3/include/VBRISeeker.h
similarity index 100%
rename from media/extractors/mp3/VBRISeeker.h
rename to media/extractors/mp3/include/VBRISeeker.h
diff --git a/media/extractors/mp3/XINGSeeker.h b/media/extractors/mp3/include/XINGSeeker.h
similarity index 100%
rename from media/extractors/mp3/XINGSeeker.h
rename to media/extractors/mp3/include/XINGSeeker.h
diff --git a/media/extractors/mp4/Android.bp b/media/extractors/mp4/Android.bp
index 7fa6bfd..540d75d 100644
--- a/media/extractors/mp4/Android.bp
+++ b/media/extractors/mp4/Android.bp
@@ -15,6 +15,15 @@
     ],
 }
 
+cc_library_headers {
+    name: "libmp4extractor_headers",
+    host_supported: true,
+
+    export_include_dirs: [
+        "include",
+    ],
+}
+
 cc_library {
     name: "libmp4extractor",
     defaults: ["extractor-defaults"],
@@ -27,6 +36,10 @@
         "SampleTable.cpp",
     ],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     static_libs: [
         "libstagefright_esds",
         "libstagefright_foundation",
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 65c0238..7f97ddc 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1132,10 +1132,10 @@
                         && size >= 5) {
                         const uint8_t *ptr = (const uint8_t *)data;
                         const uint8_t profile = ptr[2] >> 1;
-                        const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
+                        const uint8_t blCompatibilityId = (ptr[4]) >> 4;
                         bool create_two_tracks = false;
 
-                        if (bl_compatibility_id && bl_compatibility_id != 15) {
+                        if (blCompatibilityId && blCompatibilityId != 15) {
                             create_two_tracks = true;
                         }
 
@@ -1147,13 +1147,15 @@
 
                             track_b->timescale = mLastTrack->timescale;
                             track_b->sampleTable = mLastTrack->sampleTable;
-                            track_b->includes_expensive_metadata = mLastTrack->includes_expensive_metadata;
+                            track_b->includes_expensive_metadata =
+                                mLastTrack->includes_expensive_metadata;
                             track_b->skipTrack = mLastTrack->skipTrack;
                             track_b->elst_needs_processing = mLastTrack->elst_needs_processing;
                             track_b->elst_media_time = mLastTrack->elst_media_time;
                             track_b->elst_segment_duration = mLastTrack->elst_segment_duration;
                             track_b->elst_shift_start_ticks = mLastTrack->elst_shift_start_ticks;
-                            track_b->elst_initial_empty_edit_ticks = mLastTrack->elst_initial_empty_edit_ticks;
+                            track_b->elst_initial_empty_edit_ticks =
+                                mLastTrack->elst_initial_empty_edit_ticks;
                             track_b->subsample_encryption = mLastTrack->subsample_encryption;
 
                             track_b->mTx3gBuffer = mLastTrack->mTx3gBuffer;
@@ -2591,9 +2593,11 @@
             *offset += chunk_size;
             break;
         }
-        case FOURCC("dvcC"):
-        case FOURCC("dvvC"): {
 
+        case FOURCC("dvcC"):
+        case FOURCC("dvvC"):
+        case FOURCC("dvwC"):
+        {
             if (chunk_data_size != 24) {
                 return ERROR_MALFORMED;
             }
@@ -2613,13 +2617,14 @@
                 return ERROR_MALFORMED;
 
             AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
-                                   buffer.get(), chunk_data_size);
+                                    buffer.get(), chunk_data_size);
             AMediaFormat_setString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME,
                                    MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
 
             *offset += chunk_size;
             break;
         }
+
         case FOURCC("d263"):
         {
             *offset += chunk_size;
@@ -3480,7 +3485,7 @@
         }
         unsigned mask = br.getBits(8);
         for (unsigned i = 0; i < 8; i++) {
-            if (((0x1 << i) && mask) == 0)
+            if (((0x1 << i) & mask) == 0)
                 continue;
 
             if (br.numBitsLeft() < 8) {
@@ -4458,7 +4463,6 @@
     if (!AMediaFormat_getString(track->meta, AMEDIAFORMAT_KEY_MIME, &mime)) {
         return NULL;
     }
-
     sp<ItemTable> itemTable;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         void *data;
@@ -4491,14 +4495,14 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         void *data;
         size_t size;
-        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)
+                || size != 24) {
             return NULL;
         }
 
         const uint8_t *ptr = (const uint8_t *)data;
-
         // dv_major.dv_minor Should be 1.0 or 2.1
-        if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
+        if ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)) {
             return NULL;
         }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
diff --git a/media/extractors/mp4/AC4Parser.h b/media/extractors/mp4/include/AC4Parser.h
similarity index 100%
rename from media/extractors/mp4/AC4Parser.h
rename to media/extractors/mp4/include/AC4Parser.h
diff --git a/media/extractors/mp4/ItemTable.h b/media/extractors/mp4/include/ItemTable.h
similarity index 100%
rename from media/extractors/mp4/ItemTable.h
rename to media/extractors/mp4/include/ItemTable.h
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/include/MPEG4Extractor.h
similarity index 100%
rename from media/extractors/mp4/MPEG4Extractor.h
rename to media/extractors/mp4/include/MPEG4Extractor.h
diff --git a/media/extractors/mp4/SampleIterator.h b/media/extractors/mp4/include/SampleIterator.h
similarity index 100%
rename from media/extractors/mp4/SampleIterator.h
rename to media/extractors/mp4/include/SampleIterator.h
diff --git a/media/extractors/mp4/SampleTable.h b/media/extractors/mp4/include/SampleTable.h
similarity index 100%
rename from media/extractors/mp4/SampleTable.h
rename to media/extractors/mp4/include/SampleTable.h
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 7e6247b..aa59a0c 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -38,6 +38,10 @@
         "MPEG2TSExtractor.cpp",
     ],
 
+    export_include_dirs: [
+        "include",
+    ],
+
     shared_libs: [
         "libbase",
         "libcgrouprc#29",
@@ -61,6 +65,7 @@
         "libhidlbase",
         "libhidlmemory",
         "libjsoncpp",
+        "libmedia_helper",
         "libprocessgroup",
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
index d431b05..afd28ef 100644
--- a/media/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -20,9 +20,6 @@
 
 #include "MPEG2PSExtractor.h"
 
-#include <AnotherPacketSource.h>
-#include <ESQueue.h>
-
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -33,6 +30,8 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
+#include <mpeg2ts/AnotherPacketSource.h>
+#include <mpeg2ts/ESQueue.h>
 #include <utils/String8.h>
 
 #include <inttypes.h>
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index 2e68809..9a3cd92 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -35,10 +35,9 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
+#include <mpeg2ts/AnotherPacketSource.h>
 #include <utils/String8.h>
 
-#include <AnotherPacketSource.h>
-
 #include <hidl/HybridInterface.h>
 #include <android/hardware/cas/1.0/ICas.h>
 
diff --git a/media/extractors/mpeg2/MPEG2PSExtractor.h b/media/extractors/mpeg2/include/MPEG2PSExtractor.h
similarity index 100%
rename from media/extractors/mpeg2/MPEG2PSExtractor.h
rename to media/extractors/mpeg2/include/MPEG2PSExtractor.h
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.h b/media/extractors/mpeg2/include/MPEG2TSExtractor.h
similarity index 98%
rename from media/extractors/mpeg2/MPEG2TSExtractor.h
rename to media/extractors/mpeg2/include/MPEG2TSExtractor.h
index fd77b08..0e3e484 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.h
+++ b/media/extractors/mpeg2/include/MPEG2TSExtractor.h
@@ -23,12 +23,11 @@
 #include <media/MediaExtractorPluginApi.h>
 #include <media/MediaExtractorPluginHelper.h>
 #include <media/stagefright/MetaDataBase.h>
+#include <mpeg2ts/ATSParser.h>
 #include <utils/threads.h>
 #include <utils/KeyedVector.h>
 #include <utils/Vector.h>
 
-#include <ATSParser.h>
-
 namespace android {
 
 struct AMessage;
diff --git a/media/extractors/ogg/Android.bp b/media/extractors/ogg/Android.bp
index d7540c4..dc3c25c 100644
--- a/media/extractors/ogg/Android.bp
+++ b/media/extractors/ogg/Android.bp
@@ -22,8 +22,8 @@
 
     srcs: ["OggExtractor.cpp"],
 
-    include_dirs: [
-        "external/tremolo",
+    export_include_dirs: [
+        "include",
     ],
 
     header_libs: [
diff --git a/media/extractors/ogg/OggExtractor.h b/media/extractors/ogg/include/OggExtractor.h
similarity index 100%
rename from media/extractors/ogg/OggExtractor.h
rename to media/extractors/ogg/include/OggExtractor.h
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index 5d97d9a..3c3bbdc 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -45,14 +45,11 @@
         "libdatasource",
         "libwatchdog",
 
-        "libstagefright",
         "libstagefright_id3",
         "libstagefright_flacdec",
         "libstagefright_esds",
         "libstagefright_mpeg2support",
-        "libstagefright_mpeg2extractor",
         "libstagefright_foundation_colorutils_ndk",
-        "libstagefright_foundation",
         "libstagefright_metadatautils",
 
         "libmedia_midiiowrapper",
@@ -74,17 +71,14 @@
         "libcutils",
         "libmediandk",
         "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
         "libcrypto",
         "libhidlmemory",
         "libhidlbase",
         "libbase",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/",
-        "frameworks/av/media/libstagefright/",
-    ],
-
     compile_multilib: "first",
 
     cflags: [
diff --git a/media/extractors/tests/ExtractorUnitTest.cpp b/media/extractors/tests/ExtractorUnitTest.cpp
index 84ec1f2..2bd9c6a 100644
--- a/media/extractors/tests/ExtractorUnitTest.cpp
+++ b/media/extractors/tests/ExtractorUnitTest.cpp
@@ -27,18 +27,18 @@
 #include <media/stagefright/MetaDataUtils.h>
 #include <media/stagefright/foundation/OpusHeader.h>
 
-#include "aac/AACExtractor.h"
-#include "amr/AMRExtractor.h"
-#include "flac/FLACExtractor.h"
-#include "midi/MidiExtractor.h"
-#include "mkv/MatroskaExtractor.h"
-#include "mp3/MP3Extractor.h"
-#include "mp4/MPEG4Extractor.h"
-#include "mp4/SampleTable.h"
-#include "mpeg2/MPEG2PSExtractor.h"
-#include "mpeg2/MPEG2TSExtractor.h"
-#include "ogg/OggExtractor.h"
-#include "wav/WAVExtractor.h"
+#include <AACExtractor.h>
+#include <AMRExtractor.h>
+#include <FLACExtractor.h>
+#include <MidiExtractor.h>
+#include <MatroskaExtractor.h>
+#include <MP3Extractor.h>
+#include <MPEG4Extractor.h>
+#include <SampleTable.h>
+#include <MPEG2PSExtractor.h>
+#include <MPEG2TSExtractor.h>
+#include <OggExtractor.h>
+#include <WAVExtractor.h>
 
 #include "ExtractorUnitTestEnvironment.h"
 
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index cc5e1c7..cdf587c 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -18,12 +18,12 @@
 cc_library {
     name: "libwavextractor",
 
-    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
+    defaults: ["extractor-defaults"],
 
     srcs: ["WAVExtractor.cpp"],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
+    export_include_dirs: [
+        "include",
     ],
 
     shared_libs: [
diff --git a/media/extractors/wav/WAVExtractor.h b/media/extractors/wav/include/WAVExtractor.h
similarity index 100%
rename from media/extractors/wav/WAVExtractor.h
rename to media/extractors/wav/include/WAVExtractor.h
diff --git a/media/janitors/codec_OWNERS b/media/janitors/codec_OWNERS
index e201399..d4ee51b 100644
--- a/media/janitors/codec_OWNERS
+++ b/media/janitors/codec_OWNERS
@@ -2,4 +2,4 @@
 # differentiated from plugins connecting those codecs to either omx or codec2 infrastructure
 essick@google.com
 lajos@google.com
-marcone@google.com
+wonsik@google.com
diff --git a/media/janitors/media_leads_OWNERS b/media/janitors/media_leads_OWNERS
new file mode 100644
index 0000000..b7dbdee
--- /dev/null
+++ b/media/janitors/media_leads_OWNERS
@@ -0,0 +1,9 @@
+# gerrit owner/approvers corresponding to the TLs within the media team
+# loosely (as of 2022/3) fgoldfain@ and direct reports
+arifdikici@google.com
+elaurent@google.com
+fgoldfain@google.com    #{LAST_RESORT_SUGGESTION}
+lajos@google.com
+nchalko@google.com
+olly@google.com
+robertshih@google.com
diff --git a/media/janitors/reliability_mainline_OWNERS b/media/janitors/reliability_mainline_OWNERS
new file mode 100644
index 0000000..cced19c
--- /dev/null
+++ b/media/janitors/reliability_mainline_OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1051309
+# go/android-media-reliability
+
+essick@google.com
+nchalko@google.com
diff --git a/media/libaaudio/TEST_MAPPING b/media/libaaudio/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9fa
--- /dev/null
+++ b/media/libaaudio/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 7daac20..956b3cd 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -40,6 +40,31 @@
     int64_t nanoseconds;
 } Timestamp;
 
+static constexpr int32_t   kWorkloadScaler = 500;
+
+// Linear congruential random number generator.
+static uint32_t s_random16() {
+    static uint32_t seed = 1234;
+    seed = ((seed * 31421) + 6927) & 0x0FFFF;
+    return seed;
+}
+
+/**
+ * The random number generator is good for burning CPU because the compiler cannot
+ * easily optimize away the computation.
+ * @param workload number of times to execute the loop
+ * @return a white noise value between -1.0 and +1.0
+ */
+static float s_burnCPU(int32_t workload) {
+    uint32_t random = 0;
+    for (int32_t i = 0; i < workload; i++) {
+        for (int32_t j = 0; j < 10; j++) {
+            random = random ^ s_random16();
+        }
+    }
+    return ((int32_t) random - 32768) * (1.0 / 32768);
+}
+
 /**
  * Simple wrapper for AAudio that opens an output stream either in callback or blocking write mode.
  */
@@ -268,11 +293,13 @@
     int32_t            timestampCount = 0; // in timestamps
     int32_t            sampleRate = 48000;
     int32_t            prefixToneFrames = 0;
+    double             workload = 0.0;
     bool               sweepSetup = false;
 
     int                scheduler = 0;
     bool               schedulerChecked = false;
     int32_t            hangTimeMSec = 0;
+    int                cpuAffinity = -1;
 
     AAudioSimplePlayer simplePlayer;
     int32_t            callbackCount = 0;
@@ -304,6 +331,14 @@
 
 } SineThreadedData_t;
 
+int setCpuAffinity(int cpuIndex) {
+    cpu_set_t cpu_set;
+    CPU_ZERO(&cpu_set);
+    CPU_SET(cpuIndex, &cpu_set);
+    int err = sched_setaffinity((pid_t) 0, sizeof(cpu_set_t), &cpu_set);
+    return err == 0 ? 0 : -errno;
+}
+
 // Callback function that fills the audio output buffer.
 aaudio_data_callback_result_t SimplePlayerDataCallbackProc(
         AAudioStream *stream,
@@ -319,6 +354,10 @@
     }
     SineThreadedData_t *sineData = (SineThreadedData_t *) userData;
 
+    if (sineData->cpuAffinity >= 0) {
+        setCpuAffinity(sineData->cpuAffinity);
+        sineData->cpuAffinity = -1;
+    }
     // Play an initial high tone so we can tell whether the beginning was truncated.
     if (!sineData->sweepSetup && sineData->framesTotal >= sineData->prefixToneFrames) {
         sineData->setupSineSweeps();
@@ -398,6 +437,8 @@
             return AAUDIO_CALLBACK_RESULT_STOP;
     }
 
+    s_burnCPU((int32_t)(sineData->workload * kWorkloadScaler * numFrames));
+
     sineData->callbackCount++;
     sineData->framesTotal += numFrames;
     return AAUDIO_CALLBACK_RESULT_CONTINUE;
diff --git a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
index cdc987b..400fc7c 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine_callback.cpp
@@ -31,10 +31,10 @@
 #include "AAudioSimplePlayer.h"
 #include "AAudioArgsParser.h"
 
-#define APP_VERSION  "0.1.8"
+#define APP_VERSION  "0.2.1"
 
-constexpr int32_t kDefaultHangTimeMSec = 10;
-
+static constexpr int32_t kDefaultHangTimeMSec = 10;
+static constexpr int32_t kWorkPeriodSeconds = 6;
+
 /**
  * Open stream, play some sine waves, then close the stream.
  *
@@ -44,7 +44,11 @@
 static aaudio_result_t testOpenPlayClose(AAudioArgsParser &argParser,
                                          int32_t loopCount,
                                          int32_t prefixToneMsec,
-                                         int32_t hangTimeMSec)
+                                         int32_t hangTimeMSec,
+                                         int     cpuAffinity,
+                                         double  lowWorkLoad,
+                                         double  highWorkLoad,
+                                         int32_t workPeriodSeconds)
 {
     SineThreadedData_t myData;
     AAudioSimplePlayer &player = myData.simplePlayer;
@@ -57,6 +61,7 @@
     myData.schedulerChecked = false;
     myData.callbackCount = 0;
     myData.hangTimeMSec = hangTimeMSec; // test AAudioStream_getXRunCount()
+    myData.cpuAffinity = cpuAffinity;
 
     result = player.open(argParser,
                          SimplePlayerDataCallbackProc,
@@ -111,8 +116,8 @@
         }
 
         // Play a sine wave in the background.
-        printf("Sleep for %d seconds while audio plays in a callback thread. %d of %d\n",
-               argParser.getDurationSeconds(), (loopIndex + 1), loopCount);
+        printf("Monitor for %d seconds while audio plays in a callback thread. %d of %d, %d\n",
+               argParser.getDurationSeconds(), (loopIndex + 1), loopCount, workPeriodSeconds);
         startedAtNanos = getNanoseconds(CLOCK_MONOTONIC);
         for (int second = 0; second < durationSeconds; second++) {
             // Sleep a while. Wake up early if there is an error, for example a DISCONNECT.
@@ -123,13 +128,17 @@
             const int32_t framesWritten = (int32_t) AAudioStream_getFramesWritten(player.getStream());
             const int32_t framesRead = (int32_t) AAudioStream_getFramesRead(player.getStream());
             const int32_t xruns = AAudioStream_getXRunCount(player.getStream());
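+            // Alternate between lowWorkLoad and highWorkLoad every workPeriodSeconds
+            // (for example, with -Z6: seconds 0-5 run low, 6-11 run high, and so on).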
+            myData.workload = ((second % (2 * workPeriodSeconds)) < workPeriodSeconds)
+                    ? lowWorkLoad : highWorkLoad;
             printf(" waker result = %d, at %6d millis"
-                           ", second = %3d, frames written %8d - read %8d = %8d, underruns = %d\n",
+                   ", second = %3d, frames written %8d - read %8d = %8d"
+                   ", work = %5.1f, underruns = %d\n",
                    result, (int) millis,
                    second,
                    framesWritten,
                    framesRead,
                    framesWritten - framesRead,
+                   myData.workload,
                    xruns);
             if (result != AAUDIO_OK) {
                 disconnected = (result == AAUDIO_ERROR_DISCONNECTED);
@@ -220,6 +229,11 @@
     AAudioArgsParser::usage();
     printf("      -l{count} loopCount start/stop, every other one is silent\n");
     printf("      -t{msec}  play a high pitched tone at the beginning\n");
+    printf("      -w{workload}  set base workload, default 0.0\n");
+    printf("      -W{workload}  alternate between this higher workload and base workload\n");
+    printf("      -Z{duration}  number of seconds to spend at each workload, default = %d\n",
+           kWorkPeriodSeconds);
+    printf("      -a{cpu}   set CPU affinity, default none\n");
     printf("      -h{msec}  force periodic underruns by hanging in callback\n");
     printf("                If no value specified then %d used.\n",
             kDefaultHangTimeMSec);
@@ -232,6 +246,10 @@
     int32_t            loopCount = 1;
     int32_t            prefixToneMsec = 0;
     int32_t            hangTimeMSec = 0;
+    int                cpuAffinity = -1;
+    double             lowWorkLoad = 0.0;
+    double             highWorkLoad = -1.0;
+    int32_t            workPeriodSeconds = kWorkPeriodSeconds;
 
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
@@ -247,6 +265,9 @@
             if (arg[0] == '-') {
                 char option = arg[1];
                 switch (option) {
+                    case 'a':
+                        cpuAffinity = atoi(&arg[2]);
+                        break;
                     case 'l':
                         loopCount = atoi(&arg[2]);
                         break;
@@ -258,6 +279,15 @@
                                 ? atoi(&arg[2])
                                 : kDefaultHangTimeMSec;
                         break;
+                    case 'w':
+                        lowWorkLoad = atof(&arg[2]);
+                        break;
+                    case 'W':
+                        highWorkLoad = atof(&arg[2]);
+                        break;
+                    case 'Z':
+                        workPeriodSeconds = atoi(&arg[2]);
+                        break;
                     default:
                         usage();
                         exit(EXIT_FAILURE);
@@ -271,9 +301,21 @@
         }
     }
 
+    if (highWorkLoad > 0) {
+        if (highWorkLoad < lowWorkLoad) {
+            printf("ERROR - -W%f workload lower than -w%f workload", highWorkLoad, lowWorkLoad);
+            return EXIT_FAILURE;
+        }
+    } else {
+        highWorkLoad = lowWorkLoad; // high not specified so use low
+    }
+
     // Keep looping until we can complete the test without disconnecting.
     while((result = testOpenPlayClose(argParser, loopCount,
-            prefixToneMsec, hangTimeMSec))
+            prefixToneMsec, hangTimeMSec,
+            cpuAffinity,
+            lowWorkLoad, highWorkLoad,
+            workPeriodSeconds))
             == AAUDIO_ERROR_DISCONNECTED);
 
     return (result) ? EXIT_FAILURE : EXIT_SUCCESS;
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
new file mode 100644
index 0000000..2a12191
--- /dev/null
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+    name: "libaaudio_fuzzer",
+    srcs: [
+        "libaaudio_fuzzer.cpp",
+    ],
+    header_libs: [
+        "libaaudio_headers",
+    ],
+    shared_libs: [
+        "libbinder",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioclient_aidl_conversion",
+        "libutils",
+    ],
+    static_libs: [
+        "android.media.audio.common.types-V1-cpp",
+        "liblog",
+        "libcutils",
+        "libaaudio",
+        "libjsoncpp",
+        "libbase_ndk",
+        "libcgrouprc",
+        "libaudioutils",
+        "libaudioclient",
+        "aaudio-aidl-cpp",
+        "libmedia_helper",
+        "libmediametrics",
+        "libprocessgroup",
+        "av-types-aidl-cpp",
+        "libaaudio_internal",
+        "libcgrouprc_format",
+        "audiopolicy-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "shared-file-region-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "mediametricsservice-aidl-cpp",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libaaudio/fuzzer/README.md b/media/libaaudio/fuzzer/README.md
new file mode 100644
index 0000000..4ba15c5
--- /dev/null
+++ b/media/libaaudio/fuzzer/README.md
@@ -0,0 +1,77 @@
+# Fuzzer for libaaudio
+
+## Plugin Design Considerations
+The fuzzer plugin for `libaaudio` is designed based on an understanding of the
+source code and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+The fuzzer assigns values to the following parameters to pass on to libaaudio
+(a sketch of the selection pattern follows the table below):
+1. Device Id (parameter name: `deviceId`)
+2. Sampling Rate (parameter name: `sampleRate`)
+3. Number of channels (parameter name: `channelCount`)
+4. Audio Travel Direction (parameter name: `direction`)
+5. Audio Format (parameter name: `format`)
+6. Audio Sharing Mode (parameter name: `sharingMode`)
+7. Audio Usage (parameter name: `usage`)
+8. Audio Content type (parameter name: `contentType`)
+9. Audio Input Preset (parameter name: `inputPreset`)
+10. Audio Privacy Sensitivity (parameter name: `privacySensitive`)
+11. Buffer Capacity In Frames (parameter name: `frames`)
+12. Performance Mode (parameter name: `mode`)
+13. Allowed Capture Policy (parameter name: `allowedCapturePolicy`)
+14. Session Id (parameter name: `sessionId`)
+15. Frames per Data Callback (parameter name: `framesPerDataCallback`)
+16. MMap Policy (parameter name: `policy`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `deviceId` | Any value of type `int32_t`  | Value obtained from FuzzedDataProvider |
+| `sampleRate` | Any value of type `int32_t`  | Value obtained from FuzzedDataProvider |
+| `channelCount` |  Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `direction` | 0. `AAUDIO_DIRECTION_OUTPUT` 1. `AAUDIO_DIRECTION_INPUT` | Value obtained from FuzzedDataProvider |
+| `format` | 0. `AAUDIO_FORMAT_INVALID` 1. `AAUDIO_FORMAT_UNSPECIFIED` 2. `AAUDIO_FORMAT_PCM_I16` 3. `AAUDIO_FORMAT_PCM_FLOAT` | Value obtained from FuzzedDataProvider |
+| `sharingMode` | 0. `AAUDIO_SHARING_MODE_EXCLUSIVE` 1. `AAUDIO_SHARING_MODE_SHARED` | Value obtained from FuzzedDataProvider |
+| `usage` | 0. `AAUDIO_USAGE_MEDIA` 1. `AAUDIO_USAGE_VOICE_COMMUNICATION` 2. `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING` 3. `AAUDIO_USAGE_ALARM` 4. `AAUDIO_USAGE_NOTIFICATION` 5. `AAUDIO_USAGE_NOTIFICATION_RINGTONE` 6. `AAUDIO_USAGE_NOTIFICATION_EVENT` 7. `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY` 8. `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE` 9. `AAUDIO_USAGE_ASSISTANCE_SONIFICATION` 10. `AAUDIO_USAGE_GAME` 11. `AAUDIO_USAGE_ASSISTANT` 12. `AAUDIO_SYSTEM_USAGE_EMERGENCY` 13. `AAUDIO_SYSTEM_USAGE_SAFETY` 14. `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS` 15. `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value obtained from FuzzedDataProvider |
+| `contentType` | 0. `AAUDIO_CONTENT_TYPE_SPEECH` 1. `AAUDIO_CONTENT_TYPE_MUSIC` 2. `AAUDIO_CONTENT_TYPE_MOVIE` 3. `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value obtained from FuzzedDataProvider |
+| `inputPreset` | 0. `AAUDIO_INPUT_PRESET_GENERIC` 1. `AAUDIO_INPUT_PRESET_CAMCORDER` 2. `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION` 3. `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION` 4. `AAUDIO_INPUT_PRESET_UNPROCESSED` 5. `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value obtained from FuzzedDataProvider |
+| `privacySensitive` | 0. `true` 1. `false` | Value obtained from FuzzedDataProvider |
+| `frames` | Any value of type `int32_t`  | Value obtained from FuzzedDataProvider |
+| `mode` | 0. `AAUDIO_PERFORMANCE_MODE_NONE` 1. `AAUDIO_PERFORMANCE_MODE_POWER_SAVING` 2. `AAUDIO_PERFORMANCE_MODE_LOW_LATENCY` | Value obtained from FuzzedDataProvider |
+| `allowedCapturePolicy` | 0. `AAUDIO_ALLOW_CAPTURE_BY_ALL` 1. `AAUDIO_ALLOW_CAPTURE_BY_SYSTEM` 2. `AAUDIO_ALLOW_CAPTURE_BY_NONE` | Value obtained from FuzzedDataProvider |
+| `sessionId` | 0. `AAUDIO_SESSION_ID_NONE` 1. `AAUDIO_SESSION_ID_ALLOCATE` | Value obtained from FuzzedDataProvider |
+| `framesPerDataCallback` | Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `policy` | 0. `AAUDIO_POLICY_NEVER` 1. `AAUDIO_POLICY_AUTO` 2. `AAUDIO_POLICY_ALWAYS` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
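+
+As a sketch of that selection pattern (illustrative only; the complete fuzzer source is in
+`libaaudio_fuzzer.cpp`), each parameter is drawn either from its list of valid values or as an
+arbitrary integer supplied by `FuzzedDataProvider`:
+
+```
+aaudio_format_t format = fdp.PickValueInArray(
+    {fdp.PickValueInArray(kFormats), fdp.ConsumeIntegral<int32_t>()});
+AAudioStreamBuilder_setFormat(mAaudioBuilder, format);
+```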
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the libaaudio_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) libaaudio_fuzzer
+```
+
+#### Steps to run
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/libaaudio_fuzzer/libaaudio_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
new file mode 100644
index 0000000..0233ee1
--- /dev/null
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include "aaudio/AAudio.h"
+#include "aaudio/AAudioTesting.h"
+#include <fuzzer/FuzzedDataProvider.h>
+
+constexpr int32_t kRandomStringLength = 256;
+
+constexpr int64_t kNanosPerMillisecond = 1000 * 1000;
+
+constexpr aaudio_direction_t kDirections[] = {
+    AAUDIO_DIRECTION_OUTPUT, AAUDIO_DIRECTION_INPUT, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_performance_mode_t kPerformanceModes[] = {
+    AAUDIO_PERFORMANCE_MODE_NONE, AAUDIO_PERFORMANCE_MODE_POWER_SAVING,
+    AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_format_t kFormats[] = {
+    AAUDIO_FORMAT_INVALID,        AAUDIO_FORMAT_UNSPECIFIED,
+    AAUDIO_FORMAT_PCM_I16,        AAUDIO_FORMAT_PCM_FLOAT,
+    AAUDIO_FORMAT_PCM_I24_PACKED, AAUDIO_FORMAT_PCM_I32};
+
+constexpr aaudio_sharing_mode_t kSharingModes[] = {
+    AAUDIO_SHARING_MODE_EXCLUSIVE, AAUDIO_SHARING_MODE_SHARED};
+
+constexpr int32_t kSampleRates[] = {AAUDIO_UNSPECIFIED,
+                                    8000,
+                                    11025,
+                                    16000,
+                                    22050,
+                                    32000,
+                                    44100,
+                                    48000,
+                                    88200,
+                                    96000};
+
+constexpr aaudio_usage_t kUsages[] = {
+    AAUDIO_USAGE_MEDIA,
+    AAUDIO_USAGE_VOICE_COMMUNICATION,
+    AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    AAUDIO_USAGE_ALARM,
+    AAUDIO_USAGE_NOTIFICATION,
+    AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+    AAUDIO_USAGE_NOTIFICATION_EVENT,
+    AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+    AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+    AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    AAUDIO_USAGE_GAME,
+    AAUDIO_USAGE_ASSISTANT,
+    AAUDIO_SYSTEM_USAGE_EMERGENCY,
+    AAUDIO_SYSTEM_USAGE_SAFETY,
+    AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+    AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT,
+    AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_content_type_t kContentTypes[] = {
+    AAUDIO_CONTENT_TYPE_SPEECH, AAUDIO_CONTENT_TYPE_MUSIC,
+    AAUDIO_CONTENT_TYPE_MOVIE, AAUDIO_CONTENT_TYPE_SONIFICATION,
+    AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_input_preset_t kInputPresets[] = {
+    AAUDIO_INPUT_PRESET_GENERIC,
+    AAUDIO_INPUT_PRESET_CAMCORDER,
+    AAUDIO_INPUT_PRESET_VOICE_RECOGNITION,
+    AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+    AAUDIO_INPUT_PRESET_UNPROCESSED,
+    AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE,
+    AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_allowed_capture_policy_t kAllowedCapturePolicies[] = {
+    AAUDIO_ALLOW_CAPTURE_BY_ALL, AAUDIO_ALLOW_CAPTURE_BY_SYSTEM,
+    AAUDIO_ALLOW_CAPTURE_BY_NONE, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_session_id_t kSessionIds[] = {
+    AAUDIO_SESSION_ID_NONE, AAUDIO_SESSION_ID_ALLOCATE, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_policy_t kPolicies[] = {
+    AAUDIO_POLICY_NEVER, AAUDIO_POLICY_AUTO, AAUDIO_POLICY_ALWAYS,
+    AAUDIO_UNSPECIFIED};
+
+class LibAaudioFuzzer {
+public:
+  ~LibAaudioFuzzer() { deInit(); }
+  bool init();
+  void process(const uint8_t *data, size_t size);
+  void deInit();
+
+private:
+  AAudioStreamBuilder *mAaudioBuilder = nullptr;
+  AAudioStream *mAaudioStream = nullptr;
+};
+
+bool LibAaudioFuzzer::init() {
+  aaudio_result_t result = AAudio_createStreamBuilder(&mAaudioBuilder);
+  if ((result != AAUDIO_OK) || (!mAaudioBuilder)) {
+    return false;
+  }
+  return true;
+}
+
+void LibAaudioFuzzer::process(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  aaudio_performance_mode_t mode =
+      fdp.PickValueInArray({fdp.PickValueInArray(kPerformanceModes),
+                            fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setPerformanceMode(mAaudioBuilder, mode);
+
+  int32_t deviceId = fdp.PickValueInArray(
+      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setDeviceId(mAaudioBuilder, deviceId);
+
+  std::string packageName = fdp.PickValueInArray<std::string>(
+      {"android.nativemedia.aaudio", "android.app.appops.cts",
+       fdp.ConsumeRandomLengthString(kRandomStringLength)});
+  AAudioStreamBuilder_setPackageName(mAaudioBuilder, packageName.c_str());
+
+  std::string attributionTag =
+      fdp.ConsumeRandomLengthString(kRandomStringLength);
+  AAudioStreamBuilder_setAttributionTag(mAaudioBuilder, attributionTag.c_str());
+
+  int32_t sampleRate = fdp.PickValueInArray(kSampleRates);
+  AAudioStreamBuilder_setSampleRate(mAaudioBuilder, sampleRate);
+
+  int32_t channelCount = fdp.PickValueInArray(
+      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setChannelCount(mAaudioBuilder, channelCount);
+
+  aaudio_direction_t direction = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kDirections), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setDirection(mAaudioBuilder, direction);
+
+  aaudio_format_t format = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kFormats), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setFormat(mAaudioBuilder, format);
+
+  aaudio_sharing_mode_t sharingMode = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kSharingModes), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setSharingMode(mAaudioBuilder, sharingMode);
+
+  aaudio_usage_t usage = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kUsages), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setUsage(mAaudioBuilder, usage);
+
+  aaudio_content_type_t contentType = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kContentTypes), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setContentType(mAaudioBuilder, contentType);
+
+  aaudio_input_preset_t inputPreset = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kInputPresets), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setInputPreset(mAaudioBuilder, inputPreset);
+
+  bool privacySensitive = fdp.ConsumeBool();
+  AAudioStreamBuilder_setPrivacySensitive(mAaudioBuilder, privacySensitive);
+
+  int32_t frames = fdp.PickValueInArray(
+      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setBufferCapacityInFrames(mAaudioBuilder, frames);
+
+  aaudio_allowed_capture_policy_t allowedCapturePolicy =
+      fdp.PickValueInArray({fdp.PickValueInArray(kAllowedCapturePolicies),
+                            fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setAllowedCapturePolicy(mAaudioBuilder,
+                                              allowedCapturePolicy);
+
+  aaudio_session_id_t sessionId = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kSessionIds), fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setSessionId(mAaudioBuilder, sessionId);
+
+  AAudioStreamBuilder_setDataCallback(mAaudioBuilder, nullptr, nullptr);
+  AAudioStreamBuilder_setErrorCallback(mAaudioBuilder, nullptr, nullptr);
+
+  int32_t framesPerDataCallback = fdp.PickValueInArray(
+      {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+  AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder,
+                                               framesPerDataCallback);
+
+  aaudio_policy_t policy = fdp.PickValueInArray(
+      {fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
+  AAudio_setMMapPolicy(policy);
+  (void)AAudio_getMMapPolicy();
+
+  aaudio_result_t result =
+      AAudioStreamBuilder_openStream(mAaudioBuilder, &mAaudioStream);
+  if ((result != AAUDIO_OK) || (!mAaudioStream)) {
+    return;
+  }
+
+  int32_t framesPerBurst = AAudioStream_getFramesPerBurst(mAaudioStream);
+  uint8_t numberOfBursts = fdp.ConsumeIntegral<uint8_t>();
+  int32_t maxFrames = numberOfBursts * framesPerBurst;
+  int32_t requestedBufferSize =
+      fdp.ConsumeIntegral<uint16_t>() * framesPerBurst;
+  AAudioStream_setBufferSizeInFrames(mAaudioStream, requestedBufferSize);
+
+  int64_t position = 0, nanoseconds = 0;
+  AAudioStream_getTimestamp(mAaudioStream, CLOCK_MONOTONIC, &position,
+                            &nanoseconds);
+
+  AAudioStream_requestStart(mAaudioStream);
+
+  aaudio_format_t actualFormat = AAudioStream_getFormat(mAaudioStream);
+  int32_t actualChannelCount = AAudioStream_getChannelCount(mAaudioStream);
+
+  int32_t count = fdp.ConsumeIntegral<int32_t>();
+  direction = AAudioStream_getDirection(mAaudioStream);
+
+  if (actualFormat == AAUDIO_FORMAT_PCM_I16) {
+    std::vector<int16_t> inputShortData(maxFrames * actualChannelCount, 0x0);
+    if (direction == AAUDIO_DIRECTION_INPUT) {
+      AAudioStream_read(mAaudioStream, inputShortData.data(), maxFrames,
+                        count * kNanosPerMillisecond);
+    } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
+      AAudioStream_write(mAaudioStream, inputShortData.data(), maxFrames,
+                         count * kNanosPerMillisecond);
+    }
+  } else if (actualFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+    std::vector<float> inputFloatData(maxFrames * actualChannelCount, 0x0);
+    if (direction == AAUDIO_DIRECTION_INPUT) {
+      AAudioStream_read(mAaudioStream, inputFloatData.data(), maxFrames,
+                        count * kNanosPerMillisecond);
+    } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
+      AAudioStream_write(mAaudioStream, inputFloatData.data(), maxFrames,
+                         count * kNanosPerMillisecond);
+    }
+  }
+
+  aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+  AAudioStream_waitForStateChange(mAaudioStream, AAUDIO_STREAM_STATE_UNKNOWN,
+                                  &state, count * kNanosPerMillisecond);
+  (void)AAudio_convertStreamStateToText(state);
+
+  (void)AAudioStream_getUsage(mAaudioStream);
+  (void)AAudioStream_getSampleRate(mAaudioStream);
+  (void)AAudioStream_getState(mAaudioStream);
+  (void)AAudioStream_getSamplesPerFrame(mAaudioStream);
+  (void)AAudioStream_getContentType(mAaudioStream);
+  (void)AAudioStream_getInputPreset(mAaudioStream);
+  (void)AAudioStream_isPrivacySensitive(mAaudioStream);
+  (void)AAudioStream_getAllowedCapturePolicy(mAaudioStream);
+  (void)AAudioStream_getPerformanceMode(mAaudioStream);
+  (void)AAudioStream_getDeviceId(mAaudioStream);
+  (void)AAudioStream_getSharingMode(mAaudioStream);
+  (void)AAudioStream_getSessionId(mAaudioStream);
+  (void)AAudioStream_getFramesRead(mAaudioStream);
+  (void)AAudioStream_getFramesWritten(mAaudioStream);
+  (void)AAudioStream_getXRunCount(mAaudioStream);
+  (void)AAudioStream_getBufferCapacityInFrames(mAaudioStream);
+  (void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
+  (void)AAudioStream_isMMapUsed(mAaudioStream);
+
+  AAudioStream_requestPause(mAaudioStream);
+  AAudioStream_requestFlush(mAaudioStream);
+  AAudioStream_release(mAaudioStream);
+  AAudioStream_requestStop(mAaudioStream);
+}
+
+void LibAaudioFuzzer::deInit() {
+  if (mAaudioBuilder) {
+    AAudioStreamBuilder_delete(mAaudioBuilder);
+  }
+  if (mAaudioStream) {
+    AAudioStream_close(mAaudioStream);
+  }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  LibAaudioFuzzer libAaudioFuzzer;
+  if (libAaudioFuzzer.init()) {
+    libAaudioFuzzer.process(data, size);
+  }
+  return 0;
+}
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 212a787..2ff9f5a 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -20,7 +20,7 @@
  */
 
 /**
- * @file AAudio.h
+ * @file aaudio/AAudio.h
  */
 
 /**
@@ -778,8 +778,16 @@
         __INTRODUCED_IN(26);
 
 /**
- * Request an audio device identified device using an ID.
- * On Android, for example, the ID could be obtained from the Java AudioManager.
+ * Request an audio device identified by an ID.
+ *
+ * The ID could be obtained from the Java AudioManager.
+ * AudioManager.getDevices() returns an array of {@link AudioDeviceInfo},
+ * which contains a getId() method. That ID can be passed to this function.
+ *
+ * You may not get the device that you requested.
+ * If the device matters to you, call
+ * AAudioStream_getDeviceId() after the stream is opened to
+ * verify the actual ID.
  *
  * The default, if you do not call this function, is {@link #AAUDIO_UNSPECIFIED},
  * in which case the primary device will be used.
@@ -798,8 +806,11 @@
  * This is usually {@code Context#getPackageName()}.
  *
  * The default, if you do not call this function, is a random package in the calling uid.
- * The vast majority of apps have only one package per calling UID. If the package
- * name does not match the calling UID, then requests will be rejected.
+ * The vast majority of apps have only one package per calling UID.
+ * If an invalid package name is set, input streams may not be given permission to
+ * record when started.
+ *
+ * The package name is usually the applicationId in your app's build.gradle file.
  *
  * Available since API level 31.
  *
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 02ec411..0f2d7a2 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -49,6 +49,12 @@
 };
 typedef int32_t aaudio_policy_t;
 
+// Internal error codes. Only used by the framework.
+enum {
+    AAUDIO_INTERNAL_ERROR_BASE = -1000,
+    AAUDIO_ERROR_STANDBY,
+};
+
 /**
  * Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
  * or the older "Legacy" data path.
diff --git a/media/libaaudio/scripts/measure_device_power.py b/media/libaaudio/scripts/measure_device_power.py
new file mode 100755
index 0000000..1f90933
--- /dev/null
+++ b/media/libaaudio/scripts/measure_device_power.py
@@ -0,0 +1,288 @@
+#!/usr/bin/python3
+"""
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+"""
+
+'''
+Measure CPU-related power on Pixel 6 or later devices using ODPM,
+the On Device Power Measurement tool.
+Generate a CSV report suitable for pasting into a spreadsheet.
+'''
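+
+# Example invocations (illustrative):
+#   ./measure_device_power.py -s 10
+#   ./measure_device_power.py synthmark_tests.txt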
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import time
+
+# defaults
+PRE_DELAY_SECONDS = 0.5 # time to sleep before command to avoid adb unroot error
+DEFAULT_NUM_ITERATIONS = 5
+DEFAULT_FILE_NAME = 'energy_commands.txt'
+
+'''
+Default rail assignments
+philburk-macbookpro3:expt philburk$ adb shell cat /sys/bus/iio/devices/iio\:device0/energy_value
+t=349894
+CH0(T=349894)[S10M_VDD_TPU], 5578756
+CH1(T=349894)[VSYS_PWR_MODEM], 29110940
+CH2(T=349894)[VSYS_PWR_RFFE], 3166046
+CH3(T=349894)[S2M_VDD_CPUCL2], 30203502
+CH4(T=349894)[S3M_VDD_CPUCL1], 23377533
+CH5(T=349894)[S4M_VDD_CPUCL0], 46356942
+CH6(T=349894)[S5M_VDD_INT], 10771876
+CH7(T=349894)[S1M_VDD_MIF], 21091363
+philburk-macbookpro3:expt philburk$ adb shell cat /sys/bus/iio/devices/iio\:device1/energy_value
+t=359458
+CH0(T=359458)[VSYS_PWR_WLAN_BT], 45993209
+CH1(T=359458)[L2S_VDD_AOC_RET], 2822928
+CH2(T=359458)[S9S_VDD_AOC], 6923706
+CH3(T=359458)[S5S_VDDQ_MEM], 4658202
+CH4(T=359458)[S10S_VDD2L], 5506273
+CH5(T=359458)[S4S_VDD2H_MEM], 14254574
+CH6(T=359458)[S2S_VDD_G3D], 5315420
+CH7(T=359458)[VSYS_PWR_DISPLAY], 81221665
+'''
+
+'''
+LDO2M(L2M_ALIVE):DDR  -> DRAM Array Core Power
+BUCK4S(S4S_VDD2H_MEM):DDR -> Normal operation data and control path logic circuits
+BUCK5S(S5S_VDDQ_MEM):DDR -> LPDDR I/O interface
+BUCK10S(S10S_VDD2L):DDR  -> DVFSC (1600Mbps or lower) operation data and control path logic circuits
+BUCK1M (S1M_VDD_MIF):  SoC side Memory InterFace and Controller
+'''
+
+# Map between rail name and human readable name.
+ENERGY_DICTIONARY = { \
+        'S4M_VDD_CPUCL0': 'CPU0', \
+        'S3M_VDD_CPUCL1': 'CPU1', \
+        'S2M_VDD_CPUCL2': 'CPU2', \
+        'S1M_VDD_MIF': 'MIF', \
+        'L2M_ALIVE': 'DDRAC', \
+        'S4S_VDD2H_MEM': 'DDRNO', \
+        'S10S_VDD2L': 'DDR16', \
+        'S5S_VDDQ_MEM': 'DDRIO', \
+        'VSYS_PWR_DISPLAY': 'SCREEN'}
+
+SORTED_ENERGY_LIST = sorted(ENERGY_DICTIONARY, key=ENERGY_DICTIONARY.get)
+
+# Sometimes adb returns 1 for no apparent reason.
+# So try several times.
+# @return 0 on success
+def adbTryMultiple(command):
+    returnCode = 1
+    count = 0
+    limit = 5
+    while count < limit and returnCode != 0:
+        print(('Try to adb {} {} of {}'.format(command, count, limit)))
+        subprocess.call(["adb", "wait-for-device"])
+        time.sleep(PRE_DELAY_SECONDS)
+        returnCode = subprocess.call(["adb", command])
+        print(('returnCode = {}'.format(returnCode)))
+        count += 1
+    return returnCode
+
+# Sometimes "adb root" returns 1!
+# So try several times.
+# @return 0 on success
+def adbRoot():
+    return adbTryMultiple("root");
+
+# Sometimes "adb unroot" returns 1!
+# So try several times.
+# @return 0 on success
+def adbUnroot():
+    return adbTryMultiple("unroot");
+
+# @param commandString String containing shell command
+# @return Both the stdout and stderr of the commands run
+def runCommand(commandString):
+    print(commandString)
+    if commandString == "adb unroot":
+        result = adbUnroot()
+    elif commandString == "adb root":
+        result = adbRoot()
+    else:
+        commandArray = commandString.split(' ')
+        result = subprocess.run(commandArray, check=True, capture_output=True).stdout
+    return result
+
+# @param commandString String containing ADB command
+# @return Both the stdout and stderr of the commands run
+def adbCommand(commandString):
+    if commandString == "unroot":
+        result = adbUnroot()
+    elif commandString == "root":
+        result = adbRoot()
+    else:
+        print(("adb " + commandString))
+        commandArray = ["adb"] + commandString.split(' ')
+        subprocess.call(["adb", "wait-for-device"])
+        result = subprocess.run(commandArray, check=True, capture_output=True).stdout
+    return result
+
+# Parse a line that looks like "CH3(T=10697635)[S2M_VDD_CPUCL2], 116655335"
+# Use S2M_VDD_CPUCL2 as the tag and set value to the number
+# in the report dictionary.
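+# For example (illustrative):
+#   parseEnergyValue('CH3(T=10697635)[S2M_VDD_CPUCL2], 116655335')
+#   returns ('S2M_VDD_CPUCL2', '116655335')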
+def parseEnergyValue(string):
+    return tuple(re.split(r'\[|\], +', string)[1:])
+
+# Read accumulated energy into a dictionary.
+def measureEnergyForDevice(deviceIndex, report):
+    # print("measureEnergyForDevice " + str(deviceIndex))
+    tableBytes = adbCommand( \
+            r'shell cat /sys/bus/iio/devices/iio\:device{}/energy_value'\
+            .format(deviceIndex))
+    table = tableBytes.decode("utf-8")
+    # print(table)
+    for count, line in enumerate(table.splitlines()):
+        if count > 0:
+            tagEnergy = parseEnergyValue(line)
+            report[tagEnergy[0]] = int(tagEnergy[1].strip())
+    # print(report)
+
+def measureEnergyOnce():
+    adbCommand("root")
+    report = {}
+    measureEnergyForDevice(0, report)
+    measureEnergyForDevice(1, report)
+    adbUnroot()
+    return report
+
+# Subtract numeric values for matching keys.
+def subtractReports(A, B):
+    return {x: A[x] - B[x] for x in A if x in B}
+
+# Add numeric values for matching keys.
+def addReports(A, B):
+    return {x: A[x] + B[x] for x in A if x in B}
+
+# Divide numeric values by divisor.
+# @return Modified copy of report.
+def divideReport(report, divisor):
+    return {key: val / divisor for key, val in list(report.items())}
+
+# Generate a dictionary that is the difference between two measurements over time.
+def measureEnergyOverTime(duration):
+    report1 = measureEnergyOnce()
+    print(("Measure energy for " + str(duration) + " seconds."))
+    time.sleep(duration)
+    report2 = measureEnergyOnce()
+    return subtractReports(report2, report1)
+
+# Generate a CSV string containing the human readable headers.
+def formatEnergyHeader():
+    header = ""
+    for tag in SORTED_ENERGY_LIST:
+        header += ENERGY_DICTIONARY[tag] + ", "
+    return header
+
+# Generate a CSV string containing the numeric values.
+def formatEnergyData(report):
+    data = ""
+    for tag in SORTED_ENERGY_LIST:
+        if tag in list(report.keys()):
+            data += str(report[tag]) + ", "
+        else:
+            data += "-1,"
+    return data
+
+def printEnergyReport(report):
+    s = "\n"
+    s += "Values are in microWattSeconds\n"
+    s += "Report below is CSV format for pasting into a spreadsheet:\n"
+    s += formatEnergyHeader() + "\n"
+    s += formatEnergyData(report) + "\n"
+    print(s)
+
+# Generate a dictionary that is the difference between two measurements
+# before and after executing the command.
+def measureEnergyForCommand(command):
+    report1 = measureEnergyOnce()
+    print(("Measure energy for:  " + command))
+    result = runCommand(command)
+    report2 = measureEnergyOnce()
+    # print(result)
+    return subtractReports(report2, report1)
+
+# Average the results of several measurements for one command.
+def averageEnergyForCommand(command, count):
+    print("=================== #0\n")
+    sumReport = measureEnergyForCommand(command)
+    for i in range(1, count):
+        print(("=================== #" + str(i) + "\n"))
+        report = measureEnergyForCommand(command)
+        sumReport = addReports(sumReport, report)
+    print(sumReport)
+    return divideReport(sumReport, count)
+
+# Parse a list of commands in a file.
+# Lines ending in "\" are continuation lines.
+# Lines beginning with "#" are comments.
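+# Example file contents (illustrative, in the style of synthmark_tests.txt):
+#     # baseline, no ADPF
+#     adb shell synthmark -tj -n1 -N50 -B2 -z0
+#     # a trailing '\' runs the command without measuring it
+#     adb root \
+#     adb shell synthmark -tj -n1 -N50 -B2 -z1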
+def measureEnergyForCommands(fileName):
+    finalReport = "------------------------------------\n"
+    finalReport += "comment, command, " + formatEnergyHeader() + "\n"
+    comment = ""
+    fp = open(fileName)
+    try:
+        line = fp.readline()
+        while line:
+            command = line.strip()
+            if command.startswith("#"):
+                # ignore comment
+                print((command + "\n"))
+                comment = command[1:].strip() # remove leading '#'
+            elif command.endswith('\\'):
+                command = command[:-1].strip() # remove trailing '\'
+                runCommand(command)
+            elif command:
+                report = averageEnergyForCommand(command, DEFAULT_NUM_ITERATIONS)
+                finalReport += comment + ", " + command + ", " + formatEnergyData(report) + "\n"
+                print(finalReport)
+            line = fp.readline()
+    finally:
+        fp.close()
+    return finalReport
+
+def main():
+    # parse command line args
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-s', '--seconds',
+            help="Measure power for N seconds. Ignore scriptFile.",
+            type=float)
+    parser.add_argument("fileName",
+            nargs = '?',
+            help="Path to file containing commands to be measured."
+                    + " Default path = " + DEFAULT_FILE_NAME + "."
+                    + " Lines ending in '\' are continuation lines."
+                    + " Lines beginning with '#' are comments.",
+                    default=DEFAULT_FILE_NAME)
+    args = parser.parse_args()
+
+    print(("seconds  = " + str(args.seconds)))
+    print(("fileName = " + str(args.fileName)))
+    # Process command line
+    if args.seconds:
+        report = measureEnergyOverTime(args.seconds)
+        printEnergyReport(report)
+    else:
+        report = measureEnergyForCommands(args.fileName)
+        print(report)
+    print("Finished.\n")
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/media/libaaudio/scripts/setup_odpm_cpu_rails.sh b/media/libaaudio/scripts/setup_odpm_cpu_rails.sh
new file mode 100755
index 0000000..e9241b9
--- /dev/null
+++ b/media/libaaudio/scripts/setup_odpm_cpu_rails.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Configure ODPM rails to measure CPU-specific power.
+# See go/odpm-p21-userguide
+
+adb root
+
+# LDO2M(L2M_ALIVE) - DRAM Array Core Power
+adb shell 'echo "CH0=LDO2M" > /sys/bus/iio/devices/iio\:device0/enabled_rails'
+
+# These are the defaults.
+# BUCK2M(S2M_VDD_CPUCL2):CPU(BIG)
+# adb shell 'echo "CH3=BUCK2M" > /sys/bus/iio/devices/iio\:device0/enabled_rails'
+# BUCK3M(S3M_VDD_CPUCL1):CPU(MID)
+# adb shell 'echo "CH4=BUCK3M" > /sys/bus/iio/devices/iio\:device0/enabled_rails'
+# BUCK4M(S4M_VDD_CPUCL0):CPU(LITTLE)
+# adb shell 'echo "CH5=BUCK4M" > /sys/bus/iio/devices/iio\:device0/enabled_rails'
+# BUCK1M(S1M_VDD_MIF):MIF
+# adb shell 'echo "CH7=BUCK1M" > /sys/bus/iio/devices/iio\:device0/enabled_rails'
+
+# These are default on device1.
+# BUCK5S(S5S_VDDQ_MEM):DDR
+# adb shell 'echo "CH3=BUCK5S" > /sys/bus/iio/devices/iio\:device1/enabled_rails'
+# BUCK10S(S10S_VDD2L):DDR
+# adb shell 'echo "CH4=BUCK10S" > /sys/bus/iio/devices/iio\:device1/enabled_rails'
+# BUCK4S(S4S_VDD2H_MEM):DDR
+# adb shell 'echo "CH5=BUCK4S" > /sys/bus/iio/devices/iio\:device1/enabled_rails'
+
+adb shell 'cat /sys/bus/iio/devices/iio\:device0/enabled_rails'
+adb shell 'cat /sys/bus/iio/devices/iio\:device1/enabled_rails'
+
+adb unroot
+
diff --git a/media/libaaudio/scripts/synthmark_tests.txt b/media/libaaudio/scripts/synthmark_tests.txt
new file mode 100644
index 0000000..8b6d47e
--- /dev/null
+++ b/media/libaaudio/scripts/synthmark_tests.txt
@@ -0,0 +1,50 @@
+# Measure energy consumption with synthmark.
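+# Note: this file is parsed by the power measurement script above: lines starting
+# with '#' become labels in the CSV report, lines ending in '\' are run as setup
+# commands without measurement, and all other non-empty lines are measured and
+# averaged over DEFAULT_NUM_ITERATIONS runs.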
+
+# None
+adb shell synthmark -tj -n1 -N50 -B2 -z0
+adb shell synthmark -tj -n1 -N75 -B2 -z0
+adb shell synthmark -tj -n1 -N100 -B2 -z0
+
+# ADPF PID
+adb shell synthmark -tj -n1 -N50 -B2 -z1
+adb shell synthmark -tj -n1 -N75 -B2 -z1
+adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# ADPF <400 RR
+# adb root \
+# adb shell setprop vendor.powerhal.adpf.uclamp_min.high_limit 400 \
+# adb shell synthmark -tj -n1 -N50 -B2 -z1
+# adb shell synthmark -tj -n1 -N75 -B2 -z1
+# adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# ADPF <500 RR
+# adb root \
+# adb shell setprop vendor.powerhal.adpf.uclamp_min.high_limit 500 \
+# adb shell synthmark -tj -n1 -N50 -B2 -z1
+# adb shell synthmark -tj -n1 -N75 -B2 -z1
+# adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# ADPF <600 RR
+# adb root \
+# adb shell setprop vendor.powerhal.adpf.uclamp_min.high_limit 600 \
+# adb shell synthmark -tj -n1 -N50 -B2 -z1
+# adb shell synthmark -tj -n1 -N75 -B2 -z1
+# adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# uclamp
+# adb root \
+# adb shell synthmark -tj -n1 -N75 -B2 -u1
+
+# steady
+# adb shell synthmark -tj -n75 -B2 -u0
+
+# CPU affinity
+# adb shell synthmark -tj -n1 -N75 -B2 -u0 -c1
+# adb shell synthmark -tj -n1 -N75 -B2 -u0 -c4
+# adb shell synthmark -tj -n1 -N75 -B2 -u0 -c6
+
+# steady + affinity
+# adb shell synthmark -tj -n75 -B2 -u0 -c1
+# adb shell synthmark -tj -n75 -B2 -u0 -c4
+# adb shell synthmark -tj -n75 -B2 -u0 -c6
+
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 33a5c7f..363d219 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -7,6 +7,65 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+tidy_errors = [
+    // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+    // For many categories, the checks are too many to specify individually.
+    // Feel free to disable as needed - as warnings are generally ignored,
+    // we treat warnings as errors.
+    "android-*",
+    "bugprone-*",
+    "cert-*",
+    "clang-analyzer-security*",
+    "google-*",
+    "misc-*",
+    //"modernize-*",  // explicitly list the modernize as they can be subjective.
+    "modernize-avoid-bind",
+    //"modernize-avoid-c-arrays", // std::array<> can be verbose
+    "modernize-concat-nested-namespaces",
+    //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+    "modernize-deprecated-ios-base-aliases",
+    "modernize-loop-convert",
+    "modernize-make-shared",
+    "modernize-make-unique",
+    "modernize-pass-by-value",
+    "modernize-raw-string-literal",
+    "modernize-redundant-void-arg",
+    "modernize-replace-auto-ptr",
+    "modernize-replace-random-shuffle",
+    "modernize-return-braced-init-list",
+    "modernize-shrink-to-fit",
+    "modernize-unary-static-assert",
+    // "modernize-use-auto", // found in AAudioAudio.cpp
+    "modernize-use-bool-literals",
+    "modernize-use-default-member-init",
+    "modernize-use-emplace",
+    "modernize-use-equals-default",
+    "modernize-use-equals-delete",
+    // "modernize-use-nodiscard", // found in aidl generated files
+    "modernize-use-noexcept",
+    "modernize-use-nullptr",
+    // "modernize-use-override", // found in aidl generated files
+    // "modernize-use-trailing-return-type", // not necessarily more readable
+    "modernize-use-transparent-functors",
+    "modernize-use-uncaught-exceptions",
+    // "modernize-use-using", // found typedef in several files
+    "performance-*",
+
+    // Remove some pedantic stylistic requirements.
+    "-android-cloexec-dup", // found in SharedMemoryParcelable.cpp
+    "-bugprone-macro-parentheses", // found in SharedMemoryParcelable.h
+    "-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
+
+    "-google-readability-casting", // C++ casts not always necessary and may be verbose
+    "-google-readability-todo", // do not require TODO(info)
+    "-google-build-using-namespace", // Reenable and fix later.
+    "-google-global-names-in-headers", // found in several files
+
+    "-misc-non-private-member-variables-in-classes", // found in aidl generated files
+
+    "-performance-no-int-to-ptr", // found in SharedMemoryParcelable.h
+]
+
 cc_library {
     name: "libaaudio",
 
@@ -34,7 +93,6 @@
         "-Wno-unused-parameter",
         "-Wall",
         "-Werror",
-
         // By default, all symbols are hidden.
         // "-fvisibility=hidden",
         // AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
@@ -52,7 +110,7 @@
         "libcutils",
         "libutils",
         "libbinder",
-        "libpermission",
+        "framework-permission-aidl-cpp",
     ],
 
     sanitize: {
@@ -64,6 +122,13 @@
         symbol_file: "libaaudio.map.txt",
         versions: ["28"],
     },
+
+    tidy: true,
+    tidy_checks: tidy_errors,
+    tidy_checks_as_errors: tidy_errors,
+    tidy_flags: [
+        "-format-style=file",
+    ]
 }
 
 cc_library {
@@ -102,6 +167,8 @@
         "libbinder",
         "framework-permission-aidl-cpp",
         "aaudio-aidl-cpp",
+        "android.media.audio.common.types-V1-cpp",
+        "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
     ],
 
@@ -139,10 +206,16 @@
         "binding/RingBufferParcelable.cpp",
         "binding/SharedMemoryParcelable.cpp",
         "binding/SharedRegionParcelable.cpp",
-        "flowgraph/AudioProcessorBase.cpp",
+        "flowgraph/ChannelCountConverter.cpp",
         "flowgraph/ClipToRange.cpp",
+        "flowgraph/FlowGraphNode.cpp",
+        "flowgraph/ManyToMultiConverter.cpp",
+        "flowgraph/MonoBlend.cpp",
         "flowgraph/MonoToMultiConverter.cpp",
+        "flowgraph/MultiToMonoConverter.cpp",
+        "flowgraph/MultiToManyConverter.cpp",
         "flowgraph/RampLinear.cpp",
+        "flowgraph/SampleRateConverter.cpp",
         "flowgraph/SinkFloat.cpp",
         "flowgraph/SinkI16.cpp",
         "flowgraph/SinkI24.cpp",
@@ -151,11 +224,26 @@
         "flowgraph/SourceI16.cpp",
         "flowgraph/SourceI24.cpp",
         "flowgraph/SourceI32.cpp",
+        "flowgraph/resampler/IntegerRatio.cpp",
+        "flowgraph/resampler/LinearResampler.cpp",
+        "flowgraph/resampler/MultiChannelResampler.cpp",
+        "flowgraph/resampler/PolyphaseResampler.cpp",
+        "flowgraph/resampler/PolyphaseResamplerMono.cpp",
+        "flowgraph/resampler/PolyphaseResamplerStereo.cpp",
+        "flowgraph/resampler/SincResampler.cpp",
+        "flowgraph/resampler/SincResamplerStereo.cpp",
     ],
     sanitize: {
         integer_overflow: true,
         misc_undefined: ["bounds"],
     },
+
+    tidy: true,
+    tidy_checks: tidy_errors,
+    tidy_checks_as_errors: tidy_errors,
+    tidy_flags: [
+        "-format-style=file",
+    ]
 }
 
 aidl_interface {
@@ -172,19 +260,15 @@
         "binding/aidl/aaudio/IAAudioService.aidl",
     ],
     imports: [
-        "audio_common-aidl",
+        "android.media.audio.common.types-V1",
+        "audioclient-types-aidl",
         "shared-file-region-aidl",
-        "framework-permission-aidl"
+        "framework-permission-aidl",
     ],
     backend:
     {
-        cpp: {
-            enabled: true,
-        },
         java: {
-            // TODO: need to have audio_common-aidl available in Java to enable
-            //       this.
-            enabled: false,
+            sdk_version: "module_current",
         },
     },
 }
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
index 6e3a1c8..42d81ca 100644
--- a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
@@ -124,4 +124,16 @@
     return result;
 }
 
+aaudio_result_t AAudioBinderAdapter::exitStandby(aaudio_handle_t streamHandle,
+                                                 AudioEndpointParcelable &endpointOut) {
+    aaudio_result_t result;
+    Endpoint endpoint;
+    Status status = mDelegate->exitStandby(streamHandle, &endpoint, &result);
+    if (!status.isOk()) {
+        result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
+    }
+    endpointOut = std::move(endpoint);
+    return result;
+}
+
 }  // namespace aaudio
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.h b/media/libaaudio/src/binding/AAudioBinderAdapter.h
index 5e9ab57..d170783 100644
--- a/media/libaaudio/src/binding/AAudioBinderAdapter.h
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.h
@@ -57,6 +57,9 @@
     aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
                                           pid_t clientThreadId) override;
 
+    aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+                                AudioEndpointParcelable &parcelable) override;
+
 private:
     IAAudioService* const mDelegate;
 };
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index fa5a2da..8e5facc 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -36,13 +36,10 @@
 using android::IServiceManager;
 using android::defaultServiceManager;
 using android::interface_cast;
-using android::IInterface;
 using android::Mutex;
 using android::ProcessState;
 using android::sp;
 using android::status_t;
-using android::wp;
-using android::binder::Status;
 
 using namespace aaudio;
 
@@ -93,7 +90,7 @@
                     ALOGE("%s() - linkToDeath() returned %d", __func__, status);
                 }
                 aaudioService = interface_cast<IAAudioService>(binder);
-                mAdapter.reset(new Adapter(aaudioService, mAAudioClient));
+                mAdapter = std::make_shared<Adapter>(aaudioService, mAAudioClient);
                 needToRegister = true;
                 // Make sure callbacks can be received by mAAudioClient
                 ProcessState::self()->startThreadPool();
@@ -204,3 +201,11 @@
 
     return service->unregisterAudioThread(streamHandle, clientThreadId);
 }
+
+aaudio_result_t AAudioBinderClient::exitStandby(aaudio_handle_t streamHandle,
+                                                AudioEndpointParcelable &endpointOut) {
+    std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
+    if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+
+    return service->exitStandby(streamHandle, endpointOut);
+}
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index 6a7b639..0968f4c 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -108,7 +108,10 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
-    void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {
+    aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+                                AudioEndpointParcelable &endpointOut) override;
+
+    void onStreamChange(aaudio_handle_t /*handle*/, int32_t /*opcode*/, int32_t /*value*/) {
         // TODO This is just a stub so we can have a client Binder to pass to the service.
         // TODO Implemented in a later CL.
         ALOGW("onStreamChange called!");
@@ -116,7 +119,7 @@
 
     class AAudioClient : public android::IBinder::DeathRecipient, public BnAAudioClient {
     public:
-        AAudioClient(android::wp<AAudioBinderClient> aaudioBinderClient)
+        explicit AAudioClient(const android::wp<AAudioBinderClient>& aaudioBinderClient)
                 : mBinderClient(aaudioBinderClient) {
         }
 
@@ -150,10 +153,10 @@
     class Adapter : public AAudioBinderAdapter {
     public:
         Adapter(const android::sp<IAAudioService>& delegate,
-                const android::sp<AAudioClient>& aaudioClient)
+                android::sp<AAudioClient> aaudioClient)
                 : AAudioBinderAdapter(delegate.get()),
                   mDelegate(delegate),
-                  mAAudioClient(aaudioClient) {}
+                  mAAudioClient(std::move(aaudioClient)) {}
 
         virtual ~Adapter() {
             if (mDelegate != nullptr) {
diff --git a/media/libaaudio/src/binding/AAudioServiceInterface.h b/media/libaaudio/src/binding/AAudioServiceInterface.h
index 5d11512..e901767 100644
--- a/media/libaaudio/src/binding/AAudioServiceInterface.h
+++ b/media/libaaudio/src/binding/AAudioServiceInterface.h
@@ -37,7 +37,7 @@
 class AAudioServiceInterface {
 public:
 
-    AAudioServiceInterface() {};
+    AAudioServiceInterface() = default;
     virtual ~AAudioServiceInterface() = default;
 
     virtual void registerClient(const android::sp<IAAudioClient>& client) = 0;
@@ -95,6 +95,16 @@
 
     virtual aaudio_result_t stopClient(aaudio_handle_t streamHandle,
                                        audio_port_handle_t clientHandle) = 0;
+
+    /**
+     * Exit the standby mode.
+     *
+     * @param streamHandle the stream handle
+     * @param parcelable contains new data queue information
+     * @return the result of the execution
+     */
+    virtual aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+                                        AudioEndpointParcelable &parcelable) = 0;
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index bec4393..b60bac2 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -23,11 +23,13 @@
 #include <sys/mman.h>
 #include <aaudio/AAudio.h>
 
+#include <media/AidlConversion.h>
+
 #include "binding/AAudioStreamConfiguration.h"
 
 using namespace aaudio;
 
-using android::media::audio::common::AudioFormat;
+using android::media::audio::common::AudioFormatDescription;
 
 AAudioStreamConfiguration::AAudioStreamConfiguration(const StreamParameters& parcelable) {
     setChannelMask(parcelable.channelMask);
@@ -35,8 +37,9 @@
     setDeviceId(parcelable.deviceId);
     static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(parcelable.sharingMode));
     setSharingMode(parcelable.sharingMode);
-    static_assert(sizeof(audio_format_t) == sizeof(parcelable.audioFormat));
-    setFormat(static_cast<audio_format_t>(parcelable.audioFormat));
+    auto convFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
+            parcelable.audioFormat);
+    setFormat(convFormat.ok() ? convFormat.value() : AUDIO_FORMAT_INVALID);
     static_assert(sizeof(aaudio_direction_t) == sizeof(parcelable.direction));
     setDirection(parcelable.direction);
     static_assert(sizeof(audio_usage_t) == sizeof(parcelable.usage));
@@ -75,8 +78,14 @@
     result.deviceId = getDeviceId();
     static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(result.sharingMode));
     result.sharingMode = getSharingMode();
-    static_assert(sizeof(audio_format_t) == sizeof(result.audioFormat));
-    result.audioFormat = static_cast<AudioFormat>(getFormat());
+    auto convAudioFormat = android::legacy2aidl_audio_format_t_AudioFormatDescription(getFormat());
+    if (convAudioFormat.ok()) {
+        result.audioFormat = convAudioFormat.value();
+    } else {
+        result.audioFormat = AudioFormatDescription{};
+        result.audioFormat.type =
+                android::media::audio::common::AudioFormatType::SYS_RESERVED_INVALID;
+    }
     static_assert(sizeof(aaudio_direction_t) == sizeof(result.direction));
     result.direction = getDirection();
     static_assert(sizeof(audio_usage_t) == sizeof(result.usage));
diff --git a/media/libaaudio/src/binding/AAudioStreamRequest.cpp b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
index 8d90034..a4cc2bd 100644
--- a/media/libaaudio/src/binding/AAudioStreamRequest.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamRequest.cpp
@@ -30,7 +30,7 @@
 using namespace aaudio;
 
 AAudioStreamRequest::AAudioStreamRequest(const StreamRequest& parcelable) :
-        mConfiguration(std::move(parcelable.params)),
+        mConfiguration(parcelable.params),
         mAttributionSource(parcelable.attributionSource),
         mSharingModeMatchRequired(parcelable.sharingModeMatchRequired),
         mInService(parcelable.inService) {
@@ -38,7 +38,7 @@
 
 StreamRequest AAudioStreamRequest::parcelable() const {
     StreamRequest result;
-    result.params = std::move(mConfiguration).parcelable();
+    result.params = mConfiguration.parcelable();
     result.attributionSource = mAttributionSource;
     result.sharingModeMatchRequired = mSharingModeMatchRequired;
     result.inService = mInService;
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index aa4ac27..b1262df 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -29,17 +29,15 @@
 #include "binding/AudioEndpointParcelable.h"
 
 using android::base::unique_fd;
-using android::media::SharedFileRegion;
-using android::NO_ERROR;
 using android::status_t;
 
 using namespace aaudio;
 
 AudioEndpointParcelable::AudioEndpointParcelable(Endpoint&& parcelable)
-        : mUpMessageQueueParcelable(std::move(parcelable.upMessageQueueParcelable)),
-          mDownMessageQueueParcelable(std::move(parcelable.downMessageQueueParcelable)),
-          mUpDataQueueParcelable(std::move(parcelable.upDataQueueParcelable)),
-          mDownDataQueueParcelable(std::move(parcelable.downDataQueueParcelable)),
+        : mUpMessageQueueParcelable(parcelable.upMessageQueueParcelable),
+          mDownMessageQueueParcelable(parcelable.downMessageQueueParcelable),
+          mUpDataQueueParcelable(parcelable.upDataQueueParcelable),
+          mDownDataQueueParcelable(parcelable.downDataQueueParcelable),
           mNumSharedMemories(parcelable.sharedMemories.size()) {
     for (size_t i = 0; i < parcelable.sharedMemories.size() && i < MAX_SHARED_MEMORIES; ++i) {
         // Re-construct.
@@ -56,10 +54,10 @@
 
 Endpoint AudioEndpointParcelable::parcelable()&& {
     Endpoint result;
-    result.upMessageQueueParcelable = std::move(mUpMessageQueueParcelable).parcelable();
-    result.downMessageQueueParcelable = std::move(mDownMessageQueueParcelable).parcelable();
-    result.upDataQueueParcelable = std::move(mUpDataQueueParcelable).parcelable();
-    result.downDataQueueParcelable = std::move(mDownDataQueueParcelable).parcelable();
+    result.upMessageQueueParcelable = mUpMessageQueueParcelable.parcelable();
+    result.downMessageQueueParcelable = mDownMessageQueueParcelable.parcelable();
+    result.upDataQueueParcelable = mUpDataQueueParcelable.parcelable();
+    result.downDataQueueParcelable = mDownDataQueueParcelable.parcelable();
     result.sharedMemories.reserve(std::min(mNumSharedMemories, MAX_SHARED_MEMORIES));
     for (size_t i = 0; i < mNumSharedMemories && i < MAX_SHARED_MEMORIES; ++i) {
         result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
@@ -81,6 +79,22 @@
     return index;
 }
 
+void AudioEndpointParcelable::closeDataFileDescriptor() {
+    const int32_t curDataMemoryIndex = mDownDataQueueParcelable.getSharedMemoryIndex();
+    mSharedMemories[curDataMemoryIndex].closeAndReleaseFd();
+}
+
+void AudioEndpointParcelable::updateDataFileDescriptor(
+        AudioEndpointParcelable* endpointParcelable) {
+    const int32_t curDataMemoryIndex = mDownDataQueueParcelable.getSharedMemoryIndex();
+    const int32_t newDataMemoryIndex =
+            endpointParcelable->mDownDataQueueParcelable.getSharedMemoryIndex();
+    mSharedMemories[curDataMemoryIndex].close();
+    mSharedMemories[curDataMemoryIndex].setup(
+            endpointParcelable->mSharedMemories[newDataMemoryIndex]);
+    mDownDataQueueParcelable.updateMemory(endpointParcelable->mDownDataQueueParcelable);
+}
+
 aaudio_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
     aaudio_result_t result = mUpMessageQueueParcelable.resolve(mSharedMemories,
                                                            &descriptor->upMessageQueueDescriptor);
@@ -94,6 +108,10 @@
     return result;
 }
 
+aaudio_result_t AudioEndpointParcelable::resolveDataQueue(RingBufferDescriptor *descriptor) {
+    return mDownDataQueueParcelable.resolve(mSharedMemories, descriptor);
+}
+
 aaudio_result_t AudioEndpointParcelable::close() {
     int err = 0;
     for (int i = 0; i < mNumSharedMemories; i++) {
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index 5237a1a..5d2c38f 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -43,7 +43,7 @@
     // Ctor/assignment from a parcelable representation.
     // Since the parcelable object owns unique FDs (for shared memory blocks), move semantics are
     // provided to avoid the need to dupe.
-    AudioEndpointParcelable(Endpoint&& parcelable);
+    explicit AudioEndpointParcelable(Endpoint&& parcelable);
     AudioEndpointParcelable& operator=(Endpoint&& parcelable);
 
     /**
@@ -52,7 +52,20 @@
      */
     int32_t addFileDescriptor(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
+    /**
+     * Close the current data file descriptor. The duplicated file descriptor will be closed.
+     */
+    void closeDataFileDescriptor();
+
+    /**
+     * Update the current data file descriptor with the given endpoint parcelable.
+     * @param endpointParcelable an endpoint parcelable that contains new data file
+     *                           descriptor information
+     */
+    void updateDataFileDescriptor(AudioEndpointParcelable* endpointParcelable);
+
     aaudio_result_t resolve(EndpointDescriptor *descriptor);
+    aaudio_result_t resolveDataQueue(RingBufferDescriptor *descriptor);
 
     aaudio_result_t close();
 
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index a4b3cec..3bc51d0 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -30,9 +30,10 @@
 using namespace aaudio;
 
 RingBufferParcelable::RingBufferParcelable(const RingBuffer& parcelable)
-        : mReadCounterParcelable(std::move(parcelable.readCounterParcelable)),
-          mWriteCounterParcelable(std::move(parcelable.writeCounterParcelable)),
-          mDataParcelable(std::move(parcelable.dataParcelable)),
+        : mReadCounterParcelable(parcelable.readCounterParcelable),
+          mWriteCounterParcelable(parcelable.writeCounterParcelable),
+          mDataParcelable(parcelable.dataParcelable),
+          mSharedMemoryIndex(parcelable.sharedMemoryIndex),
           mBytesPerFrame(parcelable.bytesPerFrame),
           mFramesPerBurst(parcelable.framesPerBurst),
           mCapacityInFrames(parcelable.capacityInFrames),
@@ -42,9 +43,10 @@
 
 RingBuffer RingBufferParcelable::parcelable() const {
     RingBuffer result;
-    result.readCounterParcelable = std::move(mReadCounterParcelable).parcelable();
-    result.writeCounterParcelable = std::move(mWriteCounterParcelable).parcelable();
-    result.dataParcelable = std::move(mDataParcelable).parcelable();
+    result.readCounterParcelable = mReadCounterParcelable.parcelable();
+    result.writeCounterParcelable = mWriteCounterParcelable.parcelable();
+    result.dataParcelable = mDataParcelable.parcelable();
+    result.sharedMemoryIndex = mSharedMemoryIndex;
     result.bytesPerFrame = mBytesPerFrame;
     result.framesPerBurst = mFramesPerBurst;
     result.capacityInFrames = mCapacityInFrames;
@@ -60,6 +62,7 @@
                  int32_t readCounterOffset,
                  int32_t writeCounterOffset,
                  int32_t counterSizeBytes) {
+    mSharedMemoryIndex = sharedMemoryIndex;
     mReadCounterParcelable.setup(sharedMemoryIndex, readCounterOffset, counterSizeBytes);
     mWriteCounterParcelable.setup(sharedMemoryIndex, writeCounterOffset, counterSizeBytes);
     mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
@@ -68,12 +71,13 @@
 void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
                  int32_t dataMemoryOffset,
                  int32_t dataSizeInBytes) {
+    mSharedMemoryIndex = sharedMemoryIndex;
     mReadCounterParcelable.setup(sharedMemoryIndex, 0, 0);
     mWriteCounterParcelable.setup(sharedMemoryIndex, 0, 0);
     mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
 }
 
-int32_t RingBufferParcelable::getBytesPerFrame() {
+int32_t RingBufferParcelable::getBytesPerFrame() const {
     return mBytesPerFrame;
 }
 
@@ -81,7 +85,7 @@
     mBytesPerFrame = bytesPerFrame;
 }
 
-int32_t RingBufferParcelable::getFramesPerBurst() {
+int32_t RingBufferParcelable::getFramesPerBurst() const {
     return mFramesPerBurst;
 }
 
@@ -89,7 +93,7 @@
     mFramesPerBurst = framesPerBurst;
 }
 
-int32_t RingBufferParcelable::getCapacityInFrames() {
+int32_t RingBufferParcelable::getCapacityInFrames() const {
     return mCapacityInFrames;
 }
 
@@ -124,6 +128,14 @@
     return AAUDIO_OK;
 }
 
+void RingBufferParcelable::updateMemory(const RingBufferParcelable& parcelable) {
+    setupMemory(mSharedMemoryIndex, 0,
+                parcelable.getCapacityInFrames() * parcelable.getBytesPerFrame());
+    setBytesPerFrame(parcelable.getBytesPerFrame());
+    setFramesPerBurst(parcelable.getFramesPerBurst());
+    setCapacityInFrames(parcelable.getCapacityInFrames());
+}
+
 aaudio_result_t RingBufferParcelable::validate() const {
     if (mCapacityInFrames < 0 || mCapacityInFrames >= 32 * 1024) {
         ALOGE("invalid mCapacityInFrames = %d", mCapacityInFrames);
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.h b/media/libaaudio/src/binding/RingBufferParcelable.h
index 2508cea..29d0d86 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.h
+++ b/media/libaaudio/src/binding/RingBufferParcelable.h
@@ -46,15 +46,15 @@
                      int32_t dataMemoryOffset,
                      int32_t dataSizeInBytes);
 
-    int32_t getBytesPerFrame();
+    int32_t getBytesPerFrame() const;
 
     void setBytesPerFrame(int32_t bytesPerFrame);
 
-    int32_t getFramesPerBurst();
+    int32_t getFramesPerBurst() const;
 
     void setFramesPerBurst(int32_t framesPerBurst);
 
-    int32_t getCapacityInFrames();
+    int32_t getCapacityInFrames() const;
 
     void setCapacityInFrames(int32_t capacityInFrames);
 
@@ -62,6 +62,12 @@
 
     aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
 
+    void updateMemory(const RingBufferParcelable& parcelable);
+
+    int32_t getSharedMemoryIndex() const {
+        return mSharedMemoryIndex;
+    }
+
     void dump();
 
     // Extract a parcelable representation of this object.
@@ -71,6 +77,7 @@
     SharedRegionParcelable  mReadCounterParcelable;
     SharedRegionParcelable  mWriteCounterParcelable;
     SharedRegionParcelable  mDataParcelable;
+    int32_t                 mSharedMemoryIndex = -1;
     int32_t                 mBytesPerFrame = 0;     // index is in frames
     int32_t                 mFramesPerBurst = 0;    // for ISOCHRONOUS queues
     int32_t                 mCapacityInFrames = 0;  // zero if unused
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index eef238f..741aefc 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -32,7 +32,6 @@
 #include "binding/SharedMemoryParcelable.h"
 
 using android::base::unique_fd;
-using android::NO_ERROR;
 using android::status_t;
 using android::media::SharedFileRegion;
 
@@ -65,6 +64,10 @@
     mSizeInBytes = sizeInBytes;
 }
 
+void SharedMemoryParcelable::setup(const SharedMemoryParcelable &sharedMemoryParcelable) {
+    setup(sharedMemoryParcelable.mFd, sharedMemoryParcelable.mSizeInBytes);
+}
+
 aaudio_result_t SharedMemoryParcelable::close() {
     if (mResolvedAddress != MMAP_UNRESOLVED_ADDRESS) {
         int err = munmap(mResolvedAddress, mSizeInBytes);
@@ -77,8 +80,16 @@
     return AAUDIO_OK;
 }
 
+aaudio_result_t SharedMemoryParcelable::closeAndReleaseFd() {
+    aaudio_result_t result = close();
+    if (result == AAUDIO_OK) {
+        mFd.reset();
+    }
+    return result;
+}
+
 aaudio_result_t SharedMemoryParcelable::resolveSharedMemory(const unique_fd& fd) {
-    mResolvedAddress = (uint8_t *) mmap(0, mSizeInBytes, PROT_READ | PROT_WRITE,
+    mResolvedAddress = (uint8_t *) mmap(nullptr, mSizeInBytes, PROT_READ | PROT_WRITE,
                                         MAP_SHARED, fd.get(), 0);
     if (mResolvedAddress == MMAP_UNRESOLVED_ADDRESS) {
         ALOGE("mmap() failed for fd = %d, nBytes = %" PRId64 ", errno = %s",
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 1f2c335..7762fef 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -52,12 +52,16 @@
      */
     void setup(const android::base::unique_fd& fd, int32_t sizeInBytes);
 
+    void setup(const SharedMemoryParcelable& sharedMemoryParcelable);
+
     // mmap() shared memory
     aaudio_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
 
     // munmap() any mapped memory
     aaudio_result_t close();
 
+    aaudio_result_t closeAndReleaseFd();
+
     int32_t getSizeInBytes();
 
     void dump();
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index 56b99c0..6fa109b 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -29,10 +29,7 @@
 #include "binding/SharedMemoryParcelable.h"
 #include "binding/SharedRegionParcelable.h"
 
-using android::NO_ERROR;
 using android::status_t;
-using android::Parcel;
-using android::Parcelable;
 
 using namespace aaudio;
 
diff --git a/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
index 44d2211..485c2e2 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/IAAudioService.aidl
@@ -78,4 +78,6 @@
 
     int unregisterAudioThread(int streamHandle,
                               int clientThreadId);
+
+    int exitStandby(int streamHandle, out Endpoint endpoint);
 }
diff --git a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
index a58b33a..dd64493 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
@@ -26,4 +26,5 @@
     int                 framesPerBurst;    // for ISOCHRONOUS queues
     int                 capacityInFrames;  // zero if unused
     int /* RingbufferFlags */ flags;  // = RingbufferFlags::NONE;
+    int                 sharedMemoryIndex;
 }
\ No newline at end of file
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index a6541e1..983e193 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -16,14 +16,14 @@
 
 package aaudio;
 
-import android.media.audio.common.AudioFormat;
+import android.media.audio.common.AudioFormatDescription;
 
 parcelable StreamParameters {
     int                                       channelMask;  //          = AAUDIO_UNSPECIFIED;
     int                                       sampleRate;  //           = AAUDIO_UNSPECIFIED;
     int                                       deviceId;  //             = AAUDIO_UNSPECIFIED;
     int /* aaudio_sharing_mode_t */           sharingMode;  //          = AAUDIO_SHARING_MODE_SHARED;
-    AudioFormat                               audioFormat;  //          = AUDIO_FORMAT_DEFAULT;
+    AudioFormatDescription                    audioFormat;  //          = AUDIO_FORMAT_DEFAULT;
     int /* aaudio_direction_t */              direction;  //            = AAUDIO_DIRECTION_OUTPUT;
     int /* aaudio_usage_t */                  usage;  //                = AAUDIO_UNSPECIFIED;
     int /* aaudio_content_type_t */           contentType;  //          = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 61b50f3..2ed3e3c 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -21,7 +21,10 @@
 #include "AAudioFlowGraph.h"
 
 #include <flowgraph/ClipToRange.h>
+#include <flowgraph/ManyToMultiConverter.h>
+#include <flowgraph/MonoBlend.h>
 #include <flowgraph/MonoToMultiConverter.h>
+#include <flowgraph/MultiToManyConverter.h>
 #include <flowgraph/RampLinear.h>
 #include <flowgraph/SinkFloat.h>
 #include <flowgraph/SinkI16.h>
@@ -32,17 +35,22 @@
 #include <flowgraph/SourceI24.h>
 #include <flowgraph/SourceI32.h>
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 aaudio_result_t AAudioFlowGraph::configure(audio_format_t sourceFormat,
                           int32_t sourceChannelCount,
                           audio_format_t sinkFormat,
-                          int32_t sinkChannelCount) {
-    AudioFloatOutputPort *lastOutput = nullptr;
+                          int32_t sinkChannelCount,
+                          bool useMonoBlend,
+                          float audioBalance,
+                          bool isExclusive) {
+    FlowGraphPortFloatOutput *lastOutput = nullptr;
 
     // TODO change back to ALOGD
-    ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d",
-          __func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount);
+    ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d, "
+          "useMonoBlend = %d, audioBalance = %f, isExclusive %d",
+          __func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount,
+          useMonoBlend, audioBalance, isExclusive);
 
     switch (sourceFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
@@ -63,10 +71,11 @@
     }
     lastOutput = &mSource->output;
 
-    // Apply volume as a ramp to avoid pops.
-    mVolumeRamp = std::make_unique<RampLinear>(sourceChannelCount);
-    lastOutput->connect(&mVolumeRamp->input);
-    lastOutput = &mVolumeRamp->output;
+    if (useMonoBlend) {
+        mMonoBlend = std::make_unique<MonoBlend>(sourceChannelCount);
+        lastOutput->connect(&mMonoBlend->input);
+        lastOutput = &mMonoBlend->output;
+    }
 
     // For a pure float graph, there is chance that the data range may be very large.
     // So we should clip to a reasonable value that allows a little headroom.
@@ -86,6 +95,26 @@
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
+    // Apply volume ramps only for exclusive streams.
+    if (isExclusive) {
+        // Apply volume ramps to set the left/right audio balance and target volumes.
+        // The signals will be decoupled and volume ramps applied before the signals are
+        // combined again.
+        mMultiToManyConverter = std::make_unique<MultiToManyConverter>(sinkChannelCount);
+        mManyToMultiConverter = std::make_unique<ManyToMultiConverter>(sinkChannelCount);
+        lastOutput->connect(&mMultiToManyConverter->input);
+        for (int i = 0; i < sinkChannelCount; i++) {
+            mVolumeRamps.emplace_back(std::make_unique<RampLinear>(1));
+            mPanningVolumes.emplace_back(1.0f);
+            lastOutput = mMultiToManyConverter->outputs[i].get();
+            lastOutput->connect(&(mVolumeRamps[i].get()->input));
+            lastOutput = &(mVolumeRamps[i].get()->output);
+            lastOutput->connect(mManyToMultiConverter->inputs[i].get());
+        }
+        lastOutput = &mManyToMultiConverter->output;
+        setAudioBalance(audioBalance);
+    }
+
     switch (sinkFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
             mSink = std::make_unique<SinkFloat>(sinkChannelCount);
@@ -117,9 +146,32 @@
  * @param volume between 0.0 and 1.0
  */
 void AAudioFlowGraph::setTargetVolume(float volume) {
-    mVolumeRamp->setTarget(volume);
+    for (int i = 0; i < mVolumeRamps.size(); i++) {
+        mVolumeRamps[i]->setTarget(volume * mPanningVolumes[i]);
+    }
+    mTargetVolume = volume;
 }
 
+/**
+ * @param audioBalance between -1.0 and 1.0
+ */
+void AAudioFlowGraph::setAudioBalance(float audioBalance) {
+    if (mPanningVolumes.size() >= 2) {
+        float leftMultiplier = 0;
+        float rightMultiplier = 0;
+        mBalance.computeStereoBalance(audioBalance, &leftMultiplier, &rightMultiplier);
+        mPanningVolumes[0] = leftMultiplier;
+        mPanningVolumes[1] = rightMultiplier;
+        mVolumeRamps[0]->setTarget(mTargetVolume * leftMultiplier);
+        mVolumeRamps[1]->setTarget(mTargetVolume * rightMultiplier);
+    }
+}
+
+/**
+ * @param numFrames to slowly adjust for volume changes
+ */
 void AAudioFlowGraph::setRampLengthInFrames(int32_t numFrames) {
-    mVolumeRamp->setLengthInFrames(numFrames);
+    for (auto& ramp : mVolumeRamps) {
+        ramp->setLengthInFrames(numFrames);
+    }
 }
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index a49f64e..602c17f 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -23,8 +23,12 @@
 #include <system/audio.h>
 
 #include <aaudio/AAudio.h>
+#include <audio_utils/Balance.h>
 #include <flowgraph/ClipToRange.h>
+#include <flowgraph/ManyToMultiConverter.h>
+#include <flowgraph/MonoBlend.h>
 #include <flowgraph/MonoToMultiConverter.h>
+#include <flowgraph/MultiToManyConverter.h>
 #include <flowgraph/RampLinear.h>
 
 class AAudioFlowGraph {
@@ -36,12 +40,19 @@
      * @param sourceChannelCount
      * @param sinkFormat
      * @param sinkChannelCount
+     * @param useMonoBlend
+     * @param audioBalance
+     * @param isExclusive
      * @return
      */
     aaudio_result_t configure(audio_format_t sourceFormat,
                               int32_t sourceChannelCount,
                               audio_format_t sinkFormat,
-                              int32_t sinkChannelCount);
+                              int32_t sinkChannelCount,
+                              bool useMonoBlend,
+                              float audioBalance,
+                              bool isExclusive);
 
     void process(const void *source, void *destination, int32_t numFrames);
 
@@ -50,14 +61,30 @@
      */
     void setTargetVolume(float volume);
 
+    /**
+     * @param audioBalance between -1.0 and 1.0
+     */
+    void setAudioBalance(float audioBalance);
+
+    /**
+     * @param numFrames to slowly adjust for volume changes
+     */
     void setRampLengthInFrames(int32_t numFrames);
 
 private:
-    std::unique_ptr<flowgraph::AudioSource>          mSource;
-    std::unique_ptr<flowgraph::RampLinear>           mVolumeRamp;
-    std::unique_ptr<flowgraph::ClipToRange>          mClipper;
-    std::unique_ptr<flowgraph::MonoToMultiConverter> mChannelConverter;
-    std::unique_ptr<flowgraph::AudioSink>            mSink;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ClipToRange> mClipper;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ManyToMultiConverter>
+            mManyToMultiConverter;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MultiToManyConverter>
+            mMultiToManyConverter;
+    std::vector<std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::RampLinear>> mVolumeRamps;
+    std::vector<float> mPanningVolumes;
+    float mTargetVolume = 1.0f;
+    android::audio_utils::Balance mBalance;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSink> mSink;
 };
 
 
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index ebc9f2b..e780f4f 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -31,13 +31,6 @@
 #define RIDICULOUSLY_LARGE_BUFFER_CAPACITY   (256 * 1024)
 #define RIDICULOUSLY_LARGE_FRAME_SIZE        4096
 
-AudioEndpoint::AudioEndpoint()
-    : mFreeRunning(false)
-    , mDataReadCounter(0)
-    , mDataWriteCounter(0)
-{
-}
-
 // TODO Consider moving to a method in RingBufferDescriptor
 static aaudio_result_t AudioEndpoint_validateQueueDescriptor(const char *type,
                                                   const RingBufferDescriptor *descriptor) {
@@ -146,38 +139,49 @@
     );
 
     // ============================ data queue =============================
-    descriptor = &pEndpointDescriptor->dataQueueDescriptor;
-    ALOGV("configure() data framesPerBurst = %d", descriptor->framesPerBurst);
+    result = configureDataQueue(pEndpointDescriptor->dataQueueDescriptor, direction);
+
+    return result;
+}
+
+aaudio_result_t AudioEndpoint::configureDataQueue(const RingBufferDescriptor& descriptor,
+                                                  aaudio_direction_t direction) {
+    aaudio_result_t result = AudioEndpoint_validateQueueDescriptor("data", &descriptor);
+    if (result != AAUDIO_OK) {
+        return result;
+    }
+
+    ALOGV("configure() data framesPerBurst = %d", descriptor.framesPerBurst);
     ALOGV("configure() data readCounterAddress = %p",
-          descriptor->readCounterAddress);
+          descriptor.readCounterAddress);
 
     // An example of free running is when the other side is read or written by hardware DMA
     // or a DSP. It does not update its counter so we have to update it.
     int64_t *remoteCounter = (direction == AAUDIO_DIRECTION_OUTPUT)
-                             ? descriptor->readCounterAddress // read by other side
-                             : descriptor->writeCounterAddress; // written by other side
+                             ? descriptor.readCounterAddress // read by other side
+                             : descriptor.writeCounterAddress; // written by other side
     mFreeRunning = (remoteCounter == nullptr);
     ALOGV("configure() mFreeRunning = %d", mFreeRunning ? 1 : 0);
 
-    int64_t *readCounterAddress = (descriptor->readCounterAddress == nullptr)
+    int64_t *readCounterAddress = (descriptor.readCounterAddress == nullptr)
                                   ? &mDataReadCounter
-                                  : descriptor->readCounterAddress;
-    int64_t *writeCounterAddress = (descriptor->writeCounterAddress == nullptr)
+                                  : descriptor.readCounterAddress;
+    int64_t *writeCounterAddress = (descriptor.writeCounterAddress == nullptr)
                                   ? &mDataWriteCounter
-                                  : descriptor->writeCounterAddress;
+                                  : descriptor.writeCounterAddress;
 
     // Clear buffer to avoid an initial glitch on some devices.
-    size_t bufferSizeBytes = descriptor->capacityInFrames * descriptor->bytesPerFrame;
-    memset(descriptor->dataAddress, 0, bufferSizeBytes);
+    size_t bufferSizeBytes = descriptor.capacityInFrames * descriptor.bytesPerFrame;
+    memset(descriptor.dataAddress, 0, bufferSizeBytes);
 
     mDataQueue = std::make_unique<FifoBufferIndirect>(
-            descriptor->bytesPerFrame,
-            descriptor->capacityInFrames,
+            descriptor.bytesPerFrame,
+            descriptor.capacityInFrames,
             readCounterAddress,
             writeCounterAddress,
-            descriptor->dataAddress
+            descriptor.dataAddress
     );
-    uint32_t threshold = descriptor->capacityInFrames / 2;
+    uint32_t threshold = descriptor.capacityInFrames / 2;
     mDataQueue->setThreshold(threshold);
     return result;
 }
@@ -188,47 +192,66 @@
 }
 
 int32_t AudioEndpoint::getEmptyFramesAvailable(WrappingBuffer *wrappingBuffer) {
-    return mDataQueue->getEmptyRoomAvailable(wrappingBuffer);
+    return mDataQueue == nullptr ? 0 : mDataQueue->getEmptyRoomAvailable(wrappingBuffer);
 }
 
 int32_t AudioEndpoint::getEmptyFramesAvailable() {
-    return mDataQueue->getEmptyFramesAvailable();
+    return mDataQueue == nullptr ? 0 : mDataQueue->getEmptyFramesAvailable();
 }
 
 int32_t AudioEndpoint::getFullFramesAvailable(WrappingBuffer *wrappingBuffer) {
-    return mDataQueue->getFullDataAvailable(wrappingBuffer);
+    return mDataQueue == nullptr ? 0 : mDataQueue->getFullDataAvailable(wrappingBuffer);
 }
 
 int32_t AudioEndpoint::getFullFramesAvailable() {
-    return mDataQueue->getFullFramesAvailable();
+    return mDataQueue == nullptr ? 0 : mDataQueue->getFullFramesAvailable();
+}
+
+android::fifo_frames_t AudioEndpoint::read(void *buffer, android::fifo_frames_t numFrames) {
+    return mDataQueue == nullptr ? 0 : mDataQueue->read(buffer, numFrames);
+}
+
+android::fifo_frames_t AudioEndpoint::write(void *buffer, android::fifo_frames_t numFrames) {
+    return mDataQueue == nullptr ? 0 : mDataQueue->write(buffer, numFrames);
 }
 
 void AudioEndpoint::advanceWriteIndex(int32_t deltaFrames) {
-    mDataQueue->advanceWriteIndex(deltaFrames);
+    if (mDataQueue != nullptr) {
+        mDataQueue->advanceWriteIndex(deltaFrames);
+    }
 }
 
 void AudioEndpoint::advanceReadIndex(int32_t deltaFrames) {
-    mDataQueue->advanceReadIndex(deltaFrames);
+    if (mDataQueue != nullptr) {
+        mDataQueue->advanceReadIndex(deltaFrames);
+    }
 }
 
 void AudioEndpoint::setDataReadCounter(fifo_counter_t framesRead) {
-    mDataQueue->setReadCounter(framesRead);
+    if (mDataQueue != nullptr) {
+        mDataQueue->setReadCounter(framesRead);
+    }
 }
 
 fifo_counter_t AudioEndpoint::getDataReadCounter() const {
-    return mDataQueue->getReadCounter();
+    return mDataQueue == nullptr ? 0 : mDataQueue->getReadCounter();
 }
 
 void AudioEndpoint::setDataWriteCounter(fifo_counter_t framesRead) {
-    mDataQueue->setWriteCounter(framesRead);
+    if (mDataQueue != nullptr) {
+        mDataQueue->setWriteCounter(framesRead);
+    }
 }
 
 fifo_counter_t AudioEndpoint::getDataWriteCounter() const {
-    return mDataQueue->getWriteCounter();
+    return mDataQueue == nullptr ? 0 : mDataQueue->getWriteCounter();
 }
 
 int32_t AudioEndpoint::setBufferSizeInFrames(int32_t requestedFrames,
                                             int32_t *actualFrames) {
+    if (mDataQueue == nullptr) {
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
     if (requestedFrames < ENDPOINT_DATA_QUEUE_SIZE_MIN) {
         requestedFrames = ENDPOINT_DATA_QUEUE_SIZE_MIN;
     }
@@ -238,11 +261,11 @@
 }
 
 int32_t AudioEndpoint::getBufferSizeInFrames() const {
-    return mDataQueue->getThreshold();
+    return mDataQueue == nullptr ? 0 : mDataQueue->getThreshold();
 }
 
 int32_t AudioEndpoint::getBufferCapacityInFrames() const {
-    return (int32_t)mDataQueue->getBufferCapacityInFrames();
+    return mDataQueue == nullptr ? 0 : (int32_t)mDataQueue->getBufferCapacityInFrames();
 }
 
 void AudioEndpoint::dump() const {
@@ -251,5 +274,7 @@
 }
 
 void AudioEndpoint::eraseDataMemory() {
-    mDataQueue->eraseMemory();
+    if (mDataQueue != nullptr) {
+        mDataQueue->eraseMemory();
+    }
 }
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 4c8d60f..01dd05a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_AAUDIO_AUDIO_ENDPOINT_H
 #define ANDROID_AAUDIO_AUDIO_ENDPOINT_H
 
+#include <mutex>
+
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioServiceMessage.h"
@@ -34,7 +36,7 @@
 class AudioEndpoint {
 
 public:
-    AudioEndpoint();
+    AudioEndpoint() = default;
 
     /**
      * Configure based on the EndPointDescriptor_t.
@@ -42,6 +44,9 @@
     aaudio_result_t configure(const EndpointDescriptor *pEndpointDescriptor,
                               aaudio_direction_t direction);
 
+    aaudio_result_t configureDataQueue(const RingBufferDescriptor &descriptor,
+                            aaudio_direction_t direction);
+
     /**
      * Read from a command passed up from the Server.
      * @return 1 if command received, 0 for no command, or negative error.
@@ -56,6 +61,10 @@
 
     int32_t getFullFramesAvailable();
 
+    android::fifo_frames_t read(void* buffer, android::fifo_frames_t numFrames);
+
+    android::fifo_frames_t write(void* buffer, android::fifo_frames_t numFrames);
+
     void advanceReadIndex(int32_t deltaFrames);
 
     void advanceWriteIndex(int32_t deltaFrames);
@@ -85,19 +94,31 @@
 
     int32_t getBufferCapacityInFrames() const;
 
+    void setThreshold(int32_t frames) {
+        mDataQueue->setThreshold(frames);
+    }
+
+    int32_t getThreshold() {
+        return mDataQueue->getThreshold();
+    }
+
     /**
      * Write zeros to the data queue memory.
      */
     void eraseDataMemory();
 
+    void freeDataQueue();
+
     void dump() const;
 
 private:
     std::unique_ptr<android::FifoBufferIndirect> mUpCommandQueue;
     std::unique_ptr<android::FifoBufferIndirect> mDataQueue;
-    bool                    mFreeRunning;
-    android::fifo_counter_t mDataReadCounter; // only used if free-running
-    android::fifo_counter_t mDataWriteCounter; // only used if free-running
+    bool                    mFreeRunning{false};
+    android::fifo_counter_t mDataReadCounter{0}; // only used if free-running
+    android::fifo_counter_t mDataWriteCounter{0}; // only used if free-running
+
+    std::mutex mDataQueueLock;
 };
 
 } // namespace aaudio
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index f933b29..9f0564f 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -27,6 +27,8 @@
 #include <aaudio/AAudio.h>
 #include <cutils/properties.h>
 
+#include <media/AudioParameter.h>
+#include <media/AudioSystem.h>
 #include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
@@ -49,8 +51,6 @@
 // This is needed to make sense of the logs more easily.
 #define LOG_TAG (mInService ? "AudioStreamInternal_Service" : "AudioStreamInternal_Client")
 
-using android::Mutex;
-using android::WrappingBuffer;
 using android::content::AttributionSourceState;
 
 using namespace aaudio;
@@ -81,8 +81,6 @@
 aaudio_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
 
     aaudio_result_t result = AAUDIO_OK;
-    int32_t framesPerBurst;
-    int32_t framesPerHardwareBurst;
     AAudioStreamRequest request;
     AAudioStreamConfiguration configurationOutput;
 
@@ -97,9 +95,6 @@
         return result;
     }
 
-    const int32_t burstMinMicros = AAudioProperty_getHardwareBurstMinMicros();
-    int32_t burstMicros = 0;
-
     const audio_format_t requestedFormat = getFormat();
     // We have to do volume scaling. So we prefer FLOAT format.
     if (requestedFormat == AUDIO_FORMAT_DEFAULT) {
@@ -215,12 +210,28 @@
         goto error;
     }
 
-    framesPerHardwareBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    if ((result = configureDataInformation(builder.getFramesPerDataCallback())) != AAUDIO_OK) {
+        goto error;
+    }
+
+    setState(AAUDIO_STREAM_STATE_OPEN);
+
+    return result;
+
+error:
+    safeReleaseClose();
+    return result;
+}
+
+aaudio_result_t AudioStreamInternal::configureDataInformation(int32_t callbackFrames) {
+    int32_t framesPerHardwareBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
 
     // Scale up the burst size to meet the minimum equivalent in microseconds.
     // This is to avoid waking the CPU too often when the HW burst is very small
     // or at high sample rates.
-    framesPerBurst = framesPerHardwareBurst;
+    int32_t framesPerBurst = framesPerHardwareBurst;
+    int32_t burstMicros = 0;
+    const int32_t burstMinMicros = android::AudioSystem::getAAudioHardwareBurstMinUsec();
     do {
         if (burstMicros > 0) {  // skip first loop
             framesPerBurst *= 2;
@@ -233,8 +244,7 @@
     // Validate final burst size.
     if (framesPerBurst < MIN_FRAMES_PER_BURST || framesPerBurst > MAX_FRAMES_PER_BURST) {
         ALOGE("%s - framesPerBurst out of range = %d", __func__, framesPerBurst);
-        result = AAUDIO_ERROR_OUT_OF_RANGE;
-        goto error;
+        return AAUDIO_ERROR_OUT_OF_RANGE;
     }
     setFramesPerBurst(framesPerBurst); // only save good value
 
@@ -242,26 +252,21 @@
     if (mBufferCapacityInFrames < getFramesPerBurst()
             || mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
         ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
-        result = AAUDIO_ERROR_OUT_OF_RANGE;
-        goto error;
+        return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
     mClockModel.setSampleRate(getSampleRate());
     mClockModel.setFramesPerBurst(framesPerHardwareBurst);
 
     if (isDataCallbackSet()) {
-        mCallbackFrames = builder.getFramesPerDataCallback();
+        mCallbackFrames = callbackFrames;
         if (mCallbackFrames > getBufferCapacity() / 2) {
             ALOGW("%s - framesPerCallback too big = %d, capacity = %d",
                   __func__, mCallbackFrames, getBufferCapacity());
-            result = AAUDIO_ERROR_OUT_OF_RANGE;
-            goto error;
-
+            return AAUDIO_ERROR_OUT_OF_RANGE;
         } else if (mCallbackFrames < 0) {
             ALOGW("%s - framesPerCallback negative", __func__);
-            result = AAUDIO_ERROR_OUT_OF_RANGE;
-            goto error;
-
+            return AAUDIO_ERROR_OUT_OF_RANGE;
         }
         if (mCallbackFrames == AAUDIO_UNSPECIFIED) {
             mCallbackFrames = getFramesPerBurst();
@@ -271,6 +276,18 @@
         mCallbackBuffer = std::make_unique<uint8_t[]>(callbackBufferSize);
     }
 
+    // Exclusive output streams should combine channels when mono audio adjustment
+    // is enabled. They should also adjust for audio balance.
+    if ((getDirection() == AAUDIO_DIRECTION_OUTPUT) &&
+        (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE)) {
+        bool isMasterMono = false;
+        android::AudioSystem::getMasterMono(&isMasterMono);
+        setRequireMonoBlend(isMasterMono);
+        float audioBalance = 0;
+        android::AudioSystem::getMasterBalance(&audioBalance);
+        setAudioBalance(audioBalance);
+    }
+
     // For debugging and analyzing the distribution of MMAP timestamps.
     // For OUTPUT, use a NEGATIVE offset to move the CPU writes further BEFORE the HW reads.
     // For INPUT, use a POSITIVE offset to move the CPU reads further AFTER the HW writes.
@@ -290,14 +307,7 @@
     }
 
     setBufferSize(mBufferCapacityInFrames / 2); // Default buffer size to match Q
-
-    setState(AAUDIO_STREAM_STATE_OPEN);
-
-    return result;
-
-error:
-    safeReleaseClose();
-    return result;
+    return AAUDIO_OK;
 }
 
 // This must be called under mStreamLock.
@@ -338,13 +348,67 @@
 {
     AudioStreamInternal *stream = (AudioStreamInternal *)context;
     //LOGD("oboe_callback_thread, stream = %p", stream);
-    if (stream != NULL) {
+    if (stream != nullptr) {
         return stream->callbackLoop();
     } else {
-        return NULL;
+        return nullptr;
     }
 }
 
+aaudio_result_t AudioStreamInternal::exitStandby_l() {
+    AudioEndpointParcelable endpointParcelable;
+    // The stream is in standby mode. Copy all available data and then close the duplicated
+    // shared file descriptor so that it won't cause issues when the HAL tries to reallocate a
+    // new shared file descriptor when exiting from standby.
+    // Cache the current read counter, which will be used to set the new read and write counters
+    // when the new data queue and endpoint are reconfigured.
+    const android::fifo_counter_t readCounter = mAudioEndpoint->getDataReadCounter();
+    // Cache the buffer size, which may have been set by the client.
+    const int32_t previousBufferSize = mBufferSizeInFrames;
+    // Copy all available data from current data queue.
+    uint8_t buffer[getBufferCapacity() * getBytesPerFrame()];
+    android::fifo_frames_t fullFramesAvailable =
+            mAudioEndpoint->read(buffer, getBufferCapacity());
+    mEndPointParcelable.closeDataFileDescriptor();
+    aaudio_result_t result = mServiceInterface.exitStandby(
+            mServiceStreamHandle, endpointParcelable);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to exit standby, error=%d", result);
+        goto exit;
+    }
+    // Reconstruct data queue descriptor using new shared file descriptor.
+    mEndPointParcelable.updateDataFileDescriptor(&endpointParcelable);
+    result = mEndPointParcelable.resolveDataQueue(&mEndpointDescriptor.dataQueueDescriptor);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to resolve data queue after exiting standby, error=%d", result);
+        goto exit;
+    }
+    // Reconfigure audio endpoint with new data queue descriptor.
+    mAudioEndpoint->configureDataQueue(
+            mEndpointDescriptor.dataQueueDescriptor, getDirection());
+    // Set both the read and write counters to the previous read counter. The write below
+    // will advance the write counter to the correct position.
+    mAudioEndpoint->setDataReadCounter(readCounter);
+    mAudioEndpoint->setDataWriteCounter(readCounter);
+    result = configureDataInformation(mCallbackFrames);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to configure data information after exiting standby, error=%d", result);
+        goto exit;
+    }
+    // Write data from previous data buffer to new endpoint.
+    if (android::fifo_frames_t framesWritten =
+                mAudioEndpoint->write(buffer, fullFramesAvailable);
+            framesWritten != fullFramesAvailable) {
+        ALOGW("Some data lost after exiting standby, frames written: %d, "
+              "frames to write: %d", framesWritten, fullFramesAvailable);
+    }
+    // Restore the previous buffer size as it may have been requested by the client.
+    setBufferSize(previousBufferSize);
+
+exit:
+    return result;
+}
+
 /*
  * It normally takes about 20-30 msec to start a stream on the server.
  * But the first time can take as much as 200-300 msec. The HW
@@ -381,8 +445,15 @@
     prepareBuffersForStart(); // tell subclasses to get ready
 
     aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
-    if (result == AAUDIO_ERROR_INVALID_HANDLE) {
-        ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
+    if (result == AAUDIO_ERROR_STANDBY) {
+        // The stream is in standby mode. Exit standby before starting the stream.
+        result = exitStandby_l();
+        if (result == AAUDIO_OK) {
+            result = mServiceInterface.startStream(mServiceStreamHandle);
+        }
+    }
+    if (result != AAUDIO_OK) {
+        ALOGD("%s() error = %d, stream was probably stolen", __func__, result);
         // Stealing was added in R. Coerce result to improve backward compatibility.
         result = AAUDIO_ERROR_DISCONNECTED;
         setState(AAUDIO_STREAM_STATE_DISCONNECTED);
@@ -402,6 +473,7 @@
         result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
     }
     if (result != AAUDIO_OK) {
+        // TODO(b/214607638): Do we want to roll back to original state or keep as disconnected?
         setState(originalState);
     }
     return result;
@@ -430,7 +502,7 @@
     if (isDataCallbackSet()
             && (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
         mCallbackEnabled.store(false);
-        aaudio_result_t result = joinThread_l(NULL); // may temporarily unlock mStreamLock
+        aaudio_result_t result = joinThread_l(nullptr); // may temporarily unlock mStreamLock
         if (result == AAUDIO_ERROR_INVALID_HANDLE) {
             ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
             result = AAUDIO_OK;
@@ -517,7 +589,7 @@
     return result;
 }
 
-aaudio_result_t AudioStreamInternal::getTimestamp(clockid_t clockId,
+aaudio_result_t AudioStreamInternal::getTimestamp(clockid_t /*clockId*/,
                            int64_t *framePosition,
                            int64_t *timeNanoseconds) {
     // Generated in server and passed to client. Return latest.
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index fbe4c13..2367572 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -116,7 +116,7 @@
 
     virtual void prepareBuffersForStart() {}
 
-    virtual void advanceClientToMatchServerPosition(int32_t serverMargin = 0) = 0;
+    virtual void advanceClientToMatchServerPosition(int32_t serverMargin) = 0;
 
     virtual void onFlushFromServer() {}
 
@@ -184,9 +184,14 @@
     aaudio_result_t writeNowWithConversion(const void *buffer,
                                      int32_t numFrames);
 
+    // Exit the stream from standby; this will reconstruct the data path.
+    aaudio_result_t exitStandby_l() REQUIRES(mStreamLock);
+
     // Adjust timing model based on timestamp from service.
     void processTimestamp(uint64_t position, int64_t time);
 
+    aaudio_result_t configureDataInformation(int32_t callbackFrames);
+
     // Thread on other side of FIFO will have wakeup jitter.
     // By delaying slightly we can avoid waking up before other side is ready.
     const int32_t            mWakeupDelayNanos; // delay past typical wakeup jitter
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index 2da5406..1efccb1 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -46,8 +46,6 @@
 
 }
 
-AudioStreamInternalCapture::~AudioStreamInternalCapture() {}
-
 void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter();
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
@@ -109,7 +107,7 @@
     if (mNeedCatchUp.isRequested()) {
         // Catch an MMAP pointer that is already advancing.
         // This will avoid initial underruns caused by a slow cold start.
-        advanceClientToMatchServerPosition();
+        advanceClientToMatchServerPosition(0 /*serverMargin*/);
         mNeedCatchUp.acknowledge();
     }
 
@@ -228,7 +226,7 @@
 void *AudioStreamInternalCapture::callbackLoop() {
     aaudio_result_t result = AAUDIO_OK;
     aaudio_data_callback_result_t callbackResult = AAUDIO_CALLBACK_RESULT_CONTINUE;
-    if (!isDataCallbackSet()) return NULL;
+    if (!isDataCallbackSet()) return nullptr;
 
     // result might be a frame count
     while (mCallbackEnabled.load() && isActive() && (result >= 0)) {
@@ -260,5 +258,5 @@
 
     ALOGD("callbackLoop() exiting, result = %d, isActive() = %d",
           result, (int) isActive());
-    return NULL;
+    return nullptr;
 }
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 251a7f2..87017de 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -28,8 +28,9 @@
 
 class AudioStreamInternalCapture : public AudioStreamInternal {
 public:
-    AudioStreamInternalCapture(AAudioServiceInterface  &serviceInterface, bool inService = false);
-    virtual ~AudioStreamInternalCapture();
+    explicit AudioStreamInternalCapture(AAudioServiceInterface  &serviceInterface,
+                                        bool inService = false);
+    virtual ~AudioStreamInternalCapture() = default;
 
     aaudio_result_t read(void *buffer,
                          int32_t numFrames,
@@ -45,7 +46,7 @@
     }
 protected:
 
-    void advanceClientToMatchServerPosition(int32_t serverOffset = 0) override;
+    void advanceClientToMatchServerPosition(int32_t serverOffset) override;
 
 /**
  * Low level data processing that will not block. It will just read or write as much as it can.
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 71bde90..7c7a969 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -44,8 +44,6 @@
 
 }
 
-AudioStreamInternalPlay::~AudioStreamInternalPlay() {}
-
 constexpr int kRampMSec = 10; // time to apply a change in volume
 
 aaudio_result_t AudioStreamInternalPlay::open(const AudioStreamBuilder &builder) {
@@ -54,7 +52,10 @@
         result = mFlowGraph.configure(getFormat(),
                              getSamplesPerFrame(),
                              getDeviceFormat(),
-                             getDeviceChannelCount());
+                             getDeviceChannelCount(),
+                             getRequireMonoBlend(),
+                             getAudioBalance(),
+                             (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE));
 
         if (result != AAUDIO_OK) {
             safeReleaseClose();
@@ -115,7 +116,7 @@
 }
 
 void AudioStreamInternalPlay::onFlushFromServer() {
-    advanceClientToMatchServerPosition();
+    advanceClientToMatchServerPosition(0 /*serverMargin*/);
 }
 
 // Write the data, block if needed and timeoutMillis > 0
@@ -201,10 +202,18 @@
                 break;
             case AAUDIO_STREAM_STATE_STARTED:
             {
-                // Sleep until the readCounter catches up and we only have
-                // the getBufferSize() frames of data sitting in the buffer.
-                int64_t nextReadPosition = mAudioEndpoint->getDataWriteCounter() - getBufferSize();
-                wakeTime = mClockModel.convertPositionToTime(nextReadPosition);
+                // Calculate when there will be room available to write to the buffer.
+                // If the appBufferSize is smaller than the endpointBufferSize then
+                // we will have room to write data beyond the appBufferSize.
+                // That is a technique used to reduce glitches without adding latency.
+                const int32_t appBufferSize = getBufferSize();
+                // The endpoint buffer size is set to the maximum that can be written.
+                // If we use it then we must carve out some room to write data when we wake up.
+                const int32_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
+                        - getFramesPerBurst();
+                const int32_t bestBufferSize = std::min(appBufferSize, endBufferSize);
+                int64_t targetReadPosition = mAudioEndpoint->getDataWriteCounter() - bestBufferSize;
+                wakeTime = mClockModel.convertPositionToTime(targetReadPosition);
             }
                 break;
             default:
@@ -281,7 +290,7 @@
     ALOGD("%s() entering >>>>>>>>>>>>>>>", __func__);
     aaudio_result_t result = AAUDIO_OK;
     aaudio_data_callback_result_t callbackResult = AAUDIO_CALLBACK_RESULT_CONTINUE;
-    if (!isDataCallbackSet()) return NULL;
+    if (!isDataCallbackSet()) return nullptr;
     int64_t timeoutNanos = calculateReasonableTimeout(mCallbackFrames);
 
     // result might be a frame count
@@ -309,7 +318,7 @@
 
     ALOGD("%s() exiting, result = %d, isActive() = %d <<<<<<<<<<<<<<",
           __func__, result, (int) isActive());
-    return NULL;
+    return nullptr;
 }
 
 //------------------------------------------------------------------------------
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index 03c957d..e761807 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -30,8 +30,9 @@
 
 class AudioStreamInternalPlay : public AudioStreamInternal {
 public:
-    AudioStreamInternalPlay(AAudioServiceInterface  &serviceInterface, bool inService = false);
-    virtual ~AudioStreamInternalPlay();
+    explicit AudioStreamInternalPlay(AAudioServiceInterface  &serviceInterface,
+                                     bool inService = false);
+    virtual ~AudioStreamInternalPlay() = default;
 
     aaudio_result_t open(const AudioStreamBuilder &builder) override;
 
@@ -66,7 +67,7 @@
 
     void prepareBuffersForStart() override;
 
-    void advanceClientToMatchServerPosition(int32_t serverMargin = 0) override;
+    void advanceClientToMatchServerPosition(int32_t serverMargin) override;
 
     void onFlushFromServer() override;
 
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index f0dcd44..6921271 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -43,14 +43,7 @@
 // and dumped to the log when the stream is stopped.
 
 IsochronousClockModel::IsochronousClockModel()
-        : mMarkerFramePosition(0)
-        , mMarkerNanoTime(0)
-        , mSampleRate(48000)
-        , mFramesPerBurst(48)
-        , mBurstPeriodNanos(0) // this will be updated before use
-        , mMaxMeasuredLatenessNanos(0)
-        , mLatenessForDriftNanos(kInitialLatenessForDriftNanos)
-        , mState(STATE_STOPPED)
+        : mLatenessForDriftNanos(kInitialLatenessForDriftNanos)
 {
     if ((AAudioProperty_getLogMask() & AAUDIO_LOG_CLOCK_MODEL_HISTOGRAM) != 0) {
         mHistogramMicros = std::make_unique<Histogram>(kHistogramBinCount,
diff --git a/media/libaaudio/src/client/IsochronousClockModel.h b/media/libaaudio/src/client/IsochronousClockModel.h
index 6280013..3007237 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.h
+++ b/media/libaaudio/src/client/IsochronousClockModel.h
@@ -149,16 +149,16 @@
     static constexpr int32_t   kHistogramBinWidthMicros = 50;
     static constexpr int32_t   kHistogramBinCount = 128;
 
-    int64_t             mMarkerFramePosition; // Estimated HW position.
-    int64_t             mMarkerNanoTime;      // Estimated HW time.
-    int32_t             mSampleRate;
-    int32_t             mFramesPerBurst;      // number of frames transferred at one time.
-    int32_t             mBurstPeriodNanos;    // Time between HW bursts.
+    int64_t             mMarkerFramePosition{0}; // Estimated HW position.
+    int64_t             mMarkerNanoTime{0};      // Estimated HW time.
+    int32_t             mSampleRate{48000};
+    int32_t             mFramesPerBurst{48};     // number of frames transferred at one time.
+    int32_t             mBurstPeriodNanos{0};    // Time between HW bursts.
     // Includes mBurstPeriodNanos because we sample randomly over time.
-    int32_t             mMaxMeasuredLatenessNanos;
+    int32_t             mMaxMeasuredLatenessNanos{0};
     // Threshold for lateness that triggers a drift later in time.
     int32_t             mLatenessForDriftNanos;
-    clock_model_state_t mState;               // State machine handles startup sequence.
+    clock_model_state_t mState{STATE_STOPPED};   // State machine handles startup sequence.
 
     int32_t             mTimestampCount = 0;  // For logging.
 
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index dc242b8..8b7b75e 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -30,9 +30,6 @@
 // HDMI supports up to 32 channels at 1536000 Hz.
 #define SAMPLE_RATE_HZ_MAX           1600000
 
-AAudioStreamParameters::AAudioStreamParameters() {}
-AAudioStreamParameters::~AAudioStreamParameters() {}
-
 void AAudioStreamParameters::copyFrom(const AAudioStreamParameters &other) {
     mSamplesPerFrame      = other.mSamplesPerFrame;
     mSampleRate           = other.mSampleRate;
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index fed036b..cb998bf 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -26,8 +26,8 @@
 
 class AAudioStreamParameters {
 public:
-    AAudioStreamParameters();
-    virtual ~AAudioStreamParameters();
+    AAudioStreamParameters() = default;
+    virtual ~AAudioStreamParameters() = default;
 
     int32_t getDeviceId() const {
         return mDeviceId;
@@ -150,7 +150,7 @@
     }
 
     // TODO b/182392769: reexamine if Identity can be used
-    void setOpPackageName(const std::optional<std::string> opPackageName) {
+    void setOpPackageName(const std::optional<std::string>& opPackageName) {
         mOpPackageName = opPackageName;
     }
 
@@ -158,7 +158,7 @@
         return mAttributionTag;
     }
 
-    void setAttributionTag(const std::optional<std::string> attributionTag) {
+    void setAttributionTag(const std::optional<std::string>& attributionTag) {
         mAttributionTag = attributionTag;
     }
 
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 1e88d15..6c22744 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -31,7 +31,8 @@
 const char* AudioGlobal_convertResultToText(aaudio_result_t returnCode);
 const char* AudioGlobal_convertSharingModeToText(aaudio_sharing_mode_t mode);
 const char* AudioGlobal_convertStreamStateToText(aaudio_stream_state_t state);
-}
+
+} // namespace aaudio
 
 #endif  // AAUDIO_AUDIOGLOBAL_H
 
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 7896e63..5fb4528 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -277,6 +277,14 @@
         return mIsPrivacySensitive;
     }
 
+    bool getRequireMonoBlend() const {
+        return mRequireMonoBlend;
+    }
+
+    float getAudioBalance() const {
+        return mAudioBalance;
+    }
+
     /**
      * This is only valid after setChannelMask() and setFormat()
      * have been called.
@@ -447,7 +455,7 @@
     // PlayerBase allows the system to control the stream volume.
     class MyPlayerBase : public android::PlayerBase {
     public:
-        MyPlayerBase() {};
+        MyPlayerBase() = default;
 
         virtual ~MyPlayerBase() = default;
 
@@ -576,7 +584,7 @@
      * @param numFrames
      * @return original pointer or the conversion buffer
      */
-    virtual const void * maybeConvertDeviceData(const void *audioData, int32_t numFrames) {
+    virtual const void * maybeConvertDeviceData(const void *audioData, int32_t /*numFrames*/) {
         return audioData;
     }
 
@@ -631,6 +639,20 @@
         mIsPrivacySensitive = privacySensitive;
     }
 
+    /**
+     * This should not be called after the open() call.
+     */
+    void setRequireMonoBlend(bool requireMonoBlend) {
+        mRequireMonoBlend = requireMonoBlend;
+    }
+
+    /**
+     * This should not be called after the open() call.
+     */
+    void setAudioBalance(float audioBalance) {
+        mAudioBalance = audioBalance;
+    }
+
     std::string mMetricsId; // set once during open()
 
     std::mutex                 mStreamLock;
@@ -672,6 +694,8 @@
     aaudio_input_preset_t       mInputPreset     = AAUDIO_UNSPECIFIED;
     aaudio_allowed_capture_policy_t mAllowedCapturePolicy = AAUDIO_ALLOW_CAPTURE_BY_ALL;
     bool                        mIsPrivacySensitive = false;
+    bool                        mRequireMonoBlend = false;
+    float                       mAudioBalance = 0;
 
     int32_t                     mSessionId = AAUDIO_UNSPECIFIED;
 
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 5e1e007..a100aa9 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -20,9 +20,14 @@
 
 #include <new>
 #include <stdint.h>
+#include <vector>
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <media/AudioSystem.h>
 
 #include "binding/AAudioBinderClient.h"
 #include "client/AudioStreamInternalCapture.h"
@@ -35,6 +40,10 @@
 
 using namespace aaudio;
 
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
 #define AAUDIO_MMAP_POLICY_DEFAULT             AAUDIO_POLICY_NEVER
 #define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT   AAUDIO_POLICY_NEVER
 
@@ -53,16 +62,10 @@
 /*
  * AudioStreamBuilder
  */
-AudioStreamBuilder::AudioStreamBuilder() {
-}
-
-AudioStreamBuilder::~AudioStreamBuilder() {
-}
-
 static aaudio_result_t builder_createStream(aaudio_direction_t direction,
-                                         aaudio_sharing_mode_t sharingMode,
-                                         bool tryMMap,
-                                         android::sp<AudioStream> &stream) {
+                                            aaudio_sharing_mode_t /*sharingMode*/,
+                                            bool tryMMap,
+                                            android::sp<AudioStream> &stream) {
     aaudio_result_t result = AAUDIO_OK;
 
     switch (direction) {
@@ -92,6 +95,37 @@
     return result;
 }
 
+namespace {
+
+aaudio_policy_t aidl2legacy_aaudio_policy(AudioMMapPolicy aidl) {
+    switch (aidl) {
+        case AudioMMapPolicy::NEVER:
+            return AAUDIO_POLICY_NEVER;
+        case AudioMMapPolicy::AUTO:
+            return AAUDIO_POLICY_AUTO;
+        case AudioMMapPolicy::ALWAYS:
+            return AAUDIO_POLICY_ALWAYS;
+        case AudioMMapPolicy::UNSPECIFIED:
+        default:
+            return AAUDIO_UNSPECIFIED;
+    }
+}
+
+// The aaudio policy will be ALWAYS, NEVER or UNSPECIFIED only when all of the policy infos
+// report the same policy. Otherwise, the aaudio policy will be AUTO.
+aaudio_policy_t getAAudioPolicy(
+        const std::vector<AudioMMapPolicyInfo>& policyInfos) {
+    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
+    for (size_t i = 1; i < policyInfos.size(); ++i) {
+        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+            return AAUDIO_POLICY_AUTO;
+        }
+    }
+    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+}
+
+} // namespace
+
 // Try to open using MMAP path if that is allowed.
 // Fall back to Legacy path if MMAP not available.
 // Exact behavior is controlled by MMapPolicy.
@@ -110,25 +144,32 @@
         return result;
     }
 
+    std::vector<AudioMMapPolicyInfo> policyInfos;
     // The API setting is the highest priority.
     aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
     // If not specified then get from a system property.
-    if (mmapPolicy == AAUDIO_UNSPECIFIED) {
-        mmapPolicy = AAudioProperty_getMMapPolicy();
+    if (mmapPolicy == AAUDIO_UNSPECIFIED && android::AudioSystem::getMmapPolicyInfo(
+                AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
+        mmapPolicy = getAAudioPolicy(policyInfos);
     }
     // If still not specified then use the default.
     if (mmapPolicy == AAUDIO_UNSPECIFIED) {
         mmapPolicy = AAUDIO_MMAP_POLICY_DEFAULT;
     }
 
-    int32_t mapExclusivePolicy = AAudioProperty_getMMapExclusivePolicy();
-    if (mapExclusivePolicy == AAUDIO_UNSPECIFIED) {
-        mapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
+    policyInfos.clear();
+    aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
+    if (android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
+        mmapExclusivePolicy = getAAudioPolicy(policyInfos);
+    }
+    if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
+        mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
     }
 
     aaudio_sharing_mode_t sharingMode = getSharingMode();
     if ((sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE)
-        && (mapExclusivePolicy == AAUDIO_POLICY_NEVER)) {
+        && (mmapExclusivePolicy == AAUDIO_POLICY_NEVER)) {
         ALOGD("%s() EXCLUSIVE sharing mode not supported. Use SHARED.", __func__);
         sharingMode = AAUDIO_SHARING_MODE_SHARED;
         setSharingMode(sharingMode);
@@ -280,9 +321,8 @@
           mFramesPerDataCallback);
     ALOGI("usage  = %6d, contentType = %d, inputPreset = %d, allowedCapturePolicy = %d",
           getUsage(), getContentType(), getInputPreset(), getAllowedCapturePolicy());
-    ALOGI("privacy sensitive = %s", isPrivacySensitive() ? "true" : "false");
-    ALOGI("opPackageName = %s", !getOpPackageName().has_value() ?
-        "(null)" : getOpPackageName().value().c_str());
-    ALOGI("attributionTag = %s", !getAttributionTag().has_value() ?
-        "(null)" : getAttributionTag().value().c_str());
+    ALOGI("privacy sensitive = %s, opPackageName = %s, attributionTag = %s",
+          isPrivacySensitive() ? "true" : "false",
+          !getOpPackageName().has_value() ? "(null)" : getOpPackageName().value().c_str(),
+          !getAttributionTag().has_value() ? "(null)" : getAttributionTag().value().c_str());
 }
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.h b/media/libaaudio/src/core/AudioStreamBuilder.h
index 9f93341..f91c25a 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.h
+++ b/media/libaaudio/src/core/AudioStreamBuilder.h
@@ -31,9 +31,9 @@
  */
 class AudioStreamBuilder : public AAudioStreamParameters {
 public:
-    AudioStreamBuilder();
+    AudioStreamBuilder() = default;
 
-    ~AudioStreamBuilder();
+    ~AudioStreamBuilder() = default;
 
     bool isSharingModeMatchRequired() const {
         return mSharingModeMatchRequired;
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 37548f0..7b0aca1 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -38,7 +38,7 @@
 
 class FifoBuffer {
 public:
-    FifoBuffer(int32_t bytesPerFrame);
+    explicit FifoBuffer(int32_t bytesPerFrame);
 
     virtual ~FifoBuffer() = default;
 
@@ -162,6 +162,6 @@
     uint8_t *mExternalStorage = nullptr;
 };
 
-}  // android
+}  // namespace android
 
 #endif //FIFO_FIFO_BUFFER_H
diff --git a/media/libaaudio/src/fifo/FifoController.h b/media/libaaudio/src/fifo/FifoController.h
index 057a94e..e15d444 100644
--- a/media/libaaudio/src/fifo/FifoController.h
+++ b/media/libaaudio/src/fifo/FifoController.h
@@ -36,7 +36,7 @@
     , mWriteCounter(0)
     {}
 
-    virtual ~FifoController() {}
+    virtual ~FifoController() = default;
 
     // TODO review use of memory barriers, probably incorrect
     virtual fifo_counter_t getReadCounter() override {
@@ -57,6 +57,6 @@
     std::atomic<fifo_counter_t> mWriteCounter;
 };
 
-}  // android
+}  // namespace android
 
 #endif //FIFO_FIFO_CONTROLLER_H
diff --git a/media/libaaudio/src/fifo/FifoControllerBase.cpp b/media/libaaudio/src/fifo/FifoControllerBase.cpp
index 1dece0e..ad6d041 100644
--- a/media/libaaudio/src/fifo/FifoControllerBase.cpp
+++ b/media/libaaudio/src/fifo/FifoControllerBase.cpp
@@ -29,9 +29,6 @@
 {
 }
 
-FifoControllerBase::~FifoControllerBase() {
-}
-
 fifo_frames_t FifoControllerBase::getFullFramesAvailable() {
     fifo_frames_t temp = 0;
     __builtin_sub_overflow(getWriteCounter(), getReadCounter(), &temp);
diff --git a/media/libaaudio/src/fifo/FifoControllerBase.h b/media/libaaudio/src/fifo/FifoControllerBase.h
index 1edb8a3..2a6173b 100644
--- a/media/libaaudio/src/fifo/FifoControllerBase.h
+++ b/media/libaaudio/src/fifo/FifoControllerBase.h
@@ -43,7 +43,7 @@
      */
     FifoControllerBase(fifo_frames_t capacity, fifo_frames_t threshold);
 
-    virtual ~FifoControllerBase();
+    virtual ~FifoControllerBase() = default;
 
     // Abstract methods to be implemented in subclasses.
     /**
@@ -123,6 +123,6 @@
     fifo_frames_t mThreshold;
 };
 
-}  // android
+}  // namespace android
 
 #endif // FIFO_FIFO_CONTROLLER_BASE_H
diff --git a/media/libaaudio/src/fifo/FifoControllerIndirect.h b/media/libaaudio/src/fifo/FifoControllerIndirect.h
index ec48e57..a59225a 100644
--- a/media/libaaudio/src/fifo/FifoControllerIndirect.h
+++ b/media/libaaudio/src/fifo/FifoControllerIndirect.h
@@ -44,7 +44,7 @@
         setReadCounter(0);
         setWriteCounter(0);
     }
-    virtual ~FifoControllerIndirect() {};
+    virtual ~FifoControllerIndirect() = default;
 
     // TODO review use of memory barriers, probably incorrect
     virtual fifo_counter_t getReadCounter() override {
@@ -68,6 +68,6 @@
     std::atomic<fifo_counter_t> * mWriteCounterAddress;
 };
 
-}  // android
+}  // namespace android
 
 #endif //FIFO_FIFO_CONTROLLER_INDIRECT_H
diff --git a/media/libaaudio/src/flowgraph/AudioProcessorBase.cpp b/media/libaaudio/src/flowgraph/AudioProcessorBase.cpp
deleted file mode 100644
index 5667fdb..0000000
--- a/media/libaaudio/src/flowgraph/AudioProcessorBase.cpp
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <algorithm>
-#include <sys/types.h>
-#include "AudioProcessorBase.h"
-
-using namespace flowgraph;
-
-/***************************************************************************/
-int32_t AudioProcessorBase::pullData(int64_t framePosition, int32_t numFrames) {
-    if (framePosition > mLastFramePosition) {
-        mLastFramePosition = framePosition;
-        mFramesValid = onProcess(framePosition, numFrames);
-    }
-    return mFramesValid;
-}
-
-/***************************************************************************/
-AudioFloatBlockPort::AudioFloatBlockPort(AudioProcessorBase &parent,
-                               int32_t samplesPerFrame,
-                               int32_t framesPerBlock)
-        : AudioPort(parent, samplesPerFrame)
-        , mFramesPerBlock(framesPerBlock)
-        , mSampleBlock(NULL) {
-    int32_t numFloats = framesPerBlock * getSamplesPerFrame();
-    mSampleBlock = new float[numFloats]{0.0f};
-}
-
-AudioFloatBlockPort::~AudioFloatBlockPort() {
-    delete[] mSampleBlock;
-}
-
-/***************************************************************************/
-int32_t AudioFloatOutputPort::pullData(int64_t framePosition, int32_t numFrames) {
-    numFrames = std::min(getFramesPerBlock(), numFrames);
-    return mParent.pullData(framePosition, numFrames);
-}
-
-// These need to be in the .cpp file because of forward cross references.
-void AudioFloatOutputPort::connect(AudioFloatInputPort *port) {
-    port->connect(this);
-}
-
-void AudioFloatOutputPort::disconnect(AudioFloatInputPort *port) {
-    port->disconnect(this);
-}
-
-/***************************************************************************/
-int32_t AudioFloatInputPort::pullData(int64_t framePosition, int32_t numFrames) {
-    return (mConnected == NULL)
-            ? std::min(getFramesPerBlock(), numFrames)
-            : mConnected->pullData(framePosition, numFrames);
-}
-
-float *AudioFloatInputPort::getBlock() {
-    if (mConnected == NULL) {
-        return AudioFloatBlockPort::getBlock(); // loaded using setValue()
-    } else {
-        return mConnected->getBlock();
-    }
-}
-
-/***************************************************************************/
-int32_t AudioSink::pull(int32_t numFrames) {
-    int32_t actualFrames = input.pullData(mFramePosition, numFrames);
-    mFramePosition += actualFrames;
-    return actualFrames;
-}
\ No newline at end of file
diff --git a/media/libaaudio/src/flowgraph/AudioProcessorBase.h b/media/libaaudio/src/flowgraph/AudioProcessorBase.h
deleted file mode 100644
index 972932f..0000000
--- a/media/libaaudio/src/flowgraph/AudioProcessorBase.h
+++ /dev/null
@@ -1,293 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * AudioProcessorBase.h
- *
- * Audio processing node and ports that can be used in a simple data flow graph.
- */
-
-#ifndef FLOWGRAPH_AUDIO_PROCESSOR_BASE_H
-#define FLOWGRAPH_AUDIO_PROCESSOR_BASE_H
-
-#include <cassert>
-#include <cstring>
-#include <math.h>
-#include <sys/types.h>
-#include <time.h>
-#include <unistd.h>
-
-// TODO consider publishing all header files under "include/libaaudio/FlowGraph.h"
-
-namespace flowgraph {
-
-// Default block size that can be overridden when the AudioFloatBlockPort is created.
-// If it is too small then we will have too much overhead from switching between nodes.
-// If it is too high then we will thrash the caches.
-constexpr int kDefaultBlockSize = 8; // arbitrary
-
-class AudioFloatInputPort;
-
-/***************************************************************************/
-class AudioProcessorBase {
-public:
-    virtual ~AudioProcessorBase() = default;
-
-    /**
-     * Perform custom function.
-     *
-     * @param framePosition index of first frame to be processed
-     * @param numFrames maximum number of frames requested for processing
-     * @return number of frames actually processed
-     */
-    virtual int32_t onProcess(int64_t framePosition, int32_t numFrames) = 0;
-
-    /**
-     * If the framePosition is at or after the last frame position then call onProcess().
-     * This prevents infinite recursion in case of cyclic graphs.
-     * It also prevents nodes upstream from a branch from being executed twice.
-     *
-     * @param framePosition
-     * @param numFrames
-     * @return
-     */
-    int32_t pullData(int64_t framePosition, int32_t numFrames);
-
-protected:
-    int64_t  mLastFramePosition = -1; // Start at -1 so that the first pull works.
-
-private:
-    int32_t  mFramesValid = 0; // num valid frames in the block
-};
-
-/***************************************************************************/
-/**
-  * This is a connector that allows data to flow between modules.
-  */
-class AudioPort {
-public:
-    AudioPort(AudioProcessorBase &parent, int32_t samplesPerFrame)
-            : mParent(parent)
-            , mSamplesPerFrame(samplesPerFrame) {
-    }
-
-    // Ports are often declared public. So let's make them non-copyable.
-    AudioPort(const AudioPort&) = delete;
-    AudioPort& operator=(const AudioPort&) = delete;
-
-    int32_t getSamplesPerFrame() const {
-        return mSamplesPerFrame;
-    }
-
-protected:
-    AudioProcessorBase &mParent;
-
-private:
-    const int32_t    mSamplesPerFrame = 1;
-};
-
-/***************************************************************************/
-/**
- * This port contains a float type buffer.
- * The size is framesPerBlock * samplesPerFrame).
- */
-class AudioFloatBlockPort  : public AudioPort {
-public:
-    AudioFloatBlockPort(AudioProcessorBase &mParent,
-                   int32_t samplesPerFrame,
-                   int32_t framesPerBlock = kDefaultBlockSize
-                );
-
-    virtual ~AudioFloatBlockPort();
-
-    int32_t getFramesPerBlock() const {
-        return mFramesPerBlock;
-    }
-
-protected:
-
-    /**
-     * @return buffer internal to the port or from a connected port
-     */
-    virtual float *getBlock() {
-        return mSampleBlock;
-    }
-
-
-private:
-    const int32_t    mFramesPerBlock = 1;
-    float           *mSampleBlock = nullptr; // allocated in constructor
-};
-
-/***************************************************************************/
-/**
-  * The results of a module are stored in the buffer of the output ports.
-  */
-class AudioFloatOutputPort : public AudioFloatBlockPort {
-public:
-    AudioFloatOutputPort(AudioProcessorBase &parent, int32_t samplesPerFrame)
-            : AudioFloatBlockPort(parent, samplesPerFrame) {
-    }
-
-    virtual ~AudioFloatOutputPort() = default;
-
-    using AudioFloatBlockPort::getBlock;
-
-    /**
-     * Call the parent module's onProcess() method.
-     * That may pull data from its inputs and recursively
-     * process the entire graph.
-     * @return number of frames actually pulled
-     */
-    int32_t pullData(int64_t framePosition, int32_t numFrames);
-
-    /**
-     * Connect to the input of another module.
-     * An input port can only have one connection.
-     * An output port can have multiple connections.
-     * If you connect a second output port to an input port
-     * then it overwrites the previous connection.
-     *
-     * This not thread safe. Do not modify the graph topology form another thread while running.
-     */
-    void connect(AudioFloatInputPort *port);
-
-    /**
-     * Disconnect from the input of another module.
-     * This not thread safe.
-     */
-    void disconnect(AudioFloatInputPort *port);
-};
-
-/***************************************************************************/
-class AudioFloatInputPort : public AudioFloatBlockPort {
-public:
-    AudioFloatInputPort(AudioProcessorBase &parent, int32_t samplesPerFrame)
-            : AudioFloatBlockPort(parent, samplesPerFrame) {
-    }
-
-    virtual ~AudioFloatInputPort() = default;
-
-    /**
-     * If connected to an output port then this will return
-     * that output ports buffers.
-     * If not connected then it returns the input ports own buffer
-     * which can be loaded using setValue().
-     */
-    float *getBlock() override;
-
-    /**
-     * Pull data from any output port that is connected.
-     */
-    int32_t pullData(int64_t framePosition, int32_t numFrames);
-
-    /**
-     * Write every value of the float buffer.
-     * This value will be ignored if an output port is connected
-     * to this port.
-     */
-    void setValue(float value) {
-        int numFloats = kDefaultBlockSize * getSamplesPerFrame();
-        float *buffer = getBlock();
-        for (int i = 0; i < numFloats; i++) {
-            *buffer++ = value;
-        }
-    }
-
-    /**
-     * Connect to the output of another module.
-     * An input port can only have one connection.
-     * An output port can have multiple connections.
-     * This not thread safe.
-     */
-    void connect(AudioFloatOutputPort *port) {
-        assert(getSamplesPerFrame() == port->getSamplesPerFrame());
-        mConnected = port;
-    }
-
-    void disconnect(AudioFloatOutputPort *port) {
-        assert(mConnected == port);
-        (void) port;
-        mConnected = nullptr;
-    }
-
-    void disconnect() {
-        mConnected = nullptr;
-    }
-
-private:
-    AudioFloatOutputPort *mConnected = nullptr;
-};
-
-/***************************************************************************/
-class AudioSource : public AudioProcessorBase {
-public:
-    explicit AudioSource(int32_t channelCount)
-            : output(*this, channelCount) {
-    }
-
-    virtual ~AudioSource() = default;
-
-    AudioFloatOutputPort output;
-
-    void setData(const void *data, int32_t numFrames) {
-        mData = data;
-        mSizeInFrames = numFrames;
-        mFrameIndex = 0;
-    }
-
-protected:
-    const void *mData = nullptr;
-    int32_t     mSizeInFrames = 0; // number of frames in mData
-    int32_t     mFrameIndex = 0; // index of next frame to be processed
-};
-
-/***************************************************************************/
-class AudioSink : public AudioProcessorBase {
-public:
-    explicit AudioSink(int32_t channelCount)
-            : input(*this, channelCount) {
-    }
-
-    virtual ~AudioSink() = default;
-
-    AudioFloatInputPort input;
-
-    /**
-     * Do nothing. The work happens in the read() method.
-     *
-     * @param framePosition index of first frame to be processed
-     * @param numFrames
-     * @return number of frames actually processed
-     */
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override {
-        (void) framePosition;
-        (void) numFrames;
-        return 0;
-    };
-
-    virtual int32_t read(void *data, int32_t numFrames) = 0;
-
-protected:
-    int32_t pull(int32_t numFrames);
-
-private:
-    int64_t mFramePosition = 0;
-};
-
-} /* namespace flowgraph */
-
-#endif /* FLOWGRAPH_AUDIO_PROCESSOR_BASE_H */
diff --git a/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp b/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp
new file mode 100644
index 0000000..dc80427
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "ChannelCountConverter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+ChannelCountConverter::ChannelCountConverter(
+        int32_t inputChannelCount,
+        int32_t outputChannelCount)
+        : input(*this, inputChannelCount)
+        , output(*this, outputChannelCount) {
+}
+
+ChannelCountConverter::~ChannelCountConverter() = default;
+
+int32_t ChannelCountConverter::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
+    int32_t inputChannelCount = input.getSamplesPerFrame();
+    int32_t outputChannelCount = output.getSamplesPerFrame();
+    for (int i = 0; i < numFrames; i++) {
+        int inputChannel = 0;
+        for (int outputChannel = 0; outputChannel < outputChannelCount; outputChannel++) {
+            // Copy input channels to output channels.
+            // Wrap if we run out of inputs.
+            // Discard if we run out of outputs.
+            outputBuffer[outputChannel] = inputBuffer[inputChannel];
+            inputChannel = (inputChannel == inputChannelCount - 1)
+                    ? 0 : inputChannel + 1;
+        }
+        inputBuffer += inputChannelCount;
+        outputBuffer += outputChannelCount;
+    }
+    return numFrames;
+}
+
diff --git a/media/libaaudio/src/flowgraph/ChannelCountConverter.h b/media/libaaudio/src/flowgraph/ChannelCountConverter.h
new file mode 100644
index 0000000..858f4d4
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/ChannelCountConverter.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
+#define FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
+
+#include <unistd.h>
+#include <sys/types.h>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+/**
+ * Change the number of channels without mixing.
+ * When increasing the channel count, duplicate input channels.
+ * When decreasing the channel count, drop input channels.
+ */
+    class ChannelCountConverter : public FlowGraphNode {
+    public:
+        explicit ChannelCountConverter(
+                int32_t inputChannelCount,
+                int32_t outputChannelCount);
+
+        virtual ~ChannelCountConverter();
+
+        int32_t onProcess(int32_t numFrames) override;
+
+        const char *getName() override {
+            return "ChannelCountConverter";
+        }
+
+        FlowGraphPortFloatInput input;
+        FlowGraphPortFloatOutput output;
+    };
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/ClipToRange.cpp b/media/libaaudio/src/flowgraph/ClipToRange.cpp
index bd9c22a..c6ad0b0 100644
--- a/media/libaaudio/src/flowgraph/ClipToRange.cpp
+++ b/media/libaaudio/src/flowgraph/ClipToRange.cpp
@@ -16,25 +16,23 @@
 
 #include <algorithm>
 #include <unistd.h>
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 #include "ClipToRange.h"
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 ClipToRange::ClipToRange(int32_t channelCount)
-        : input(*this, channelCount)
-        , output(*this, channelCount) {
+        : FlowGraphFilter(channelCount) {
 }
 
-int32_t ClipToRange::onProcess(int64_t framePosition, int32_t numFrames) {
-    int32_t framesToProcess = input.pullData(framePosition, numFrames);
-    const float *inputBuffer = input.getBlock();
-    float *outputBuffer = output.getBlock();
+int32_t ClipToRange::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
 
-    int32_t numSamples = framesToProcess * output.getSamplesPerFrame();
+    int32_t numSamples = numFrames * output.getSamplesPerFrame();
     for (int32_t i = 0; i < numSamples; i++) {
         *outputBuffer++ = std::min(mMaximum, std::max(mMinimum, *inputBuffer++));
     }
 
-    return framesToProcess;
+    return numFrames;
 }
diff --git a/media/libaaudio/src/flowgraph/ClipToRange.h b/media/libaaudio/src/flowgraph/ClipToRange.h
index 9eef254..2fddeee 100644
--- a/media/libaaudio/src/flowgraph/ClipToRange.h
+++ b/media/libaaudio/src/flowgraph/ClipToRange.h
@@ -21,22 +21,22 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
 // This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data.
 // It is designed to allow occasional transient peaks.
 constexpr float kDefaultMaxHeadroom = 1.41253754f;
 constexpr float kDefaultMinHeadroom = -kDefaultMaxHeadroom;
 
-class ClipToRange : public AudioProcessorBase {
+class ClipToRange : public FlowGraphFilter {
 public:
     explicit ClipToRange(int32_t channelCount);
 
     virtual ~ClipToRange() = default;
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
 
     void setMinimum(float min) {
         mMinimum = min;
@@ -54,14 +54,15 @@
         return mMaximum;
     }
 
-    AudioFloatInputPort input;
-    AudioFloatOutputPort output;
+    const char *getName() override {
+        return "ClipToRange";
+    }
 
 private:
     float mMinimum = kDefaultMinHeadroom;
     float mMaximum = kDefaultMaxHeadroom;
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_CLIP_TO_RANGE_H
diff --git a/media/libaaudio/src/flowgraph/FlowGraphNode.cpp b/media/libaaudio/src/flowgraph/FlowGraphNode.cpp
new file mode 100644
index 0000000..012abe7
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/FlowGraphNode.cpp
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "stdio.h"
+#include <algorithm>
+#include <sys/types.h>
+#include "FlowGraphNode.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+/***************************************************************************/
+int32_t FlowGraphNode::pullData(int32_t numFrames, int64_t callCount) {
+    int32_t frameCount = numFrames;
+    // Prevent recursion and multiple execution of nodes.
+    if (callCount > mLastCallCount) {
+        mLastCallCount = callCount;
+        if (mDataPulledAutomatically) {
+            // Pull from all the upstream nodes.
+            for (auto &port : mInputPorts) {
+                // TODO fix bug of leaving unused data in some ports when using multiple AudioSources
+                frameCount = port.get().pullData(callCount, frameCount);
+            }
+        }
+        if (frameCount > 0) {
+            frameCount = onProcess(frameCount);
+        }
+        mLastFrameCount = frameCount;
+    } else {
+        frameCount = mLastFrameCount;
+    }
+    return frameCount;
+}
+
+void FlowGraphNode::pullReset() {
+    if (!mBlockRecursion) {
+        mBlockRecursion = true; // for cyclic graphs
+        // Pull reset from all the upstream nodes.
+        for (auto &port : mInputPorts) {
+            port.get().pullReset();
+        }
+        mBlockRecursion = false;
+        reset();
+    }
+}
+
+void FlowGraphNode::reset() {
+    mLastFrameCount = 0;
+    mLastCallCount = kInitialCallCount;
+}
+
+/***************************************************************************/
+FlowGraphPortFloat::FlowGraphPortFloat(FlowGraphNode &parent,
+                               int32_t samplesPerFrame,
+                               int32_t framesPerBuffer)
+        : FlowGraphPort(parent, samplesPerFrame)
+        , mFramesPerBuffer(framesPerBuffer)
+        , mBuffer(nullptr) {
+    size_t numFloats = static_cast<size_t>(framesPerBuffer) * getSamplesPerFrame();
+    mBuffer = std::make_unique<float[]>(numFloats);
+}
+
+/***************************************************************************/
+int32_t FlowGraphPortFloatOutput::pullData(int64_t callCount, int32_t numFrames) {
+    numFrames = std::min(getFramesPerBuffer(), numFrames);
+    return mContainingNode.pullData(numFrames, callCount);
+}
+
+void FlowGraphPortFloatOutput::pullReset() {
+    mContainingNode.pullReset();
+}
+
+// These need to be in the .cpp file because of forward cross references.
+void FlowGraphPortFloatOutput::connect(FlowGraphPortFloatInput *port) {
+    port->connect(this);
+}
+
+void FlowGraphPortFloatOutput::disconnect(FlowGraphPortFloatInput *port) {
+    port->disconnect(this);
+}
+
+/***************************************************************************/
+int32_t FlowGraphPortFloatInput::pullData(int64_t callCount, int32_t numFrames) {
+    return (mConnected == nullptr)
+            ? std::min(getFramesPerBuffer(), numFrames)
+            : mConnected->pullData(callCount, numFrames);
+}
+void FlowGraphPortFloatInput::pullReset() {
+    if (mConnected != nullptr) mConnected->pullReset();
+}
+
+float *FlowGraphPortFloatInput::getBuffer() {
+    if (mConnected == nullptr) {
+        return FlowGraphPortFloat::getBuffer(); // loaded using setValue()
+    } else {
+        return mConnected->getBuffer();
+    }
+}
+
+int32_t FlowGraphSink::pullData(int32_t numFrames) {
+    return FlowGraphNode::pullData(numFrames, getLastCallCount() + 1);
+}
diff --git a/media/libaaudio/src/flowgraph/FlowGraphNode.h b/media/libaaudio/src/flowgraph/FlowGraphNode.h
new file mode 100644
index 0000000..2884c08
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/FlowGraphNode.h
@@ -0,0 +1,450 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * FlowGraphNode.h
+ *
+ * Processing node and ports that can be used in a simple data flow graph.
+ * This was designed to work with audio but could be used for other
+ * types of data.
+ */
+
+#ifndef FLOWGRAPH_FLOW_GRAPH_NODE_H
+#define FLOWGRAPH_FLOW_GRAPH_NODE_H
+
+#include <cassert>
+#include <cstring>
+#include <math.h>
+#include <memory>
+#include <sys/types.h>
+#include <time.h>
+#include <unistd.h>
+#include <vector>
+
+// TODO Move these classes into separate files.
+// TODO Review use of raw pointers for connect(). Maybe use smart pointers but need to avoid
+//      run-time deallocation in audio thread.
+
+// Set flags FLOWGRAPH_ANDROID_INTERNAL and FLOWGRAPH_OUTER_NAMESPACE based on whether compiler
+// flag __ANDROID_NDK__ is defined. __ANDROID_NDK__ should be defined in oboe and not aaudio.
+
+#ifndef FLOWGRAPH_ANDROID_INTERNAL
+#ifdef __ANDROID_NDK__
+#define FLOWGRAPH_ANDROID_INTERNAL 0
+#else
+#define FLOWGRAPH_ANDROID_INTERNAL 1
+#endif // __ANDROID_NDK__
+#endif // FLOWGRAPH_ANDROID_INTERNAL
+
+#ifndef FLOWGRAPH_OUTER_NAMESPACE
+#ifdef __ANDROID_NDK__
+#define FLOWGRAPH_OUTER_NAMESPACE oboe
+#else
+#define FLOWGRAPH_OUTER_NAMESPACE aaudio
+#endif // __ANDROID_NDK__
+#endif // FLOWGRAPH_OUTER_NAMESPACE
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+// Default block size that can be overridden when the FlowGraphPortFloat is created.
+// If it is too small then we will have too much overhead from switching between nodes.
+// If it is too large then we will thrash the caches.
+constexpr int kDefaultBufferSize = 8; // arbitrary
+
+class FlowGraphPort;
+class FlowGraphPortFloatInput;
+
+/***************************************************************************/
+/**
+ * Base class for all nodes in the flowgraph.
+ */
+class FlowGraphNode {
+public:
+    FlowGraphNode() = default;
+    virtual ~FlowGraphNode() = default;
+
+    /**
+     * Read from the input ports,
+     * generate multiple frames of data then write the results to the output ports.
+     *
+     * @param numFrames maximum number of frames requested for processing
+     * @return number of frames actually processed
+     */
+    virtual int32_t onProcess(int32_t numFrames) = 0;
+
+    /**
+     * If the callCount is later than the previous callCount then pull
+     * data from all of the upstream nodes.
+     * Then call onProcess().
+     * This prevents infinite recursion in case of cyclic graphs.
+     * It also prevents nodes upstream of a branch from being executed twice.
+     *
+     * @param callCount
+     * @param numFrames
+     * @return number of frames valid
+     */
+    int32_t pullData(int32_t numFrames, int64_t callCount);
+
+    /**
+     * Recursively reset all the nodes in the graph, starting from a Sink.
+     *
+     * This must not be called at the same time as pullData!
+     */
+    void pullReset();
+
+    /**
+     * Reset framePosition counters.
+     */
+    virtual void reset();
+
+    void addInputPort(FlowGraphPort &port) {
+        mInputPorts.emplace_back(port);
+    }
+
+    bool isDataPulledAutomatically() const {
+        return mDataPulledAutomatically;
+    }
+
+    /**
+     * Set true if you want the data pulled through the graph automatically.
+     * This is the default.
+     *
+     * Set false if you want to pull the data from the input ports in the onProcess() method.
+     * You might do this, for example, in a sample rate converting node.
+     *
+     * @param automatic
+     */
+    void setDataPulledAutomatically(bool automatic) {
+        mDataPulledAutomatically = automatic;
+    }
+
+    virtual const char *getName() {
+        return "FlowGraph";
+    }
+
+    int64_t getLastCallCount() {
+        return mLastCallCount;
+    }
+
+protected:
+
+    static constexpr int64_t  kInitialCallCount = -1;
+    int64_t  mLastCallCount = kInitialCallCount;
+
+    std::vector<std::reference_wrapper<FlowGraphPort>> mInputPorts;
+
+private:
+    bool     mDataPulledAutomatically = true;
+    bool     mBlockRecursion = false;
+    int32_t  mLastFrameCount = 0;
+
+};
+
+/***************************************************************************/
+/**
+ * This is a connector that allows data to flow between modules.
+ *
+ * The ports are the primary means of interacting with a module,
+ * so they are generally declared public.
+ *
+ */
+class FlowGraphPort {
+public:
+    FlowGraphPort(FlowGraphNode &parent, int32_t samplesPerFrame)
+            : mContainingNode(parent)
+            , mSamplesPerFrame(samplesPerFrame) {
+    }
+
+    virtual ~FlowGraphPort() = default;
+
+    // Ports are often declared public. So let's make them non-copyable.
+    FlowGraphPort(const FlowGraphPort&) = delete;
+    FlowGraphPort& operator=(const FlowGraphPort&) = delete;
+
+    int32_t getSamplesPerFrame() const {
+        return mSamplesPerFrame;
+    }
+
+    virtual int32_t pullData(int64_t framePosition, int32_t numFrames) = 0;
+
+    virtual void pullReset() {}
+
+protected:
+    FlowGraphNode &mContainingNode;
+
+private:
+    const int32_t    mSamplesPerFrame = 1;
+};
+
+/***************************************************************************/
+/**
+ * This port contains a 32-bit float buffer that can contain several frames of data.
+ * Processing the data in a block improves performance.
+ *
+ * The size is framesPerBuffer * samplesPerFrame.
+ */
+class FlowGraphPortFloat  : public FlowGraphPort {
+public:
+    FlowGraphPortFloat(FlowGraphNode &parent,
+                   int32_t samplesPerFrame,
+                   int32_t framesPerBuffer = kDefaultBufferSize
+                );
+
+    virtual ~FlowGraphPortFloat() = default;
+
+    int32_t getFramesPerBuffer() const {
+        return mFramesPerBuffer;
+    }
+
+protected:
+
+    /**
+     * @return buffer internal to the port or from a connected port
+     */
+    virtual float *getBuffer() {
+        return mBuffer.get();
+    }
+
+private:
+    const int32_t    mFramesPerBuffer = 1;
+    std::unique_ptr<float[]> mBuffer; // allocated in constructor
+};
+
+/***************************************************************************/
+/**
+ * The results of a node's processing are stored in the buffers of the output ports.
+ */
+class FlowGraphPortFloatOutput : public FlowGraphPortFloat {
+public:
+    FlowGraphPortFloatOutput(FlowGraphNode &parent, int32_t samplesPerFrame)
+            : FlowGraphPortFloat(parent, samplesPerFrame) {
+    }
+
+    virtual ~FlowGraphPortFloatOutput() = default;
+
+    using FlowGraphPortFloat::getBuffer;
+
+    /**
+     * Connect to the input of another module.
+     * An input port can only have one connection.
+     * An output port can have multiple connections.
+     * If you connect a second output port to an input port
+     * then it overwrites the previous connection.
+     *
+     * This is not thread safe. Do not modify the graph topology from another thread while running.
+     * Also do not delete a module while it is connected to another port if the graph is running.
+     */
+    void connect(FlowGraphPortFloatInput *port);
+
+    /**
+     * Disconnect from the input of another module.
+     * This is not thread safe.
+     */
+    void disconnect(FlowGraphPortFloatInput *port);
+
+    /**
+     * Call the parent module's onProcess() method.
+     * That may pull data from its inputs and recursively
+     * process the entire graph.
+     * @return number of frames actually pulled
+     */
+    int32_t pullData(int64_t framePosition, int32_t numFrames) override;
+
+
+    void pullReset() override;
+
+};
+
+/***************************************************************************/
+
+/**
+ * An input port for streaming audio data.
+ * You can set a value that will be used for processing.
+ * If you connect an output port to this port then its value will be used instead.
+ */
+class FlowGraphPortFloatInput : public FlowGraphPortFloat {
+public:
+    FlowGraphPortFloatInput(FlowGraphNode &parent, int32_t samplesPerFrame)
+            : FlowGraphPortFloat(parent, samplesPerFrame) {
+        // Add to parent so it can pull data from each input.
+        parent.addInputPort(*this);
+    }
+
+    virtual ~FlowGraphPortFloatInput() = default;
+
+    /**
+     * If connected to an output port then this will return
+     * that output port's buffer.
+     * If not connected then it returns this input port's own buffer,
+     * which can be loaded using setValue().
+     */
+    float *getBuffer() override;
+
+    /**
+     * Write every value of the float buffer.
+     * This value will be ignored if an output port is connected
+     * to this port.
+     */
+    void setValue(float value) {
+        int numFloats = kDefaultBufferSize * getSamplesPerFrame();
+        float *buffer = getBuffer();
+        for (int i = 0; i < numFloats; i++) {
+            *buffer++ = value;
+        }
+    }
+
+    /**
+     * Connect to the output of another module.
+     * An input port can only have one connection.
+     * An output port can have multiple connections.
+     * This is not thread safe.
+     */
+    void connect(FlowGraphPortFloatOutput *port) {
+        assert(getSamplesPerFrame() == port->getSamplesPerFrame());
+        mConnected = port;
+    }
+
+    void disconnect(FlowGraphPortFloatOutput *port) {
+        assert(mConnected == port);
+        (void) port;
+        mConnected = nullptr;
+    }
+
+    void disconnect() {
+        mConnected = nullptr;
+    }
+
+    /**
+     * Pull data from any output port that is connected.
+     */
+    int32_t pullData(int64_t framePosition, int32_t numFrames) override;
+
+    void pullReset() override;
+
+private:
+    FlowGraphPortFloatOutput *mConnected = nullptr;
+};
+
+/***************************************************************************/
+
+/**
+ * Base class for an edge node in a graph that has no upstream nodes.
+ * It produces data for the graph but does not consume any.
+ * Subclasses determine where that data comes from.
+ */
+class FlowGraphSource : public FlowGraphNode {
+public:
+    explicit FlowGraphSource(int32_t channelCount)
+            : output(*this, channelCount) {
+    }
+
+    virtual ~FlowGraphSource() = default;
+
+    FlowGraphPortFloatOutput output;
+};
+
+/***************************************************************************/
+
+/**
+ * A source that reads its data from an external buffer.
+ * Call setData() to provide the buffer before pulling data
+ * through the graph.
+ */
+class FlowGraphSourceBuffered : public FlowGraphSource {
+public:
+    explicit FlowGraphSourceBuffered(int32_t channelCount)
+            : FlowGraphSource(channelCount) {}
+
+    virtual ~FlowGraphSourceBuffered() = default;
+
+    /**
+     * Specify buffer that the node will read from.
+     *
+     * @param data TODO Consider using std::shared_ptr.
+     * @param numFrames
+     */
+    void setData(const void *data, int32_t numFrames) {
+        mData = data;
+        mSizeInFrames = numFrames;
+        mFrameIndex = 0;
+    }
+
+protected:
+    const void *mData = nullptr;
+    int32_t     mSizeInFrames = 0; // number of frames in mData
+    int32_t     mFrameIndex = 0; // index of next frame to be processed
+};
+
+/***************************************************************************/
+/**
+ * Base class for an edge node in a graph that has no downstream nodes.
+ * It consumes data but does not output data.
+ * This graph will be executed when data is read() from this node
+ * by pulling data from upstream nodes.
+ */
+class FlowGraphSink : public FlowGraphNode {
+public:
+    explicit FlowGraphSink(int32_t channelCount)
+            : input(*this, channelCount) {
+    }
+
+    virtual ~FlowGraphSink() = default;
+
+    FlowGraphPortFloatInput input;
+
+    /**
+     * Do nothing. The work happens in the read() method.
+     *
+     * @param numFrames
+     * @return number of frames actually processed
+     */
+    int32_t onProcess(int32_t numFrames) override {
+        return numFrames;
+    }
+
+    virtual int32_t read(void *data, int32_t numFrames) = 0;
+
+protected:
+    /**
+     * Pull data through the graph using this node's last callCount plus one.
+     * @param numFrames
+     * @return number of frames actually pulled through the graph
+     */
+    int32_t pullData(int32_t numFrames);
+};
+
+/***************************************************************************/
+/**
+ * Base class for a node that has an input and an output with the same number of channels.
+ * This includes traditional filters, e.g. FIR, but also
+ * any processing node that converts input to output.
+ */
+class FlowGraphFilter : public FlowGraphNode {
+public:
+    explicit FlowGraphFilter(int32_t channelCount)
+            : input(*this, channelCount)
+            , output(*this, channelCount) {
+    }
+
+    virtual ~FlowGraphFilter() = default;
+
+    FlowGraphPortFloatInput input;
+    FlowGraphPortFloatOutput output;
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif /* FLOWGRAPH_FLOW_GRAPH_NODE_H */
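
Editor's note, as a usage illustration and not part of the patch: the classes above are composed by connecting output ports to input ports and then pulling data from a sink. The GainFilter below is hypothetical, and the include paths plus the SourceFloat/SinkFloat nodes (updated elsewhere in this patch) are assumptions; this is a minimal sketch, not the library's own test code.

#include <cstdint>
#include "FlowGraphNode.h"
#include "SourceFloat.h"
#include "SinkFloat.h"

using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;

// Hypothetical filter node: scales every sample by a fixed gain.
class GainFilter : public FlowGraphFilter {
public:
    GainFilter(int32_t channelCount, float gain)
            : FlowGraphFilter(channelCount), mGain(gain) {}

    int32_t onProcess(int32_t numFrames) override {
        const float *in = input.getBuffer();
        float *out = output.getBuffer();
        const int32_t numSamples = numFrames * output.getSamplesPerFrame();
        for (int32_t i = 0; i < numSamples; i++) {
            out[i] = in[i] * mGain;
        }
        return numFrames;
    }

    const char *getName() override { return "GainFilter"; }

private:
    const float mGain;
};

void renderOnce() {
    constexpr int32_t kChannelCount = 2;
    constexpr int32_t kNumFrames = 64;
    float inputData[kNumFrames * kChannelCount] = {};   // pretend this holds audio
    float outputData[kNumFrames * kChannelCount] = {};

    SourceFloat source(kChannelCount);
    GainFilter gain(kChannelCount, 0.5f);
    SinkFloat sink(kChannelCount);

    // Wire the graph: source -> gain -> sink.
    source.output.connect(&gain.input);
    gain.output.connect(&sink.input);

    source.setData(inputData, kNumFrames);
    // Reading from the sink pulls data through the whole graph,
    // kDefaultBufferSize frames at a time.
    int32_t framesRead = sink.read(outputData, kNumFrames);
    (void) framesRead;
}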
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
index b750410..5e90588 100644
--- a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -19,7 +19,7 @@
 
 #include <unistd.h>
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 class FlowgraphUtilities {
 public:
@@ -39,9 +39,9 @@
     static const float limneg = -1.;
 
     if (f <= limneg) {
-        return -0x80000000; /* or 0x80000000 */
+        return INT32_MIN;
     } else if (f >= limpos) {
-        return 0x7fffffff;
+        return INT32_MAX;
     }
     f *= scale;
     /* integer conversion is through truncation (though int to float is not).
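
Editor's note: the INT32_MIN/INT32_MAX change above is more than cosmetic. The literal 0x80000000 does not fit in a 32-bit int, so it has unsigned type, and -0x80000000 is therefore an unsigned expression rather than the minimum int32_t value (hence the old "or 0x80000000" comment and the warnings it provoked). Below is a standalone sketch of the clamping conversion; the function name and the scale constant are assumptions, since only part of the function is visible in this hunk.

#include <cstdint>

// Hypothetical standalone version of the float -> int32_t conversion shown above.
static int32_t clampedFloatToInt32(float f) {
    static const float scale = (float) (1LL << 31);  // assumed; not visible in the hunk
    static const float limpos = 1.0f;
    static const float limneg = -1.0f;

    if (f <= limneg) {
        return INT32_MIN;   // previously "-0x80000000"
    } else if (f >= limpos) {
        return INT32_MAX;   // previously "0x7fffffff"
    }
    f *= scale;
    return (int32_t) f;     // truncation toward zero, as noted in the original comment
}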
diff --git a/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp b/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp
new file mode 100644
index 0000000..4f973bc
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <unistd.h>
+
+#include "ManyToMultiConverter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+ManyToMultiConverter::ManyToMultiConverter(int32_t channelCount)
+        : inputs(channelCount)
+        , output(*this, channelCount) {
+    for (int i = 0; i < channelCount; i++) {
+        inputs[i] = std::make_unique<FlowGraphPortFloatInput>(*this, 1);
+    }
+}
+
+int32_t ManyToMultiConverter::onProcess(int32_t numFrames) {
+    int32_t channelCount = output.getSamplesPerFrame();
+
+    for (int ch = 0; ch < channelCount; ch++) {
+        const float *inputBuffer = inputs[ch]->getBuffer();
+        float *outputBuffer = output.getBuffer() + ch;
+
+        for (int i = 0; i < numFrames; i++) {
+            // read one, write into the proper interleaved output channel
+            float sample = *inputBuffer++;
+            *outputBuffer = sample;
+            outputBuffer += channelCount; // advance to next multichannel frame
+        }
+    }
+    return numFrames;
+}
+
diff --git a/media/libaaudio/src/flowgraph/ManyToMultiConverter.h b/media/libaaudio/src/flowgraph/ManyToMultiConverter.h
new file mode 100644
index 0000000..50644cf
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/ManyToMultiConverter.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H
+#define FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H
+
+#include <unistd.h>
+#include <sys/types.h>
+#include <vector>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+/**
+ * Combine multiple mono inputs into one interleaved multi-channel output.
+ */
+class ManyToMultiConverter : public flowgraph::FlowGraphNode {
+public:
+    explicit ManyToMultiConverter(int32_t channelCount);
+
+    virtual ~ManyToMultiConverter() = default;
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    void setEnabled(bool /*enabled*/) {}
+
+    std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatInput>> inputs;
+    flowgraph::FlowGraphPortFloatOutput output;
+
+    const char *getName() override {
+        return "ManyToMultiConverter";
+    }
+
+private:
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/MonoBlend.cpp b/media/libaaudio/src/flowgraph/MonoBlend.cpp
new file mode 100644
index 0000000..4fd75e1
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/MonoBlend.cpp
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <unistd.h>
+
+#include "MonoBlend.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+MonoBlend::MonoBlend(int32_t channelCount)
+        : FlowGraphFilter(channelCount)
+        , mInvChannelCount(1. / channelCount)
+{
+}
+
+int32_t MonoBlend::onProcess(int32_t numFrames) {
+    int32_t channelCount = output.getSamplesPerFrame();
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
+
+    for (int32_t i = 0; i < numFrames; ++i) {
+        float accum = 0.0f; // sum of all channels for this frame
+        for (int32_t j = 0; j < channelCount; ++j) {
+            accum += *inputBuffer++;
+        }
+        accum *= mInvChannelCount;
+        for (int32_t j = 0; j < channelCount; ++j) {
+            *outputBuffer++ = accum;
+        }
+    }
+
+    return numFrames;
+}
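
Editor's note, a worked example of the blend above for stereo: an input frame {0.2f, 0.6f} is summed to 0.8f, scaled by mInvChannelCount (0.5f), and the resulting average 0.4f is written to both output channels, producing {0.4f, 0.4f}.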
diff --git a/media/libaaudio/src/flowgraph/MonoBlend.h b/media/libaaudio/src/flowgraph/MonoBlend.h
new file mode 100644
index 0000000..f8d44ff
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/MonoBlend.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_MONO_BLEND_H
+#define FLOWGRAPH_MONO_BLEND_H
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+/**
+ * Combine data between multiple channels so each channel is an average
+ * of all channels.
+ */
+class MonoBlend : public FlowGraphFilter {
+public:
+    explicit MonoBlend(int32_t channelCount);
+
+    virtual ~MonoBlend() = default;
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "MonoBlend";
+    }
+private:
+    const float mInvChannelCount;
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_MONO_BLEND_H
diff --git a/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp b/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp
index 78aad52..33eed52 100644
--- a/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp
+++ b/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp
@@ -14,34 +14,28 @@
  * limitations under the License.
  */
 
-
 #include <unistd.h>
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 #include "MonoToMultiConverter.h"
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
-MonoToMultiConverter::MonoToMultiConverter(int32_t channelCount)
+MonoToMultiConverter::MonoToMultiConverter(int32_t outputChannelCount)
         : input(*this, 1)
-        , output(*this, channelCount) {
+        , output(*this, outputChannelCount) {
 }
 
-MonoToMultiConverter::~MonoToMultiConverter() { }
-
-int32_t MonoToMultiConverter::onProcess(int64_t framePosition, int32_t numFrames) {
-    int32_t framesToProcess = input.pullData(framePosition, numFrames);
-
-    const float *inputBuffer = input.getBlock();
-    float *outputBuffer = output.getBlock();
+int32_t MonoToMultiConverter::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
     int32_t channelCount = output.getSamplesPerFrame();
-    // TODO maybe move to audio_util as audio_mono_to_multi()
-    for (int i = 0; i < framesToProcess; i++) {
+    for (int i = 0; i < numFrames; i++) {
         // read one, write many
         float sample = *inputBuffer++;
         for (int channel = 0; channel < channelCount; channel++) {
             *outputBuffer++ = sample;
         }
     }
-    return framesToProcess;
+    return numFrames;
 }
 
diff --git a/media/libaaudio/src/flowgraph/MonoToMultiConverter.h b/media/libaaudio/src/flowgraph/MonoToMultiConverter.h
index 34d53c7..762edb0 100644
--- a/media/libaaudio/src/flowgraph/MonoToMultiConverter.h
+++ b/media/libaaudio/src/flowgraph/MonoToMultiConverter.h
@@ -14,29 +14,36 @@
  * limitations under the License.
  */
 
-
 #ifndef FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H
 #define FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H
 
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class MonoToMultiConverter : public AudioProcessorBase {
+/**
+ * Convert a monophonic stream to a multi-channel interleaved stream
+ * with the same signal on each channel.
+ */
+class MonoToMultiConverter : public FlowGraphNode {
 public:
-    explicit MonoToMultiConverter(int32_t channelCount);
+    explicit MonoToMultiConverter(int32_t outputChannelCount);
 
-    virtual ~MonoToMultiConverter();
+    virtual ~MonoToMultiConverter() = default;
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
 
-    AudioFloatInputPort input;
-    AudioFloatOutputPort output;
+    const char *getName() override {
+        return "MonoToMultiConverter";
+    }
+
+    FlowGraphPortFloatInput input;
+    FlowGraphPortFloatOutput output;
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp b/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp
new file mode 100644
index 0000000..5cdf594
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "MultiToManyConverter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+MultiToManyConverter::MultiToManyConverter(int32_t channelCount)
+        : outputs(channelCount)
+        , input(*this, channelCount) {
+    for (int i = 0; i < channelCount; i++) {
+        outputs[i] = std::make_unique<FlowGraphPortFloatOutput>(*this, 1);
+    }
+}
+
+MultiToManyConverter::~MultiToManyConverter() = default;
+
+int32_t MultiToManyConverter::onProcess(int32_t numFrames) {
+    int32_t channelCount = input.getSamplesPerFrame();
+
+    for (int ch = 0; ch < channelCount; ch++) {
+        const float *inputBuffer = input.getBuffer() + ch;
+        float *outputBuffer = outputs[ch]->getBuffer();
+
+        for (int i = 0; i < numFrames; i++) {
+            *outputBuffer++ = *inputBuffer;
+            inputBuffer += channelCount;
+        }
+    }
+
+    return numFrames;
+}
diff --git a/media/libaaudio/src/flowgraph/MultiToManyConverter.h b/media/libaaudio/src/flowgraph/MultiToManyConverter.h
new file mode 100644
index 0000000..dee40a2
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/MultiToManyConverter.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
+#define FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
+
+#include <unistd.h>
+#include <sys/types.h>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+/**
+ * Convert a multi-channel interleaved stream to multiple mono-channel
+ * outputs.
+ */
+class MultiToManyConverter : public FlowGraphNode {
+public:
+    explicit MultiToManyConverter(int32_t channelCount);
+
+    virtual ~MultiToManyConverter();
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "MultiToManyConverter";
+    }
+
+    std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatOutput>> outputs;
+    flowgraph::FlowGraphPortFloatInput input;
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
\ No newline at end of file
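
Editor's note, a wiring sketch (not part of the patch, and not how the surrounding AAudio code necessarily uses these nodes; it only illustrates the port arrays): MultiToManyConverter splits an interleaved stream into per-channel mono ports, and ManyToMultiConverter re-interleaves them, so mono processing nodes can be spliced in between. Include paths are assumed.

#include <cstdint>
#include "ManyToMultiConverter.h"
#include "MultiToManyConverter.h"

using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;

void wireConverters() {
    constexpr int32_t kChannelCount = 2;

    MultiToManyConverter deinterleave(kChannelCount);  // interleaved in, N mono outs
    ManyToMultiConverter interleave(kChannelCount);    // N mono ins, interleaved out

    for (int32_t ch = 0; ch < kChannelCount; ch++) {
        // outputs[ch] and inputs[ch] are one-channel ports; a mono processing node
        // could be connected between them here instead of wiring them directly.
        deinterleave.outputs[ch]->connect(interleave.inputs[ch].get());
    }
    // deinterleave.input and interleave.output are then connected to the
    // multi-channel parts of the graph.
}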
diff --git a/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp b/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp
new file mode 100644
index 0000000..467f95e
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "MultiToMonoConverter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+
+MultiToMonoConverter::MultiToMonoConverter(int32_t inputChannelCount)
+        : input(*this, inputChannelCount)
+        , output(*this, 1) {
+}
+
+MultiToMonoConverter::~MultiToMonoConverter() = default;
+
+int32_t MultiToMonoConverter::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
+    int32_t channelCount = input.getSamplesPerFrame();
+    for (int i = 0; i < numFrames; i++) {
+        // read the first channel of the multi-channel frame, write it as mono
+        *outputBuffer++ = *inputBuffer;
+        inputBuffer += channelCount;
+    }
+    return numFrames;
+}
+
diff --git a/media/libaaudio/src/flowgraph/MultiToMonoConverter.h b/media/libaaudio/src/flowgraph/MultiToMonoConverter.h
new file mode 100644
index 0000000..bf5b7b6
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/MultiToMonoConverter.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
+#define FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
+
+#include <unistd.h>
+#include <sys/types.h>
+
+#include "FlowGraphNode.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+/**
+ * Convert a multi-channel interleaved stream to a monophonic stream
+ * by extracting channel[0].
+ */
+class MultiToMonoConverter : public FlowGraphNode {
+public:
+    explicit MultiToMonoConverter(int32_t inputChannelCount);
+
+    virtual ~MultiToMonoConverter();
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "MultiToMonoConverter";
+    }
+
+    FlowGraphPortFloatInput input;
+    FlowGraphPortFloatOutput output;
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/RampLinear.cpp b/media/libaaudio/src/flowgraph/RampLinear.cpp
index a260828..80ac72a 100644
--- a/media/libaaudio/src/flowgraph/RampLinear.cpp
+++ b/media/libaaudio/src/flowgraph/RampLinear.cpp
@@ -14,20 +14,15 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "RampLinear"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
-
 #include <algorithm>
 #include <unistd.h>
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 #include "RampLinear.h"
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 RampLinear::RampLinear(int32_t channelCount)
-        : input(*this, channelCount)
-        , output(*this, channelCount) {
+        : FlowGraphFilter(channelCount) {
     mTarget.store(1.0f);
 }
 
@@ -37,16 +32,19 @@
 
 void RampLinear::setTarget(float target) {
     mTarget.store(target);
+    // If the ramp has not been used then start immediately at this level.
+    if (mLastCallCount == kInitialCallCount) {
+        forceCurrent(target);
+    }
 }
 
 float RampLinear::interpolateCurrent() {
     return mLevelTo - (mRemaining * mScaler);
 }
 
-int32_t RampLinear::onProcess(int64_t framePosition, int32_t numFrames) {
-    int32_t framesToProcess = input.pullData(framePosition, numFrames);
-    const float *inputBuffer = input.getBlock();
-    float *outputBuffer = output.getBlock();
+int32_t RampLinear::onProcess(int32_t numFrames) {
+    const float *inputBuffer = input.getBuffer();
+    float *outputBuffer = output.getBuffer();
     int32_t channelCount = output.getSamplesPerFrame();
 
     float target = getTarget();
@@ -55,12 +53,10 @@
         mLevelFrom = interpolateCurrent();
         mLevelTo = target;
         mRemaining = mLengthInFrames;
-        ALOGV("%s() mLevelFrom = %f, mLevelTo = %f, mRemaining = %d, mScaler = %f",
-              __func__, mLevelFrom, mLevelTo, mRemaining, mScaler);
         mScaler = (mLevelTo - mLevelFrom) / mLengthInFrames; // for interpolation
     }
 
-    int32_t framesLeft = framesToProcess;
+    int32_t framesLeft = numFrames;
 
     if (mRemaining > 0) { // Ramping? This doesn't happen very often.
         int32_t framesToRamp = std::min(framesLeft, mRemaining);
@@ -81,5 +77,5 @@
         *outputBuffer++ = *inputBuffer++ * mLevelTo;
     }
 
-    return framesToProcess;
+    return numFrames;
 }
diff --git a/media/libaaudio/src/flowgraph/RampLinear.h b/media/libaaudio/src/flowgraph/RampLinear.h
index bdc8f41..3839d6e 100644
--- a/media/libaaudio/src/flowgraph/RampLinear.h
+++ b/media/libaaudio/src/flowgraph/RampLinear.h
@@ -21,17 +21,25 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class RampLinear : public AudioProcessorBase {
+/**
+ * When the target is modified then the output will ramp smoothly
+ * between the original and the new target value.
+ * This can be used to smooth out control values and reduce pops.
+ *
+ * The target may be updated while a ramp is in progress, which will trigger
+ * a new ramp from the current value.
+ */
+class RampLinear : public FlowGraphFilter {
 public:
     explicit RampLinear(int32_t channelCount);
 
     virtual ~RampLinear() = default;
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
 
     /**
      * This is used for the next ramp.
@@ -66,8 +74,9 @@
         mLevelTo = level;
     }
 
-    AudioFloatInputPort input;
-    AudioFloatOutputPort output;
+    const char *getName() override {
+        return "RampLinear";
+    }
 
 private:
 
@@ -82,6 +91,6 @@
     float               mLevelTo         = 0.0f;
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_RAMP_LINEAR_H
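
Editor's note, a short usage sketch for RampLinear (not part of the patch; setLengthInFrames() is assumed to be declared in the elided portion of this header, consistent with mLengthInFrames above, and the include paths are illustrative):

#include <cstdint>
#include "RampLinear.h"
#include "SinkFloat.h"
#include "SourceFloat.h"

using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;

void rampExample() {
    constexpr int32_t kChannelCount = 2;
    SourceFloat source(kChannelCount);
    RampLinear volumeRamp(kChannelCount);
    SinkFloat sink(kChannelCount);

    source.output.connect(&volumeRamp.input);
    volumeRamp.output.connect(&sink.input);

    volumeRamp.setLengthInFrames(480);  // ramp duration, ~10 ms at 48000 Hz (assumed setter)
    volumeRamp.setTarget(0.25f);        // before the graph runs, this also sets the current level
    // ... after frames have been pulled through the sink, a later
    // volumeRamp.setTarget(1.0f) ramps the output linearly from 0.25 toward 1.0.
}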
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
new file mode 100644
index 0000000..a15fcb8
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SampleRateConverter.h"
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+SampleRateConverter::SampleRateConverter(int32_t channelCount,
+                                         MultiChannelResampler &resampler)
+        : FlowGraphFilter(channelCount)
+        , mResampler(resampler) {
+    setDataPulledAutomatically(false);
+}
+
+void SampleRateConverter::reset() {
+    FlowGraphNode::reset();
+    mInputCursor = kInitialCallCount;
+}
+
+// Return true if there is a sample available.
+bool SampleRateConverter::isInputAvailable() {
+    // If we have consumed all of the input data then go out and get some more.
+    if (mInputCursor >= mNumValidInputFrames) {
+        mInputCallCount++;
+        mNumValidInputFrames = input.pullData(mInputCallCount, input.getFramesPerBuffer());
+        mInputCursor = 0;
+    }
+    return (mInputCursor < mNumValidInputFrames);
+}
+
+const float *SampleRateConverter::getNextInputFrame() {
+    const float *inputBuffer = input.getBuffer();
+    return &inputBuffer[mInputCursor++ * input.getSamplesPerFrame()];
+}
+
+int32_t SampleRateConverter::onProcess(int32_t numFrames) {
+    float *outputBuffer = output.getBuffer();
+    int32_t channelCount = output.getSamplesPerFrame();
+    int framesLeft = numFrames;
+    while (framesLeft > 0) {
+        // Gather input samples as needed.
+        if (mResampler.isWriteNeeded()) {
+            if (isInputAvailable()) {
+                const float *frame = getNextInputFrame();
+                mResampler.writeNextFrame(frame);
+            } else {
+                break;
+            }
+        } else {
+            // Output frame is interpolated from input samples.
+            mResampler.readNextFrame(outputBuffer);
+            outputBuffer += channelCount;
+            framesLeft--;
+        }
+    }
+    return numFrames - framesLeft;
+}
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.h b/media/libaaudio/src/flowgraph/SampleRateConverter.h
new file mode 100644
index 0000000..f883e6c
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
+#define FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
+
+#include <unistd.h>
+#include <sys/types.h>
+
+#include "FlowGraphNode.h"
+#include "resampler/MultiChannelResampler.h"
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+
+class SampleRateConverter : public FlowGraphFilter {
+public:
+    explicit SampleRateConverter(int32_t channelCount,
+                                 resampler::MultiChannelResampler &resampler);
+
+    virtual ~SampleRateConverter() = default;
+
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SampleRateConverter";
+    }
+
+    void reset() override;
+
+private:
+
+    // Return true if there is a sample available.
+    bool isInputAvailable();
+
+    // This assumes data is available. Only call after calling isInputAvailable().
+    const float *getNextInputFrame();
+
+    resampler::MultiChannelResampler &mResampler;
+
+    int32_t mInputCursor = 0;         // offset into the input port buffer
+    int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
+    // We need our own callCount for upstream calls because calls occur at a different rate.
+    // This means we cannot have cyclic graphs or merges that contain an SRC.
+    int64_t mInputCallCount = 0;
+
+};
+
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
+
+#endif //FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
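
Editor's note, a construction sketch (assumption: MultiChannelResampler::make() and its Quality enum come from resampler/MultiChannelResampler.h, added elsewhere in this patch; the exact factory signature is not shown in this section):

#include <cstdint>
#include <memory>
#include "SampleRateConverter.h"
#include "resampler/MultiChannelResampler.h"

using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;

void buildSampleRateConverter() {
    constexpr int32_t kChannelCount = 2;
    std::unique_ptr<MultiChannelResampler> resampler(MultiChannelResampler::make(
            kChannelCount,
            44100 /* inputRate */,
            48000 /* outputRate */,
            MultiChannelResampler::Quality::Medium));

    // The converter pulls its own input (setDataPulledAutomatically(false) in the
    // constructor) because its input and output advance at different rates.
    SampleRateConverter srcNode(kChannelCount, *resampler);
    // srcNode.input / srcNode.output are then wired into the rest of the graph.
}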
diff --git a/media/libaaudio/src/flowgraph/SinkFloat.cpp b/media/libaaudio/src/flowgraph/SinkFloat.cpp
index fb3dcbc..940a66b 100644
--- a/media/libaaudio/src/flowgraph/SinkFloat.cpp
+++ b/media/libaaudio/src/flowgraph/SinkFloat.cpp
@@ -16,31 +16,31 @@
 
 #include <algorithm>
 #include <unistd.h>
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 #include "SinkFloat.h"
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SinkFloat::SinkFloat(int32_t channelCount)
-        : AudioSink(channelCount) {
+        : FlowGraphSink(channelCount) {
 }
 
 int32_t SinkFloat::read(void *data, int32_t numFrames) {
     float *floatData = (float *) data;
-    int32_t channelCount = input.getSamplesPerFrame();
+    const int32_t channelCount = input.getSamplesPerFrame();
 
     int32_t framesLeft = numFrames;
     while (framesLeft > 0) {
         // Run the graph and pull data through the input port.
-        int32_t framesRead = pull(framesLeft);
-        if (framesRead <= 0) {
+        int32_t framesPulled = pullData(framesLeft);
+        if (framesPulled <= 0) {
             break;
         }
-        const float *signal = input.getBlock();
-        int32_t numSamples = framesRead * channelCount;
+        const float *signal = input.getBuffer();
+        int32_t numSamples = framesPulled * channelCount;
         memcpy(floatData, signal, numSamples * sizeof(float));
         floatData += numSamples;
-        framesLeft -= framesRead;
+        framesLeft -= framesPulled;
     }
     return numFrames - framesLeft;
 }
diff --git a/media/libaaudio/src/flowgraph/SinkFloat.h b/media/libaaudio/src/flowgraph/SinkFloat.h
index 7775c08..3be3f5d 100644
--- a/media/libaaudio/src/flowgraph/SinkFloat.h
+++ b/media/libaaudio/src/flowgraph/SinkFloat.h
@@ -21,18 +21,25 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SinkFloat : public AudioSink {
+/**
+ * A FlowGraphSink that lets you read data as 32-bit floats.
+ */
+class SinkFloat : public FlowGraphSink {
 public:
     explicit SinkFloat(int32_t channelCount);
+    ~SinkFloat() override = default;
 
     int32_t read(void *data, int32_t numFrames) override;
 
+    const char *getName() override {
+        return "SinkFloat";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SINK_FLOAT_H
diff --git a/media/libaaudio/src/flowgraph/SinkI16.cpp b/media/libaaudio/src/flowgraph/SinkI16.cpp
index ffec8f5..690431c 100644
--- a/media/libaaudio/src/flowgraph/SinkI16.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI16.cpp
@@ -17,17 +17,16 @@
 #include <algorithm>
 #include <unistd.h>
 
-#ifdef __ANDROID__
+#include "SinkI16.h"
+
+#if FLOWGRAPH_ANDROID_INTERNAL
 #include <audio_utils/primitives.h>
 #endif
 
-#include "AudioProcessorBase.h"
-#include "SinkI16.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SinkI16::SinkI16(int32_t channelCount)
-        : AudioSink(channelCount) {}
+        : FlowGraphSink(channelCount) {}
 
 int32_t SinkI16::read(void *data, int32_t numFrames) {
     int16_t *shortData = (int16_t *) data;
@@ -36,13 +35,13 @@
     int32_t framesLeft = numFrames;
     while (framesLeft > 0) {
         // Run the graph and pull data through the input port.
-        int32_t framesRead = pull(framesLeft);
+        int32_t framesRead = pullData(framesLeft);
         if (framesRead <= 0) {
             break;
         }
-        const float *signal = input.getBlock();
+        const float *signal = input.getBuffer();
         int32_t numSamples = framesRead * channelCount;
-#ifdef __ANDROID__
+#if FLOWGRAPH_ANDROID_INTERNAL
         memcpy_to_i16_from_float(shortData, signal, numSamples);
         shortData += numSamples;
         signal += numSamples;
diff --git a/media/libaaudio/src/flowgraph/SinkI16.h b/media/libaaudio/src/flowgraph/SinkI16.h
index 6d86266..bf124f5 100644
--- a/media/libaaudio/src/flowgraph/SinkI16.h
+++ b/media/libaaudio/src/flowgraph/SinkI16.h
@@ -20,17 +20,24 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SinkI16 : public AudioSink {
+/**
+ * A FlowGraphSink that lets you read data as 16-bit signed integers.
+ */
+class SinkI16 : public FlowGraphSink {
 public:
     explicit SinkI16(int32_t channelCount);
 
     int32_t read(void *data, int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SinkI16";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SINK_I16_H
diff --git a/media/libaaudio/src/flowgraph/SinkI24.cpp b/media/libaaudio/src/flowgraph/SinkI24.cpp
index 0cb077d..d4f68b6 100644
--- a/media/libaaudio/src/flowgraph/SinkI24.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI24.cpp
@@ -15,19 +15,20 @@
  */
 
 #include <algorithm>
-#include <stdint.h>
+#include <unistd.h>
 
-#ifdef __ANDROID__
+
+#include "FlowGraphNode.h"
+#include "SinkI24.h"
+
+#if FLOWGRAPH_ANDROID_INTERNAL
 #include <audio_utils/primitives.h>
 #endif
 
-#include "AudioProcessorBase.h"
-#include "SinkI24.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SinkI24::SinkI24(int32_t channelCount)
-        : AudioSink(channelCount) {}
+        : FlowGraphSink(channelCount) {}
 
 int32_t SinkI24::read(void *data, int32_t numFrames) {
     uint8_t *byteData = (uint8_t *) data;
@@ -36,13 +37,13 @@
     int32_t framesLeft = numFrames;
     while (framesLeft > 0) {
         // Run the graph and pull data through the input port.
-        int32_t framesRead = pull(framesLeft);
+        int32_t framesRead = pullData(framesLeft);
         if (framesRead <= 0) {
             break;
         }
-        const float *floatData = input.getBlock();
+        const float *floatData = input.getBuffer();
         int32_t numSamples = framesRead * channelCount;
-#ifdef __ANDROID__
+#if FLOWGRAPH_ANDROID_INTERNAL
         memcpy_to_p24_from_float(byteData, floatData, numSamples);
         static const int kBytesPerI24Packed = 3;
         byteData += numSamples * kBytesPerI24Packed;
diff --git a/media/libaaudio/src/flowgraph/SinkI24.h b/media/libaaudio/src/flowgraph/SinkI24.h
index 5b9b505..6b4135e 100644
--- a/media/libaaudio/src/flowgraph/SinkI24.h
+++ b/media/libaaudio/src/flowgraph/SinkI24.h
@@ -20,17 +20,25 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SinkI24 : public AudioSink {
+/**
+ * A FlowGraphSink that lets you read data as packed 24-bit signed integers.
+ * The sample size is 3 bytes.
+ */
+class SinkI24 : public FlowGraphSink {
 public:
     explicit SinkI24(int32_t channelCount);
 
     int32_t read(void *data, int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SinkI24";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SINK_I24_H
diff --git a/media/libaaudio/src/flowgraph/SinkI32.cpp b/media/libaaudio/src/flowgraph/SinkI32.cpp
index eab863d..b14b3d2 100644
--- a/media/libaaudio/src/flowgraph/SinkI32.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI32.cpp
@@ -14,18 +14,18 @@
  * limitations under the License.
  */
 
-#ifdef __ANDROID__
-#include <audio_utils/primitives.h>
-#endif
-
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 #include "FlowgraphUtilities.h"
 #include "SinkI32.h"
 
-using namespace flowgraph;
+#if FLOWGRAPH_ANDROID_INTERNAL
+#include <audio_utils/primitives.h>
+#endif
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SinkI32::SinkI32(int32_t channelCount)
-        : AudioSink(channelCount) {}
+        : FlowGraphSink(channelCount) {}
 
 int32_t SinkI32::read(void *data, int32_t numFrames) {
     int32_t *intData = (int32_t *) data;
@@ -34,13 +34,13 @@
     int32_t framesLeft = numFrames;
     while (framesLeft > 0) {
         // Run the graph and pull data through the input port.
-        int32_t framesRead = pull(framesLeft);
+        int32_t framesRead = pullData(framesLeft);
         if (framesRead <= 0) {
             break;
         }
-        const float *signal = input.getBlock();
+        const float *signal = input.getBuffer();
         int32_t numSamples = framesRead * channelCount;
-#ifdef __ANDROID__
+#if FLOWGRAPH_ANDROID_INTERNAL
         memcpy_to_i32_from_float(intData, signal, numSamples);
         intData += numSamples;
         signal += numSamples;
diff --git a/media/libaaudio/src/flowgraph/SinkI32.h b/media/libaaudio/src/flowgraph/SinkI32.h
index 09d23b7..35507ea 100644
--- a/media/libaaudio/src/flowgraph/SinkI32.h
+++ b/media/libaaudio/src/flowgraph/SinkI32.h
@@ -19,18 +19,22 @@
 
 #include <stdint.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SinkI32 : public AudioSink {
+class SinkI32 : public FlowGraphSink {
 public:
     explicit SinkI32(int32_t channelCount);
     ~SinkI32() override = default;
 
     int32_t read(void *data, int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SinkI32";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SINK_I32_H
diff --git a/media/libaaudio/src/flowgraph/SourceFloat.cpp b/media/libaaudio/src/flowgraph/SourceFloat.cpp
index 4bb674f..a0c8827 100644
--- a/media/libaaudio/src/flowgraph/SourceFloat.cpp
+++ b/media/libaaudio/src/flowgraph/SourceFloat.cpp
@@ -16,23 +16,22 @@
 
 #include <algorithm>
 #include <unistd.h>
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 #include "SourceFloat.h"
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SourceFloat::SourceFloat(int32_t channelCount)
-        : AudioSource(channelCount) {
+        : FlowGraphSourceBuffered(channelCount) {
 }
 
-int32_t SourceFloat::onProcess(int64_t framePosition, int32_t numFrames) {
+int32_t SourceFloat::onProcess(int32_t numFrames) {
+    float *outputBuffer = output.getBuffer();
+    const int32_t channelCount = output.getSamplesPerFrame();
 
-    float *outputBuffer = output.getBlock();
-    int32_t channelCount = output.getSamplesPerFrame();
-
-    int32_t framesLeft = mSizeInFrames - mFrameIndex;
-    int32_t framesToProcess = std::min(numFrames, framesLeft);
-    int32_t numSamples = framesToProcess * channelCount;
+    const int32_t framesLeft = mSizeInFrames - mFrameIndex;
+    const int32_t framesToProcess = std::min(numFrames, framesLeft);
+    const int32_t numSamples = framesToProcess * channelCount;
 
     const float *floatBase = (float *) mData;
     const float *floatData = &floatBase[mFrameIndex * channelCount];
diff --git a/media/libaaudio/src/flowgraph/SourceFloat.h b/media/libaaudio/src/flowgraph/SourceFloat.h
index e6eed9f..78053e5 100644
--- a/media/libaaudio/src/flowgraph/SourceFloat.h
+++ b/media/libaaudio/src/flowgraph/SourceFloat.h
@@ -20,17 +20,25 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SourceFloat : public AudioSource {
+/**
+ * A buffered source that reads a block of pre-defined float data.
+ */
+class SourceFloat : public FlowGraphSourceBuffered {
 public:
     explicit SourceFloat(int32_t channelCount);
+    ~SourceFloat() override = default;
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SourceFloat";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SOURCE_FLOAT_H
diff --git a/media/libaaudio/src/flowgraph/SourceI16.cpp b/media/libaaudio/src/flowgraph/SourceI16.cpp
index c3fcec2..16cd2b3 100644
--- a/media/libaaudio/src/flowgraph/SourceI16.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI16.cpp
@@ -17,21 +17,21 @@
 #include <algorithm>
 #include <unistd.h>
 
-#ifdef __ANDROID__
+#include "FlowGraphNode.h"
+#include "SourceI16.h"
+
+#if FLOWGRAPH_ANDROID_INTERNAL
 #include <audio_utils/primitives.h>
 #endif
 
-#include "AudioProcessorBase.h"
-#include "SourceI16.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SourceI16::SourceI16(int32_t channelCount)
-        : AudioSource(channelCount) {
+        : FlowGraphSourceBuffered(channelCount) {
 }
 
-int32_t SourceI16::onProcess(int64_t framePosition, int32_t numFrames) {
-    float *floatData = output.getBlock();
+int32_t SourceI16::onProcess(int32_t numFrames) {
+    float *floatData = output.getBuffer();
     int32_t channelCount = output.getSamplesPerFrame();
 
     int32_t framesLeft = mSizeInFrames - mFrameIndex;
@@ -41,7 +41,7 @@
     const int16_t *shortBase = static_cast<const int16_t *>(mData);
     const int16_t *shortData = &shortBase[mFrameIndex * channelCount];
 
-#ifdef __ANDROID__
+#if FLOWGRAPH_ANDROID_INTERNAL
     memcpy_to_float_from_i16(floatData, shortData, numSamples);
 #else
     for (int i = 0; i < numSamples; i++) {
diff --git a/media/libaaudio/src/flowgraph/SourceI16.h b/media/libaaudio/src/flowgraph/SourceI16.h
index 2b116cf..923890c 100644
--- a/media/libaaudio/src/flowgraph/SourceI16.h
+++ b/media/libaaudio/src/flowgraph/SourceI16.h
@@ -20,17 +20,23 @@
 #include <unistd.h>
 #include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
-
-class SourceI16 : public AudioSource {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
+/**
+ * A buffered source that reads a block of pre-defined 16-bit integer data.
+ */
+class SourceI16 : public FlowGraphSourceBuffered {
 public:
     explicit SourceI16(int32_t channelCount);
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SourceI16";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SOURCE_I16_H
diff --git a/media/libaaudio/src/flowgraph/SourceI24.cpp b/media/libaaudio/src/flowgraph/SourceI24.cpp
index 097954e..d54b958 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI24.cpp
@@ -15,25 +15,25 @@
  */
 
 #include <algorithm>
-#include <stdint.h>
+#include <unistd.h>
 
-#ifdef __ANDROID__
+#include "FlowGraphNode.h"
+#include "SourceI24.h"
+
+#if FLOWGRAPH_ANDROID_INTERNAL
 #include <audio_utils/primitives.h>
 #endif
 
-#include "AudioProcessorBase.h"
-#include "SourceI24.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 constexpr int kBytesPerI24Packed = 3;
 
 SourceI24::SourceI24(int32_t channelCount)
-        : AudioSource(channelCount) {
+        : FlowGraphSourceBuffered(channelCount) {
 }
 
-int32_t SourceI24::onProcess(int64_t framePosition, int32_t numFrames) {
-    float *floatData = output.getBlock();
+int32_t SourceI24::onProcess(int32_t numFrames) {
+    float *floatData = output.getBuffer();
     int32_t channelCount = output.getSamplesPerFrame();
 
     int32_t framesLeft = mSizeInFrames - mFrameIndex;
@@ -43,7 +43,7 @@
     const uint8_t *byteBase = (uint8_t *) mData;
     const uint8_t *byteData = &byteBase[mFrameIndex * channelCount * kBytesPerI24Packed];
 
-#ifdef __ANDROID__
+#if FLOWGRAPH_ANDROID_INTERNAL
     memcpy_to_float_from_p24(floatData, byteData, numSamples);
 #else
     static const float scale = 1. / (float)(1UL << 31);
diff --git a/media/libaaudio/src/flowgraph/SourceI24.h b/media/libaaudio/src/flowgraph/SourceI24.h
index 2ed6f18..fb66d4a 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.h
+++ b/media/libaaudio/src/flowgraph/SourceI24.h
@@ -17,19 +17,27 @@
 #ifndef FLOWGRAPH_SOURCE_I24_H
 #define FLOWGRAPH_SOURCE_I24_H
 
-#include <stdint.h>
+#include <unistd.h>
+#include <sys/types.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SourceI24 : public AudioSource {
+/**
+ * A buffered source that reads a block of pre-defined 24-bit packed integer data.
+ */
+class SourceI24 : public FlowGraphSourceBuffered {
 public:
     explicit SourceI24(int32_t channelCount);
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
+
+    const char *getName() override {
+        return "SourceI24";
+    }
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SOURCE_I24_H
diff --git a/media/libaaudio/src/flowgraph/SourceI32.cpp b/media/libaaudio/src/flowgraph/SourceI32.cpp
index e8177ad..b1c8f75 100644
--- a/media/libaaudio/src/flowgraph/SourceI32.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI32.cpp
@@ -17,31 +17,31 @@
 #include <algorithm>
 #include <unistd.h>
 
-#ifdef __ANDROID__
+#include "FlowGraphNode.h"
+#include "SourceI32.h"
+
+#if FLOWGRAPH_ANDROID_INTERNAL
 #include <audio_utils/primitives.h>
 #endif
 
-#include "AudioProcessorBase.h"
-#include "SourceI32.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 SourceI32::SourceI32(int32_t channelCount)
-        : AudioSource(channelCount) {
+        : FlowGraphSourceBuffered(channelCount) {
 }
 
-int32_t SourceI32::onProcess(int64_t framePosition, int32_t numFrames) {
-    float *floatData = output.getBlock();
-    int32_t channelCount = output.getSamplesPerFrame();
+int32_t SourceI32::onProcess(int32_t numFrames) {
+    float *floatData = output.getBuffer();
+    const int32_t channelCount = output.getSamplesPerFrame();
 
-    int32_t framesLeft = mSizeInFrames - mFrameIndex;
-    int32_t framesToProcess = std::min(numFrames, framesLeft);
-    int32_t numSamples = framesToProcess * channelCount;
+    const int32_t framesLeft = mSizeInFrames - mFrameIndex;
+    const int32_t framesToProcess = std::min(numFrames, framesLeft);
+    const int32_t numSamples = framesToProcess * channelCount;
 
     const int32_t *intBase = static_cast<const int32_t *>(mData);
     const int32_t *intData = &intBase[mFrameIndex * channelCount];
 
-#ifdef __ANDROID__
+#if FLOWGRAPH_ANDROID_INTERNAL
     memcpy_to_float_from_i32(floatData, intData, numSamples);
 #else
     for (int i = 0; i < numSamples; i++) {
diff --git a/media/libaaudio/src/flowgraph/SourceI32.h b/media/libaaudio/src/flowgraph/SourceI32.h
index e50f9be..7109469 100644
--- a/media/libaaudio/src/flowgraph/SourceI32.h
+++ b/media/libaaudio/src/flowgraph/SourceI32.h
@@ -19,21 +19,24 @@
 
 #include <stdint.h>
 
-#include "AudioProcessorBase.h"
+#include "FlowGraphNode.h"
 
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
 
-class SourceI32 : public AudioSource {
+class SourceI32 : public FlowGraphSourceBuffered {
 public:
     explicit SourceI32(int32_t channelCount);
     ~SourceI32() override = default;
 
-    int32_t onProcess(int64_t framePosition, int32_t numFrames) override;
+    int32_t onProcess(int32_t numFrames) override;
 
+    const char *getName() override {
+        return "SourceI32";
+    }
 private:
     static constexpr float kScale = 1.0 / (1UL << 31);
 };
 
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
 
 #endif //FLOWGRAPH_SOURCE_I32_H
diff --git a/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h b/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h
new file mode 100644
index 0000000..76ec0e7
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H
+#define RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H
+
+#include <math.h>
+
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+/**
+ * Calculate a HyperbolicCosineWindow window centered at 0.
+ * This can be used in place of a Kaiser window.
+ *
+ * The code is based on an anonymous contribution by "a concerned citizen":
+ * https://dsp.stackexchange.com/questions/37714/kaiser-window-approximation
+ */
+class HyperbolicCosineWindow {
+public:
+    HyperbolicCosineWindow() {
+        setStopBandAttenuation(60);
+    }
+
+    /**
+     * @param attenuation typical values range from 30 to 90 dB
+     * @return beta
+     */
+    double setStopBandAttenuation(double attenuation) {
+        double alpha = ((-325.1e-6 * attenuation + 0.1677) * attenuation) - 3.149;
+        setAlpha(alpha);
+        return alpha;
+    }
+
+    void setAlpha(double alpha) {
+        mAlpha = alpha;
+        mInverseCoshAlpha = 1.0 / cosh(alpha);
+    }
+
+    /**
+     * @param x ranges from -1.0 to +1.0
+     */
+    double operator()(double x) {
+        double x2 = x * x;
+        if (x2 >= 1.0) return 0.0;
+        double w = mAlpha * sqrt(1.0 - x2);
+        return cosh(w) * mInverseCoshAlpha;
+    }
+
+private:
+    double mAlpha = 0.0;
+    double mInverseCoshAlpha = 1.0;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H
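
A quick way to see the shape of this window is to evaluate it across its -1.0 to +1.0 range. The sketch below is illustrative only (the main() harness and include path are assumptions, not part of this change); it uses the class exactly as declared above and keeps the 60 dB stop-band attenuation set by the constructor.

    #include <cstdio>
    #include "resampler/HyperbolicCosineWindow.h"

    using namespace RESAMPLER_OUTER_NAMESPACE::resampler;

    int main() {
        HyperbolicCosineWindow window;  // constructor sets 60 dB stop-band attenuation
        for (double x = -1.0; x <= 1.0; x += 0.25) {
            // Values are 1.0 at the center and fall to 0.0 at the edges of the window.
            printf("w(%5.2f) = %f\n", x, window(x));
        }
        return 0;
    }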
diff --git a/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp
new file mode 100644
index 0000000..39e9b24
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IntegerRatio.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+// Enough primes to cover the common sample rates.
+static const int kPrimes[] = {
+        2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
+        43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,
+        101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
+        151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199};
+
+void IntegerRatio::reduce() {
+    for (int prime : kPrimes) {
+        if (mNumerator < prime || mDenominator < prime) {
+            break;
+        }
+
+        // Find biggest prime factor for numerator.
+        while (true) {
+            int top = mNumerator / prime;
+            int bottom = mDenominator / prime;
+            if ((top >= 1)
+                && (bottom >= 1)
+                && (top * prime == mNumerator) // divided evenly?
+                && (bottom * prime == mDenominator)) {
+                mNumerator = top;
+                mDenominator = bottom;
+            } else {
+                break;
+            }
+        }
+
+    }
+}
diff --git a/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h
new file mode 100644
index 0000000..a6b524c
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_INTEGER_RATIO_H
+#define RESAMPLER_INTEGER_RATIO_H
+
+#include <sys/types.h>
+
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+/**
+ * Represent the ratio of two integers.
+ */
+class IntegerRatio {
+public:
+    IntegerRatio(int32_t numerator, int32_t denominator)
+            : mNumerator(numerator), mDenominator(denominator) {}
+
+    /**
+     * Reduce by removing common prime factors.
+     */
+    void reduce();
+
+    int32_t getNumerator() {
+        return mNumerator;
+    }
+
+    int32_t getDenominator() {
+        return mDenominator;
+    }
+
+private:
+    int32_t mNumerator;
+    int32_t mDenominator;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_INTEGER_RATIO_H
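
A minimal sketch of how this helper behaves (the main() wrapper is illustrative, not part of this change): reducing the common 44.1 kHz to 48 kHz conversion removes the shared prime factors 2, 3 and 5 and leaves 147/160, the ratio used as an example elsewhere in this change.

    #include <cstdio>
    #include "resampler/IntegerRatio.h"

    using namespace RESAMPLER_OUTER_NAMESPACE::resampler;

    int main() {
        IntegerRatio ratio(44100, 48000);
        ratio.reduce();  // divides out common primes: 44100/48000 becomes 147/160
        printf("%d / %d\n", ratio.getNumerator(), ratio.getDenominator());
        return 0;
    }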
diff --git a/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h b/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h
new file mode 100644
index 0000000..f99f9b4
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_KAISER_WINDOW_H
+#define RESAMPLER_KAISER_WINDOW_H
+
+#include <math.h>
+
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+/**
+ * Calculate a Kaiser window centered at 0.
+ */
+class KaiserWindow {
+public:
+    KaiserWindow() {
+        setStopBandAttenuation(60);
+    }
+
+    /**
+     * @param attenuation typical values range from 30 to 90 dB
+     * @return beta
+     */
+    double setStopBandAttenuation(double attenuation) {
+        double beta = 0.0;
+        if (attenuation > 50) {
+            beta = 0.1102 * (attenuation - 8.7);
+        } else if (attenuation >= 21) {
+            double a21 = attenuation - 21;
+            beta = 0.5842 * pow(a21, 0.4) + (0.07886 * a21);
+        }
+        setBeta(beta);
+        return beta;
+    }
+
+    void setBeta(double beta) {
+        mBeta = beta;
+        mInverseBesselBeta = 1.0 / bessel(beta);
+    }
+
+    /**
+     * @param x ranges from -1.0 to +1.0
+     */
+    double operator()(double x) {
+        double x2 = x * x;
+        if (x2 >= 1.0) return 0.0;
+        double w = mBeta * sqrt(1.0 - x2);
+        return bessel(w) * mInverseBesselBeta;
+    }
+
+    // Approximation of a
+    // modified zero order Bessel function of the first kind.
+    // Based on a discussion at:
+    // https://dsp.stackexchange.com/questions/37714/kaiser-window-approximation
+    static double bessel(double x) {
+        double y = cosh(0.970941817426052 * x);
+        y += cosh(0.8854560256532099 * x);
+        y += cosh(0.7485107481711011 * x);
+        y += cosh(0.5680647467311558 * x);
+        y += cosh(0.3546048870425356 * x);
+        y += cosh(0.120536680255323 * x);
+        y *= 2;
+        y += cosh(x);
+        y /= 13;
+        return y;
+    }
+
+private:
+    double mBeta = 0.0;
+    double mInverseBesselBeta = 1.0;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_KAISER_WINDOW_H
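
The beta values produced by setStopBandAttenuation() follow the standard Kaiser design formula, e.g. 0.1102 * (60 - 8.7) ≈ 5.65 for the default 60 dB target. The sketch below is illustrative only (the main() harness is an assumption); it prints beta for a few attenuation targets and confirms that the window peaks at 1.0 in the center.

    #include <cstdio>
    #include "resampler/KaiserWindow.h"

    using namespace RESAMPLER_OUTER_NAMESPACE::resampler;

    int main() {
        KaiserWindow window;
        const double attenuations[] = {30.0, 60.0, 90.0};
        for (double attenuation : attenuations) {
            double beta = window.setStopBandAttenuation(attenuation);
            // Larger attenuation targets yield a larger beta (a more strongly tapered window).
            printf("attenuation = %4.1f dB, beta = %f, w(0) = %f\n",
                   attenuation, beta, window(0.0));
        }
        return 0;
    }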
diff --git a/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp b/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp
new file mode 100644
index 0000000..cb4932a
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "LinearResampler.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+LinearResampler::LinearResampler(const MultiChannelResampler::Builder &builder)
+        : MultiChannelResampler(builder) {
+    mPreviousFrame = std::make_unique<float[]>(getChannelCount());
+    mCurrentFrame = std::make_unique<float[]>(getChannelCount());
+}
+
+void LinearResampler::writeFrame(const float *frame) {
+    memcpy(mPreviousFrame.get(), mCurrentFrame.get(), sizeof(float) * getChannelCount());
+    memcpy(mCurrentFrame.get(), frame, sizeof(float) * getChannelCount());
+}
+
+void LinearResampler::readFrame(float *frame) {
+    float *previous = mPreviousFrame.get();
+    float *current = mCurrentFrame.get();
+    float phase = (float) getIntegerPhase() / mDenominator;
+    // iterate across samples in the frame
+    for (int channel = 0; channel < getChannelCount(); channel++) {
+        float f0 = *previous++;
+        float f1 = *current++;
+        *frame++ = f0 + (phase * (f1 - f0));
+    }
+}
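
The per-channel work in readFrame() above is plain linear interpolation between the two most recently written frames, with the fractional position derived from the integer phase. Restated as a standalone helper (names are illustrative, not part of this change):

    // Linear interpolation between the previous and current sample of one channel.
    // phase is the fractional read position: 0.0 at the previous frame, 1.0 at the current one.
    float interpolateLinear(float previousSample, float currentSample, float phase) {
        return previousSample + phase * (currentSample - previousSample);
    }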
diff --git a/media/libaaudio/src/flowgraph/resampler/LinearResampler.h b/media/libaaudio/src/flowgraph/resampler/LinearResampler.h
new file mode 100644
index 0000000..5434379
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/LinearResampler.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_LINEAR_RESAMPLER_H
+#define RESAMPLER_LINEAR_RESAMPLER_H
+
+#include <memory>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "MultiChannelResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+/**
+ * Simple resampler that uses bi-linear interpolation.
+ */
+class LinearResampler : public MultiChannelResampler {
+public:
+    explicit LinearResampler(const MultiChannelResampler::Builder &builder);
+
+    void writeFrame(const float *frame) override;
+
+    void readFrame(float *frame) override;
+
+private:
+    std::unique_ptr<float[]> mPreviousFrame;
+    std::unique_ptr<float[]> mCurrentFrame;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_LINEAR_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
new file mode 100644
index 0000000..7193ff3
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <math.h>
+
+#include "IntegerRatio.h"
+#include "LinearResampler.h"
+#include "MultiChannelResampler.h"
+#include "PolyphaseResampler.h"
+#include "PolyphaseResamplerMono.h"
+#include "PolyphaseResamplerStereo.h"
+#include "SincResampler.h"
+#include "SincResamplerStereo.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builder &builder)
+        : mNumTaps(builder.getNumTaps())
+        , mX(static_cast<size_t>(builder.getChannelCount())
+                * static_cast<size_t>(builder.getNumTaps()) * 2)
+        , mSingleFrame(builder.getChannelCount())
+        , mChannelCount(builder.getChannelCount())
+        {
+    // Reduce sample rates to the smallest ratio.
+    // For example 44100/48000 would become 147/160.
+    IntegerRatio ratio(builder.getInputRate(), builder.getOutputRate());
+    ratio.reduce();
+    mNumerator = ratio.getNumerator();
+    mDenominator = ratio.getDenominator();
+    mIntegerPhase = mDenominator;
+}
+
+// static factory method
+MultiChannelResampler *MultiChannelResampler::make(int32_t channelCount,
+                                                   int32_t inputRate,
+                                                   int32_t outputRate,
+                                                   Quality quality) {
+    Builder builder;
+    builder.setInputRate(inputRate);
+    builder.setOutputRate(outputRate);
+    builder.setChannelCount(channelCount);
+
+    switch (quality) {
+        case Quality::Fastest:
+            builder.setNumTaps(2);
+            break;
+        case Quality::Low:
+            builder.setNumTaps(4);
+            break;
+        case Quality::Medium:
+        default:
+            builder.setNumTaps(8);
+            break;
+        case Quality::High:
+            builder.setNumTaps(16);
+            break;
+        case Quality::Best:
+            builder.setNumTaps(32);
+            break;
+    }
+
+    // Set the cutoff frequency so that we do not get aliasing when down-sampling.
+    if (inputRate > outputRate) {
+        builder.setNormalizedCutoff(kDefaultNormalizedCutoff);
+    }
+    return builder.build();
+}
+
+MultiChannelResampler *MultiChannelResampler::Builder::build() {
+    if (getNumTaps() == 2) {
+        // Note that this does not do low-pass filtering.
+        return new LinearResampler(*this);
+    }
+    IntegerRatio ratio(getInputRate(), getOutputRate());
+    ratio.reduce();
+    bool usePolyphase = (getNumTaps() * ratio.getDenominator()) <= kMaxCoefficients;
+    if (usePolyphase) {
+        if (getChannelCount() == 1) {
+            return new PolyphaseResamplerMono(*this);
+        } else if (getChannelCount() == 2) {
+            return new PolyphaseResamplerStereo(*this);
+        } else {
+            return new PolyphaseResampler(*this);
+        }
+    } else {
+        // Use a less optimized resampler that uses a float phaseIncrement.
+        // TODO mono resampler
+        if (getChannelCount() == 2) {
+            return new SincResamplerStereo(*this);
+        } else {
+            return new SincResampler(*this);
+        }
+    }
+}
+
+void MultiChannelResampler::writeFrame(const float *frame) {
+    // Move cursor before write so that cursor points to last written frame in read.
+    if (--mCursor < 0) {
+        mCursor = getNumTaps() - 1;
+    }
+    float *dest = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
+    int offset = getNumTaps() * getChannelCount();
+    for (int channel = 0; channel < getChannelCount(); channel++) {
+        // Write twice so we avoid having to wrap when reading.
+        dest[channel] = dest[channel + offset] = frame[channel];
+    }
+}
+
+float MultiChannelResampler::sinc(float radians) {
+    if (abs(radians) < 1.0e-9) return 1.0f;   // avoid divide by zero
+    return sinf(radians) / radians;   // Sinc function
+}
+
+// Generate coefficients in the order they will be used by readFrame().
+// This is more complicated but readFrame() is called repeatedly and should be optimized.
+void MultiChannelResampler::generateCoefficients(int32_t inputRate,
+                                              int32_t outputRate,
+                                              int32_t numRows,
+                                              double phaseIncrement,
+                                              float normalizedCutoff) {
+    mCoefficients.resize(static_cast<size_t>(getNumTaps()) * static_cast<size_t>(numRows));
+    int coefficientIndex = 0;
+    double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
+    // Stretch the sinc function for low pass filtering.
+    const float cutoffScaler = normalizedCutoff *
+            ((outputRate < inputRate)
+             ? ((float)outputRate / inputRate)
+             : ((float)inputRate / outputRate));
+    const int numTapsHalf = getNumTaps() / 2; // numTaps must be even.
+    const float numTapsHalfInverse = 1.0f / numTapsHalf;
+    for (int i = 0; i < numRows; i++) {
+        float tapPhase = phase - numTapsHalf;
+        float gain = 0.0; // sum of raw coefficients
+        int gainCursor = coefficientIndex;
+        for (int tap = 0; tap < getNumTaps(); tap++) {
+            float radians = tapPhase * M_PI;
+
+#if MCR_USE_KAISER
+            float window = mKaiserWindow(tapPhase * numTapsHalfInverse);
+#else
+            float window = mCoshWindow(static_cast<double>(tapPhase) * numTapsHalfInverse);
+#endif
+            float coefficient = sinc(radians * cutoffScaler) * window;
+            mCoefficients.at(coefficientIndex++) = coefficient;
+            gain += coefficient;
+            tapPhase += 1.0;
+        }
+        phase += phaseIncrement;
+        while (phase >= 1.0) {
+            phase -= 1.0;
+        }
+
+        // Correct for gain variations.
+        float gainCorrection = 1.0 / gain; // normalize the gain
+        for (int tap = 0; tap < getNumTaps(); tap++) {
+            mCoefficients.at(gainCursor + tap) *= gainCorrection;
+        }
+    }
+}
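
One detail worth noting in writeFrame() above: every incoming frame is stored twice, at mCursor and again at mCursor + numTaps, so the FIR in readFrame() can always read numTaps contiguous frames without a wrap test in its inner loop. The sketch below illustrates the same idea on a single-channel history buffer; it is an illustration of the technique, not code from this change.

    #include <cstdio>
    #include <vector>

    int main() {
        const int numTaps = 4;
        std::vector<float> history(2 * numTaps, 0.0f);  // double-length delay line
        int cursor = 0;
        for (int n = 0; n < 10; n++) {
            const float sample = static_cast<float>(n);  // stand-in for an input sample
            if (--cursor < 0) cursor = numTaps - 1;      // move cursor before writing
            history[cursor] = history[cursor + numTaps] = sample;

            float sum = 0.0f;  // trivial FIR (all taps equal): average the last numTaps samples
            for (int tap = 0; tap < numTaps; tap++) {
                sum += history[cursor + tap];            // contiguous reads, no wrap check
            }
            printf("n=%d output=%f\n", n, sum / numTaps);
        }
        return 0;
    }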
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
new file mode 100644
index 0000000..717f3fd
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -0,0 +1,274 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_MULTICHANNEL_RESAMPLER_H
+#define RESAMPLER_MULTICHANNEL_RESAMPLER_H
+
+#include <memory>
+#include <vector>
+#include <sys/types.h>
+#include <unistd.h>
+
+#ifndef MCR_USE_KAISER
+// It appears from the spectrogram that the HyperbolicCosine window leads to fewer artifacts.
+// And it is faster to calculate.
+#define MCR_USE_KAISER 0
+#endif
+
+#if MCR_USE_KAISER
+#include "KaiserWindow.h"
+#else
+#include "HyperbolicCosineWindow.h"
+#endif
+
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+class MultiChannelResampler {
+
+public:
+
+    enum class Quality : int32_t {
+        Fastest,
+        Low,
+        Medium,
+        High,
+        Best,
+    };
+
+    class Builder {
+    public:
+        /**
+         * Construct an optimal resampler based on the specified parameters.
+         * @return address of a resampler
+         */
+        MultiChannelResampler *build();
+
+        /**
+         * The number of taps in the resampling filter.
+         * More taps gives better quality but uses more CPU time.
+         * This typically ranges from 4 to 64. Default is 16.
+         *
+         * For polyphase filters, numTaps must be a multiple of four for loop unrolling.
+         * @param numTaps number of taps for the filter
+         * @return address of this builder for chaining calls
+         */
+        Builder *setNumTaps(int32_t numTaps) {
+            mNumTaps = numTaps;
+            return this;
+        }
+
+        /**
+         * Use 1 for mono, 2 for stereo, etc. Default is 1.
+         *
+         * @param channelCount number of channels
+         * @return address of this builder for chaining calls
+         */
+        Builder *setChannelCount(int32_t channelCount) {
+            mChannelCount = channelCount;
+            return this;
+        }
+
+        /**
+         * Default is 48000.
+         *
+         * @param inputRate sample rate of the input stream
+         * @return address of this builder for chaining calls
+         */
+        Builder *setInputRate(int32_t inputRate) {
+            mInputRate = inputRate;
+            return this;
+        }
+
+        /**
+         * Default is 48000.
+         *
+         * @param outputRate sample rate of the output stream
+         * @return address of this builder for chaining calls
+         */
+        Builder *setOutputRate(int32_t outputRate) {
+            mOutputRate = outputRate;
+            return this;
+        }
+
+        /**
+         * Set cutoff frequency relative to the Nyquist rate of the output sample rate.
+         * Set to 1.0 to match the Nyquist frequency.
+         * Set lower to reduce aliasing.
+         * Default is 0.70.
+         *
+         * @param normalizedCutoff anti-aliasing filter cutoff
+         * @return address of this builder for chaining calls
+         */
+        Builder *setNormalizedCutoff(float normalizedCutoff) {
+            mNormalizedCutoff = normalizedCutoff;
+            return this;
+        }
+
+        int32_t getNumTaps() const {
+            return mNumTaps;
+        }
+
+        int32_t getChannelCount() const {
+            return mChannelCount;
+        }
+
+        int32_t getInputRate() const {
+            return mInputRate;
+        }
+
+        int32_t getOutputRate() const {
+            return mOutputRate;
+        }
+
+        float getNormalizedCutoff() const {
+            return mNormalizedCutoff;
+        }
+
+    protected:
+        int32_t mChannelCount = 1;
+        int32_t mNumTaps = 16;
+        int32_t mInputRate = 48000;
+        int32_t mOutputRate = 48000;
+        float   mNormalizedCutoff = kDefaultNormalizedCutoff;
+    };
+
+    virtual ~MultiChannelResampler() = default;
+
+    /**
+     * Factory method for making a resampler that is optimal for the given inputs.
+     *
+     * @param channelCount number of channels, 2 for stereo
+     * @param inputRate sample rate of the input stream
+     * @param outputRate  sample rate of the output stream
+     * @param quality higher quality sounds better but uses more CPU
+     * @return an optimal resampler
+     */
+    static MultiChannelResampler *make(int32_t channelCount,
+                                       int32_t inputRate,
+                                       int32_t outputRate,
+                                       Quality quality);
+
+    bool isWriteNeeded() const {
+        return mIntegerPhase >= mDenominator;
+    }
+
+    /**
+     * Write a frame containing N samples.
+     *
+     * @param frame pointer to the first sample in a frame
+     */
+    void writeNextFrame(const float *frame) {
+        writeFrame(frame);
+        advanceWrite();
+    }
+
+    /**
+     * Read a frame containing N samples.
+     *
+     * @param frame pointer to the first sample in a frame
+     */
+    void readNextFrame(float *frame) {
+        readFrame(frame);
+        advanceRead();
+    }
+
+    int getNumTaps() const {
+        return mNumTaps;
+    }
+
+    int getChannelCount() const {
+        return mChannelCount;
+    }
+
+    static float hammingWindow(float radians, float spread);
+
+    static float sinc(float radians);
+
+protected:
+
+    explicit MultiChannelResampler(const MultiChannelResampler::Builder &builder);
+
+    /**
+     * Write a frame containing N samples.
+     * Call advanceWrite() after calling this.
+     * @param frame pointer to the first sample in a frame
+     */
+    virtual void writeFrame(const float *frame);
+
+    /**
+     * Read a frame containing N samples using interpolation.
+     * Call advanceRead() after calling this.
+     * @param frame pointer to the first sample in a frame
+     */
+    virtual void readFrame(float *frame) = 0;
+
+    void advanceWrite() {
+        mIntegerPhase -= mDenominator;
+    }
+
+    void advanceRead() {
+        mIntegerPhase += mNumerator;
+    }
+
+    /**
+     * Generate the filter coefficients in optimal order.
+     * @param inputRate sample rate of the input stream
+     * @param outputRate  sample rate of the output stream
+     * @param numRows number of rows in the array that contain a set of tap coefficients
+     * @param phaseIncrement how much to increment the phase between rows
+     * @param normalizedCutoff filter cutoff frequency normalized to Nyquist rate of output
+     */
+    void generateCoefficients(int32_t inputRate,
+                              int32_t outputRate,
+                              int32_t numRows,
+                              double phaseIncrement,
+                              float normalizedCutoff);
+
+
+    int32_t getIntegerPhase() {
+        return mIntegerPhase;
+    }
+
+    static constexpr int kMaxCoefficients = 8 * 1024;
+    std::vector<float>   mCoefficients;
+
+    const int            mNumTaps;
+    int                  mCursor = 0;
+    std::vector<float>   mX;           // delayed input values for the FIR
+    std::vector<float>   mSingleFrame; // one frame for temporary use
+    int32_t              mIntegerPhase = 0;
+    int32_t              mNumerator = 0;
+    int32_t              mDenominator = 0;
+
+
+private:
+
+#if MCR_USE_KAISER
+    KaiserWindow           mKaiserWindow;
+#else
+    HyperbolicCosineWindow mCoshWindow;
+#endif
+
+    static constexpr float kDefaultNormalizedCutoff = 0.70f;
+
+    const int              mChannelCount;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_MULTICHANNEL_RESAMPLER_H
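
For cases where the make() factory's presets are not enough, the Builder declared above can be used directly; its setters return the Builder pointer, so calls chain with ->. A minimal sketch (the wrapper function name is illustrative):

    #include "resampler/MultiChannelResampler.h"

    using namespace RESAMPLER_OUTER_NAMESPACE::resampler;

    MultiChannelResampler *makeCustomResampler() {
        MultiChannelResampler::Builder builder;
        builder.setChannelCount(2)
                ->setInputRate(44100)
                ->setOutputRate(48000)
                ->setNumTaps(16)               // multiple of 4, as required by the polyphase path
                ->setNormalizedCutoff(0.70f);  // relative to the Nyquist rate of the output
        return builder.build();                // caller owns the result and must delete it
    }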
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
new file mode 100644
index 0000000..e47ee8e
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cassert>
+#include <math.h>
+#include "IntegerRatio.h"
+#include "PolyphaseResampler.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+PolyphaseResampler::PolyphaseResampler(const MultiChannelResampler::Builder &builder)
+        : MultiChannelResampler(builder)
+        {
+    assert((getNumTaps() % 4) == 0); // Required for loop unrolling.
+
+    int32_t inputRate = builder.getInputRate();
+    int32_t outputRate = builder.getOutputRate();
+
+    int32_t numRows = mDenominator;
+    double phaseIncrement = (double) inputRate / (double) outputRate;
+    generateCoefficients(inputRate, outputRate,
+                         numRows, phaseIncrement,
+                         builder.getNormalizedCutoff());
+}
+
+void PolyphaseResampler::readFrame(float *frame) {
+    // Clear accumulator for mixing.
+    std::fill(mSingleFrame.begin(), mSingleFrame.end(), 0.0);
+
+    // Multiply input times windowed sinc function.
+    float *coefficients = &mCoefficients[mCoefficientCursor];
+    float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
+    for (int i = 0; i < mNumTaps; i++) {
+        float coefficient = *coefficients++;
+        for (int channel = 0; channel < getChannelCount(); channel++) {
+            mSingleFrame[channel] += *xFrame++ * coefficient;
+        }
+    }
+
+    // Advance and wrap through coefficients.
+    mCoefficientCursor = (mCoefficientCursor + mNumTaps) % mCoefficients.size();
+
+    // Copy accumulator to output.
+    for (int channel = 0; channel < getChannelCount(); channel++) {
+        frame[channel] = mSingleFrame[channel];
+    }
+}
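
The build() method above only selects this class when the whole coefficient table fits, i.e. when numTaps multiplied by the reduced denominator stays within kMaxCoefficients. A worked sizing example (values assumed for illustration): converting 44100 Hz to 48000 Hz reduces to 147/160, so a 16-tap filter needs one row of 16 coefficients for each of the 160 output phases.

    // Illustrative sizing check for the polyphase path (values assumed, not from this change).
    constexpr int kNumTaps = 16;
    constexpr int kReducedDenominator = 160;   // 44100/48000 reduced is 147/160
    constexpr int kCoefficientsNeeded = kNumTaps * kReducedDenominator;  // 2560
    static_assert(kCoefficientsNeeded <= 8 * 1024,  // kMaxCoefficients in MultiChannelResampler
                  "table fits, so the polyphase resampler can be used");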
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h
new file mode 100644
index 0000000..3642fce
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_POLYPHASE_RESAMPLER_H
+#define RESAMPLER_POLYPHASE_RESAMPLER_H
+
+#include <memory>
+#include <vector>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "MultiChannelResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+/**
+ * Resampler that is optimized for a reduced ratio of sample rates.
+ * All of the coefficients for each possible phase value are pre-calculated.
+ */
+class PolyphaseResampler : public MultiChannelResampler {
+public:
+    /**
+     *
+     * @param builder containing lots of parameters
+     */
+    explicit PolyphaseResampler(const MultiChannelResampler::Builder &builder);
+
+    virtual ~PolyphaseResampler() = default;
+
+    void readFrame(float *frame) override;
+
+protected:
+
+    int32_t                mCoefficientCursor = 0;
+
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_POLYPHASE_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp
new file mode 100644
index 0000000..fdaf13e
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cassert>
+#include "PolyphaseResamplerMono.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+#define MONO  1
+
+PolyphaseResamplerMono::PolyphaseResamplerMono(const MultiChannelResampler::Builder &builder)
+        : PolyphaseResampler(builder) {
+    assert(builder.getChannelCount() == MONO);
+}
+
+void PolyphaseResamplerMono::writeFrame(const float *frame) {
+    // Move cursor before write so that cursor points to last written frame in read.
+    if (--mCursor < 0) {
+        mCursor = getNumTaps() - 1;
+    }
+    float *dest = &mX[mCursor * MONO];
+    const int offset = mNumTaps * MONO;
+    // Write each channel twice so we avoid having to wrap when running the FIR.
+    const float sample =  frame[0];
+    // Put ordered writes together.
+    dest[0] = sample;
+    dest[offset] = sample;
+}
+
+void PolyphaseResamplerMono::readFrame(float *frame) {
+    // Clear accumulator.
+    float sum = 0.0;
+
+    // Multiply input times precomputed windowed sinc function.
+    const float *coefficients = &mCoefficients[mCoefficientCursor];
+    float *xFrame = &mX[mCursor * MONO];
+    const int numLoops = mNumTaps >> 2; // n/4
+    for (int i = 0; i < numLoops; i++) {
+        // Manual loop unrolling, might get converted to SIMD.
+        sum += *xFrame++ * *coefficients++;
+        sum += *xFrame++ * *coefficients++;
+        sum += *xFrame++ * *coefficients++;
+        sum += *xFrame++ * *coefficients++;
+    }
+
+    mCoefficientCursor = (mCoefficientCursor + mNumTaps) % mCoefficients.size();
+
+    // Copy accumulator to output.
+    frame[0] = sum;
+}
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h
new file mode 100644
index 0000000..fe020b5
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
+#define RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "PolyphaseResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+class PolyphaseResamplerMono : public PolyphaseResampler {
+public:
+    explicit PolyphaseResamplerMono(const MultiChannelResampler::Builder &builder);
+
+    virtual ~PolyphaseResamplerMono() = default;
+
+    void writeFrame(const float *frame) override;
+
+    void readFrame(float *frame) override;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
new file mode 100644
index 0000000..b381851
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cassert>
+#include "PolyphaseResamplerStereo.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+#define STEREO  2
+
+PolyphaseResamplerStereo::PolyphaseResamplerStereo(const MultiChannelResampler::Builder &builder)
+        : PolyphaseResampler(builder) {
+    assert(builder.getChannelCount() == STEREO);
+}
+
+void PolyphaseResamplerStereo::writeFrame(const float *frame) {
+    // Move cursor before write so that cursor points to last written frame in read.
+    if (--mCursor < 0) {
+        mCursor = getNumTaps() - 1;
+    }
+    float *dest = &mX[mCursor * STEREO];
+    const int offset = mNumTaps * STEREO;
+    // Write each channel twice so we avoid having to wrap when running the FIR.
+    const float left =  frame[0];
+    const float right = frame[1];
+    // Put ordered writes together.
+    dest[0] = left;
+    dest[1] = right;
+    dest[offset] = left;
+    dest[1 + offset] = right;
+}
+
+void PolyphaseResamplerStereo::readFrame(float *frame) {
+    // Clear accumulators.
+    float left = 0.0;
+    float right = 0.0;
+
+    // Multiply input times precomputed windowed sinc function.
+    const float *coefficients = &mCoefficients[mCoefficientCursor];
+    float *xFrame = &mX[mCursor * STEREO];
+    const int numLoops = mNumTaps >> 2; // n/4
+    for (int i = 0; i < numLoops; i++) {
+        // Manual loop unrolling, might get converted to SIMD.
+        float coefficient = *coefficients++;
+        left += *xFrame++ * coefficient;
+        right += *xFrame++ * coefficient;
+
+        coefficient = *coefficients++; // next tap
+        left += *xFrame++ * coefficient;
+        right += *xFrame++ * coefficient;
+
+        coefficient = *coefficients++;  // next tap
+        left += *xFrame++ * coefficient;
+        right += *xFrame++ * coefficient;
+
+        coefficient = *coefficients++;  // next tap
+        left += *xFrame++ * coefficient;
+        right += *xFrame++ * coefficient;
+    }
+
+    mCoefficientCursor = (mCoefficientCursor + mNumTaps) % mCoefficients.size();
+
+    // Copy accumulators to output.
+    frame[0] = left;
+    frame[1] = right;
+}
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h
new file mode 100644
index 0000000..ee4caba
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
+#define RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "PolyphaseResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+class PolyphaseResamplerStereo : public PolyphaseResampler {
+public:
+    explicit PolyphaseResamplerStereo(const MultiChannelResampler::Builder &builder);
+
+    virtual ~PolyphaseResamplerStereo() = default;
+
+    void writeFrame(const float *frame) override;
+
+    void readFrame(float *frame) override;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
diff --git a/media/libaaudio/src/flowgraph/resampler/README.md b/media/libaaudio/src/flowgraph/resampler/README.md
new file mode 100644
index 0000000..ea319c7
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/README.md
@@ -0,0 +1,101 @@
+# Sample Rate Converter
+
+This folder contains a sample rate converter, or "resampler".
+
+The converter is based on a sinc function that has been windowed by a hyperbolic cosine.
+We found this had fewer artifacts than the more traditional Kaiser window.
+
+## Building the Resampler
+
+It is part of [Oboe](https://github.com/google/oboe) but has no dependencies on Oboe.
+So the contents of this folder can be used outside of Oboe.
+
+To build it for use outside of Oboe:
+
+1. Copy the "resampler" folder to a folder in your project that is in the include path.
+2. Add all of the \*.cpp files in the resampler folder to your project IDE or Makefile.
+3. In ResamplerDefinitions.h, define RESAMPLER_OUTER_NAMESPACE with your own project name. Alternatively, use -DRESAMPLER_OUTER_NAMESPACE=mynamespace when compiling to avoid modifying the resampler code.
+
+## Creating a Resampler
+
+Include the [main header](MultiChannelResampler.h) for the resampler.
+
+    #include "resampler/MultiChannelResampler.h"
+
+Here is an example of creating a stereo resampler that will convert from 44100 to 48000 Hz.
+Only do this once, when you open your stream. Then use the sample resampler to process multiple buffers.
+
+    MultiChannelResampler *resampler = MultiChannelResampler::make(
+            2, // channel count
+            44100, // input sampleRate
+            48000, // output sampleRate
+            MultiChannelResampler::Quality::Medium); // conversion quality
+
+Possible values for quality include { Fastest, Low, Medium, High, Best }.
+Higher quality levels will sound better but consume more CPU because they have more taps in the filter.
+
+## Fractional Frame Counts
+
+Note that the number of output frames generated for a given number of input frames can vary.
+
+For example, suppose you are converting from 44100 Hz to 48000 Hz and using an input buffer with 960 frames. If you calculate the number of output frames you get:
+
+    960 * 48000 / 44100 = 1044.897959...
+
+You cannot generate a fractional number of frames. So the resampler will sometimes generate 1044 frames and sometimes 1045 frames. On average it will generate 1044.897959 frames. The resampler stores the fraction internally and keeps track of when to consume or generate a frame.
+
+You can either use a fixed number of input frames or a fixed number of output frames. The other frame count will vary.
+
+## Calling the Resampler with a fixed number of OUTPUT frames
+
+In this example, suppose we have a fixed number of output frames and a variable number of input frames.
+
+Assume you start with these variables and a method that returns the next input frame:
+
+    float *outputBuffer;     // multi-channel buffer to be filled
+    int    numOutputFrames;  // number of frames of output
+
+The resampler has a method isWriteNeeded() that tells you whether to write to or read from the resampler.
+
+    int outputFramesLeft = numOutputFrames;
+    while (outputFramesLeft > 0) {
+        if(resampler->isWriteNeeded()) {
+            const float *frame = getNextInputFrame(); // you provide this
+            resampler->writeNextFrame(frame);
+        } else {
+            resampler->readNextFrame(outputBuffer);
+            outputBuffer += channelCount;
+            outputFramesLeft--;
+        }
+    }
+
+## Calling the Resampler with a fixed number of INPUT frames
+
+In this example, suppose we have a fixed number of input frames and a variable number of output frames.
+
+Assume you start with these variables:
+
+    float *inputBuffer;     // multi-channel buffer to be consumed
+    float *outputBuffer;    // multi-channel buffer to be filled
+    int    numInputFrames;  // number of frames of input
+    int    numOutputFrames = 0;
+    int    channelCount;    // 1 for mono, 2 for stereo
+
+    int inputFramesLeft = numInputFrames;
+    while (inputFramesLeft > 0) {
+        if(resampler->isWriteNeeded()) {
+            resampler->writeNextFrame(inputBuffer);
+            inputBuffer += channelCount;
+            inputFramesLeft--;
+        } else {
+            resampler->readNextFrame(outputBuffer);
+            outputBuffer += channelCount;
+            numOutputFrames++;
+        }
+    }
+
+## Deleting the Resampler
+
+When you are done, you should delete the Resampler to avoid a memory leak.
+
+    delete resampler;
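
To make the "Fractional Frame Counts" arithmetic above easy to reuse, here is a small helper sketched under the same assumptions (the function name is illustrative and not part of the library):

    #include <cstdint>

    // Approximate number of output frames produced for a given number of input frames.
    // The true count varies by one frame from call to call, as described above.
    int64_t expectedOutputFrames(int64_t inputFrames, int32_t inputRate, int32_t outputRate) {
        return inputFrames * outputRate / inputRate;  // e.g. 960 * 48000 / 44100 = 1044 (truncated)
    }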
diff --git a/media/libaaudio/src/flowgraph/resampler/ResamplerDefinitions.h b/media/libaaudio/src/flowgraph/resampler/ResamplerDefinitions.h
new file mode 100644
index 0000000..c6791ec
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/ResamplerDefinitions.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Set the macro RESAMPLER_OUTER_NAMESPACE based on whether the compiler flag
+// __ANDROID_NDK__ is defined. __ANDROID_NDK__ should be defined in Oboe
+// but not in the Android platform build.
+
+#ifndef RESAMPLER_OUTER_NAMESPACE
+#ifdef __ANDROID_NDK__
+#define RESAMPLER_OUTER_NAMESPACE oboe
+#else
+#define RESAMPLER_OUTER_NAMESPACE aaudio
+#endif // __ANDROID_NDK__
+#endif // RESAMPLER_OUTER_NAMESPACE
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
new file mode 100644
index 0000000..42d0ca2
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cassert>
+#include <math.h>
+#include "SincResampler.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+SincResampler::SincResampler(const MultiChannelResampler::Builder &builder)
+        : MultiChannelResampler(builder)
+        , mSingleFrame2(builder.getChannelCount()) {
+    assert((getNumTaps() % 4) == 0); // Required for loop unrolling.
+    mNumRows = kMaxCoefficients / getNumTaps(); // no guard row needed
+    mPhaseScaler = (double) mNumRows / mDenominator;
+    double phaseIncrement = 1.0 / mNumRows;
+    generateCoefficients(builder.getInputRate(),
+                         builder.getOutputRate(),
+                         mNumRows,
+                         phaseIncrement,
+                         builder.getNormalizedCutoff());
+}
+
+void SincResampler::readFrame(float *frame) {
+    // Clear accumulator for mixing.
+    std::fill(mSingleFrame.begin(), mSingleFrame.end(), 0.0);
+    std::fill(mSingleFrame2.begin(), mSingleFrame2.end(), 0.0);
+
+    // Determine indices into coefficients table.
+    double tablePhase = getIntegerPhase() * mPhaseScaler;
+    int index1 = static_cast<int>(floor(tablePhase));
+    if (index1 >= mNumRows) { // no guard row needed because we wrap the indices
+        tablePhase -= mNumRows;
+        index1 -= mNumRows;
+    }
+
+    int index2 = index1 + 1;
+    if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
+        index2 -= mNumRows;
+    }
+
+    float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
+            * static_cast<size_t>(getNumTaps())];
+    float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
+            * static_cast<size_t>(getNumTaps())];
+
+    float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
+    for (int i = 0; i < mNumTaps; i++) {
+        float coefficient1 = *coefficients1++;
+        float coefficient2 = *coefficients2++;
+        for (int channel = 0; channel < getChannelCount(); channel++) {
+            float sample = *xFrame++;
+            mSingleFrame[channel] +=  sample * coefficient1;
+            mSingleFrame2[channel] += sample * coefficient2;
+        }
+    }
+
+    // Interpolate and copy to output.
+    float fraction = tablePhase - index1;
+    for (int channel = 0; channel < getChannelCount(); channel++) {
+        float low = mSingleFrame[channel];
+        float high = mSingleFrame2[channel];
+        frame[channel] = low + (fraction * (high - low));
+    }
+}
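
Unlike the polyphase resamplers, this class keeps a fixed number of coefficient rows and interpolates between two neighbouring rows in readFrame(), which is what lets it handle ratios whose reduced denominator would make a full polyphase table too large. A worked sizing example under assumed values:

    // Illustrative sizing for the interpolating sinc path (values assumed, not from this change).
    constexpr int kMaxCoefficients = 8 * 1024;             // limit defined in MultiChannelResampler
    constexpr int kNumTaps = 16;
    constexpr int kNumRows = kMaxCoefficients / kNumTaps;  // 512 rows, one per table phase
    // mPhaseScaler spreads the integer phase across those rows; readFrame() blends the row at
    // index1 with the next row using the fractional part of the table phase.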
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResampler.h b/media/libaaudio/src/flowgraph/resampler/SincResampler.h
new file mode 100644
index 0000000..05ff092
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/SincResampler.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_SINC_RESAMPLER_H
+#define RESAMPLER_SINC_RESAMPLER_H
+
+#include <memory>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "MultiChannelResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+/**
+ * Resampler that can interpolate between coefficients.
+ * This can be used to support arbitrary ratios.
+ */
+class SincResampler : public MultiChannelResampler {
+public:
+    explicit SincResampler(const MultiChannelResampler::Builder &builder);
+
+    virtual ~SincResampler() = default;
+
+    void readFrame(float *frame) override;
+
+protected:
+
+    std::vector<float> mSingleFrame2; // for interpolation
+    int32_t            mNumRows = 0;
+    double             mPhaseScaler = 1.0;
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_SINC_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
new file mode 100644
index 0000000..432137e
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cassert>
+#include <math.h>
+
+#include "SincResamplerStereo.h"
+
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+#define STEREO  2
+
+SincResamplerStereo::SincResamplerStereo(const MultiChannelResampler::Builder &builder)
+        : SincResampler(builder) {
+    assert(builder.getChannelCount() == STEREO);
+}
+
+void SincResamplerStereo::writeFrame(const float *frame) {
+    // Move cursor before write so that cursor points to last written frame in read.
+    if (--mCursor < 0) {
+        mCursor = getNumTaps() - 1;
+    }
+    float *dest = &mX[mCursor * STEREO];
+    const int offset = mNumTaps * STEREO;
+    // Write each channel twice so we avoid having to wrap when running the FIR.
+    const float left =  frame[0];
+    const float right = frame[1];
+    // Put ordered writes together.
+    dest[0] = left;
+    dest[1] = right;
+    dest[offset] = left;
+    dest[1 + offset] = right;
+}
+
+// Multiply input times windowed sinc function.
+void SincResamplerStereo::readFrame(float *frame) {
+    // Clear accumulator for mixing.
+    std::fill(mSingleFrame.begin(), mSingleFrame.end(), 0.0);
+    std::fill(mSingleFrame2.begin(), mSingleFrame2.end(), 0.0);
+
+    // Determine indices into coefficients table.
+    double tablePhase = getIntegerPhase() * mPhaseScaler;
+    int index1 = static_cast<int>(floor(tablePhase));
+    float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
+            * static_cast<size_t>(getNumTaps())];
+    int index2 = (index1 + 1);
+    if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
+        index2 = 0;
+    }
+    float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
+            * static_cast<size_t>(getNumTaps())];
+    float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
+    for (int i = 0; i < mNumTaps; i++) {
+        float coefficient1 = *coefficients1++;
+        float coefficient2 = *coefficients2++;
+        for (int channel = 0; channel < getChannelCount(); channel++) {
+            float sample = *xFrame++;
+            mSingleFrame[channel] +=  sample * coefficient1;
+            mSingleFrame2[channel] += sample * coefficient2;
+        }
+    }
+
+    // Interpolate and copy to output.
+    float fraction = tablePhase - index1;
+    for (int channel = 0; channel < getChannelCount(); channel++) {
+        float low = mSingleFrame[channel];
+        float high = mSingleFrame2[channel];
+        frame[channel] = low + (fraction * (high - low));
+    }
+}
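
For readers following readFrame() above: a minimal standalone sketch of the same two-row coefficient interpolation, using hypothetical names (coeffTable, history, numTaps) rather than the class members. It only illustrates how the fractional table phase blends the outputs of two adjacent windowed-sinc rows; it is not the actual SincResampler implementation.

    #include <cmath>
    #include <vector>

    // Illustration only: one output sample from two adjacent coefficient rows.
    float interpolatedFir(const std::vector<float> &coeffTable, // numRows * numTaps entries
                          int numRows,
                          int numTaps,
                          const std::vector<float> &history,    // last numTaps input samples
                          double tablePhase) {                  // 0.0 .. numRows
        int index1 = static_cast<int>(std::floor(tablePhase));
        int index2 = (index1 + 1 >= numRows) ? 0 : index1 + 1;  // wrap, no guard row needed
        const float *coeffs1 = &coeffTable[index1 * numTaps];
        const float *coeffs2 = &coeffTable[index2 * numTaps];
        float sum1 = 0.0f;
        float sum2 = 0.0f;
        for (int i = 0; i < numTaps; i++) {
            sum1 += history[i] * coeffs1[i];
            sum2 += history[i] * coeffs2[i];
        }
        // Linearly interpolate between the two FIR results.
        float fraction = static_cast<float>(tablePhase - index1);
        return sum1 + fraction * (sum2 - sum1);
    }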
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h
new file mode 100644
index 0000000..d5576d1
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RESAMPLER_SINC_RESAMPLER_STEREO_H
+#define RESAMPLER_SINC_RESAMPLER_STEREO_H
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "SincResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
+
+class SincResamplerStereo : public SincResampler {
+public:
+    explicit SincResamplerStereo(const MultiChannelResampler::Builder &builder);
+
+    virtual ~SincResamplerStereo() = default;
+
+    void writeFrame(const float *frame) override;
+
+    void readFrame(float *frame) override;
+
+};
+
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_SINC_RESAMPLER_STEREO_H
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index e96e134..dd11169 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -37,18 +37,6 @@
         : AudioStream() {
 }
 
-AudioStreamLegacy::~AudioStreamLegacy() {
-}
-
-// Called from AudioTrack.cpp or AudioRecord.cpp
-static void AudioStreamLegacy_callback(int event, void* userData, void *info) {
-    AudioStreamLegacy *streamLegacy = (AudioStreamLegacy *) userData;
-    streamLegacy->processCallback(event, info);
-}
-
-aaudio_legacy_callback_t AudioStreamLegacy::getLegacyCallback() {
-    return AudioStreamLegacy_callback;
-}
 
 aaudio_data_callback_result_t AudioStreamLegacy::callDataCallbackFrames(uint8_t *buffer,
                                                                         int32_t numFrames) {
@@ -76,82 +64,140 @@
     return (int32_t) callDataCallbackFrames(buffer, numFrames);
 }
 
-void AudioStreamLegacy::processCallbackCommon(aaudio_callback_operation_t opcode, void *info) {
+
+void AudioStreamLegacy::onNewIAudioTrack() {
+    ALOGD("%s stream disconnected", __func__);
+    forceDisconnect();
+    mCallbackEnabled.store(false);
+}
+
+size_t AudioStreamLegacy::onMoreData(const android::AudioTrack::Buffer& buffer) {
+    // This illegal size can be used to tell AudioRecord or AudioTrack to stop calling us.
+    // This takes advantage of them killing the stream when they see a size out of range.
+    // That is an undocumented behavior.
+    // TODO add to API in AudioRecord and AudioTrack
+    // TODO(b/216175830) cleanup size re-computation
+    const size_t SIZE_STOP_CALLBACKS = SIZE_MAX;
     aaudio_data_callback_result_t callbackResult;
+    (void) checkForDisconnectRequest(true);
+
+    // Note that this code assumes an AudioTrack::Buffer is the same as
+    // AudioRecord::Buffer
+    // TODO define our own AudioBuffer and pass it from the subclasses.
+    size_t written = buffer.size();
+    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        ALOGW("%s() data, stream disconnected", __func__);
+        // This will kill the stream and prevent it from being restarted.
+        // That is OK because the stream is disconnected.
+        written = SIZE_STOP_CALLBACKS;
+    } else if (!mCallbackEnabled.load()) {
+        ALOGW("%s() no data because callback disabled, set size=0", __func__);
+        // Do NOT use SIZE_STOP_CALLBACKS here because that will kill the stream and
+        // prevent it from being restarted. This can occur because of a race condition
+        // caused by Legacy callbacks running after the track is "stopped".
+        written = 0;
+    } else {
+        if (buffer.getFrameCount() == 0) {
+            ALOGW("%s() data, frameCount is zero", __func__);
+            return written;
+        }
+
+        // If the caller specified an exact size then use a block size adapter.
+        if (mBlockAdapter != nullptr) {
+            int32_t byteCount = buffer.getFrameCount() * getBytesPerDeviceFrame();
+            callbackResult = mBlockAdapter->processVariableBlock(
+                    buffer.data(), byteCount);
+        } else {
+            // Call using the AAudio callback interface.
+            callbackResult = callDataCallbackFrames(buffer.data(),
+                                                    buffer.getFrameCount());
+        }
+        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
+            written = buffer.getFrameCount() * getBytesPerDeviceFrame();
+        } else {
+            if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+                ALOGD("%s() callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
+            } else {
+                ALOGW("%s() callback returned invalid result = %d",
+                      __func__, callbackResult);
+            }
+            written = 0;
+            systemStopInternal();
+            // Disable the callback just in case the system keeps trying to call us.
+            mCallbackEnabled.store(false);
+        }
+
+        if (updateStateMachine() != AAUDIO_OK) {
+            forceDisconnect();
+            mCallbackEnabled.store(false);
+        }
+    }
+    return written;
+}
+
+// TODO(b/216175830) This method is duplicated to ease a refactoring that will
+// later reconsolidate the two overloads.
+size_t AudioStreamLegacy::onMoreData(const android::AudioRecord::Buffer& buffer) {
     // This illegal size can be used to tell AudioRecord or AudioTrack to stop calling us.
     // This takes advantage of them killing the stream when they see a size out of range.
     // That is an undocumented behavior.
     // TODO add to API in AudioRecord and AudioTrack
     const size_t SIZE_STOP_CALLBACKS = SIZE_MAX;
+    aaudio_data_callback_result_t callbackResult;
+    (void) checkForDisconnectRequest(true);
 
-    switch (opcode) {
-        case AAUDIO_CALLBACK_OPERATION_PROCESS_DATA: {
-            (void) checkForDisconnectRequest(true);
-
-            // Note that this code assumes an AudioTrack::Buffer is the same as
-            // AudioRecord::Buffer
-            // TODO define our own AudioBuffer and pass it from the subclasses.
-            AudioTrack::Buffer *audioBuffer = static_cast<AudioTrack::Buffer *>(info);
-            if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
-                ALOGW("processCallbackCommon() data, stream disconnected");
-                // This will kill the stream and prevent it from being restarted.
-                // That is OK because the stream is disconnected.
-                audioBuffer->size = SIZE_STOP_CALLBACKS;
-            } else if (!mCallbackEnabled.load()) {
-                ALOGW("processCallbackCommon() no data because callback disabled, set size=0");
-                // Do NOT use SIZE_STOP_CALLBACKS here because that will kill the stream and
-                // prevent it from being restarted. This can occur because of a race condition
-                // caused by Legacy callbacks running after the track is "stopped".
-                audioBuffer->size = 0;
-            } else {
-                if (audioBuffer->frameCount == 0) {
-                    ALOGW("processCallbackCommon() data, frameCount is zero");
-                    return;
-                }
-
-                // If the caller specified an exact size then use a block size adapter.
-                if (mBlockAdapter != nullptr) {
-                    int32_t byteCount = audioBuffer->frameCount * getBytesPerDeviceFrame();
-                    callbackResult = mBlockAdapter->processVariableBlock(
-                            (uint8_t *) audioBuffer->raw, byteCount);
-                } else {
-                    // Call using the AAudio callback interface.
-                    callbackResult = callDataCallbackFrames((uint8_t *)audioBuffer->raw,
-                                                            audioBuffer->frameCount);
-                }
-                if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-                    audioBuffer->size = audioBuffer->frameCount * getBytesPerDeviceFrame();
-                } else {
-                    if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
-                        ALOGD("%s() callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
-                    } else {
-                        ALOGW("%s() callback returned invalid result = %d",
-                              __func__, callbackResult);
-                    }
-                    audioBuffer->size = 0;
-                    systemStopInternal();
-                    // Disable the callback just in case the system keeps trying to call us.
-                    mCallbackEnabled.store(false);
-                }
-
-                if (updateStateMachine() != AAUDIO_OK) {
-                    forceDisconnect();
-                    mCallbackEnabled.store(false);
-                }
-            }
+    // Note that this code assumes an AudioTrack::Buffer is the same as
+    // AudioRecord::Buffer
+    // TODO define our own AudioBuffer and pass it from the subclasses.
+    size_t written = buffer.size();
+    if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+        ALOGW("%s() data, stream disconnected", __func__);
+        // This will kill the stream and prevent it from being restarted.
+        // That is OK because the stream is disconnected.
+        written = SIZE_STOP_CALLBACKS;
+    } else if (!mCallbackEnabled.load()) {
+        ALOGW("%s() no data because callback disabled, set size=0", __func__);
+        // Do NOT use SIZE_STOP_CALLBACKS here because that will kill the stream and
+        // prevent it from being restarted. This can occur because of a race condition
+        // caused by Legacy callbacks running after the track is "stopped".
+        written = 0;
+    } else {
+        if (buffer.getFrameCount() == 0) {
+            ALOGW("%s() data, frameCount is zero", __func__);
+            return written;
         }
-            break;
 
-        // Stream got rerouted so we disconnect.
-        case AAUDIO_CALLBACK_OPERATION_DISCONNECTED:
-            ALOGD("processCallbackCommon() stream disconnected");
+        // If the caller specified an exact size then use a block size adapter.
+        if (mBlockAdapter != nullptr) {
+            int32_t byteCount = buffer.getFrameCount() * getBytesPerDeviceFrame();
+            callbackResult = mBlockAdapter->processVariableBlock(
+                    buffer.data(), byteCount);
+        } else {
+            // Call using the AAudio callback interface.
+            callbackResult = callDataCallbackFrames(buffer.data(),
+                                                    buffer.getFrameCount());
+        }
+        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
+            written = buffer.getFrameCount() * getBytesPerDeviceFrame();
+        } else {
+            if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+                ALOGD("%s() callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
+            } else {
+                ALOGW("%s() callback returned invalid result = %d",
+                      __func__, callbackResult);
+            }
+            written = 0;
+            systemStopInternal();
+            // Disable the callback just in case the system keeps trying to call us.
+            mCallbackEnabled.store(false);
+        }
+
+        if (updateStateMachine() != AAUDIO_OK) {
             forceDisconnect();
             mCallbackEnabled.store(false);
-            break;
-
-        default:
-            break;
+        }
     }
+    return written;
 }
 
 aaudio_result_t AudioStreamLegacy::checkForDisconnectRequest(bool errorCallbackEnabled) {
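
Both onMoreData() overloads above follow the same return-value convention, which the surrounding comments describe as undocumented AudioTrack/AudioRecord behavior: SIZE_MAX asks the framework to stop calling back entirely, 0 keeps the stream alive while delivering no data, and a positive value reports the number of bytes handled. A hedged sketch of that convention, with hypothetical names and a plain function pointer standing in for the AAudio data callback:

    #include <cstddef>
    #include <cstdint>

    // Illustration only: a simplified handler following the same convention
    // as AudioStreamLegacy::onMoreData() above. Names are hypothetical.
    size_t handleMoreData(uint8_t *data, size_t frameCount, size_t bytesPerFrame,
                          bool disconnected, bool callbackEnabled,
                          bool (*fillBuffer)(uint8_t *, size_t)) {
        constexpr size_t kStopCallbacks = SIZE_MAX; // out-of-range size kills the stream
        if (disconnected) {
            return kStopCallbacks;       // stream cannot be restarted anyway
        }
        if (!callbackEnabled) {
            return 0;                    // keep the stream alive, deliver no data
        }
        if (!fillBuffer(data, frameCount)) {
            return 0;                    // app asked to stop; caller should stop the stream
        }
        return frameCount * bytesPerFrame; // bytes actually produced or consumed
    }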
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 88ef270..53f6e06 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -17,8 +17,10 @@
 #ifndef LEGACY_AUDIO_STREAM_LEGACY_H
 #define LEGACY_AUDIO_STREAM_LEGACY_H
 
-#include <media/AudioTimestamp.h>
+#include <media/AudioRecord.h>
 #include <media/AudioSystem.h>
+#include <media/AudioTimestamp.h>
+#include <media/AudioTrack.h>
 
 #include <aaudio/AAudio.h>
 
@@ -30,8 +32,6 @@
 namespace aaudio {
 
 
-typedef void (*aaudio_legacy_callback_t)(int event, void* user, void *info);
-
 enum {
     /**
      * Request that the callback function should fill the data buffer of an output stream,
@@ -56,21 +56,18 @@
 typedef int32_t aaudio_callback_operation_t;
 
 
-class AudioStreamLegacy : public AudioStream, public FixedBlockProcessor {
+class AudioStreamLegacy : public AudioStream,
+                          public FixedBlockProcessor,
+                          protected android::AudioTrack::IAudioTrackCallback,
+                          protected android::AudioRecord::IAudioRecordCallback {
 public:
     AudioStreamLegacy();
 
-    virtual ~AudioStreamLegacy();
+    virtual ~AudioStreamLegacy() = default;
 
-    aaudio_legacy_callback_t getLegacyCallback();
 
     int32_t callDataCallbackFrames(uint8_t *buffer, int32_t numFrames);
 
-    // This is public so it can be called from the C callback function.
-    // This is called from the AudioTrack/AudioRecord client.
-    virtual void processCallback(int event, void *info) = 0;
-
-    void processCallbackCommon(aaudio_callback_operation_t opcode, void *info);
 
     // Implement FixedBlockProcessor
     int32_t onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) override;
@@ -86,7 +83,12 @@
     }
 
 protected:
-
+    size_t onMoreData(const android::AudioTrack::Buffer& buffer) override;
+    // TODO(b/216175830) This method is duplicated to ease a refactoring that will
+    // later reconsolidate the two overloads.
+    size_t onMoreData(const android::AudioRecord::Buffer& buffer) override;
+    void onNewIAudioTrack() override;
+    void onNewIAudioRecord() override { onNewIAudioTrack(); }
     aaudio_result_t getBestTimestamp(clockid_t clockId,
                                      int64_t *framePosition,
                                      int64_t *timeNanoseconds,
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index fe8fb19..1e39e0f 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -96,41 +96,18 @@
         setFormat(AUDIO_FORMAT_PCM_FLOAT);
     }
 
-    // Maybe change device format to get a FAST path.
-    // AudioRecord does not support FAST mode for FLOAT data.
-    // TODO AudioRecord should allow FLOAT data paths for FAST tracks.
-    // So IF the user asks for low latency FLOAT
-    // AND the sampleRate is likely to be compatible with FAST
-    // THEN request I16 and convert to FLOAT when passing to user.
-    // Note that hard coding 48000 Hz is not ideal because the sampleRate
-    // for a FAST path might not be 48000 Hz.
-    // It normally is but there is a chance that it is not.
-    // And there is no reliable way to know that in advance.
-    // Luckily the consequences of a wrong guess are minor.
-    // We just may not get a FAST track.
-    // But we wouldn't have anyway without this hack.
-    constexpr int32_t kMostLikelySampleRateForFast = 48000;
-    if (getFormat() == AUDIO_FORMAT_PCM_FLOAT
-            && perfMode == AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
-            && (audio_channel_count_from_in_mask(channelMask) <= 2) // FAST only for mono and stereo
-            && (getSampleRate() == kMostLikelySampleRateForFast
-                || getSampleRate() == AAUDIO_UNSPECIFIED)) {
-        setDeviceFormat(AUDIO_FORMAT_PCM_16_BIT);
-    } else {
-        setDeviceFormat(getFormat());
-    }
+
+    setDeviceFormat(getFormat());
 
     // To avoid glitching, let AudioFlinger pick the optimal burst size.
     uint32_t notificationFrames = 0;
 
     // Setup the callback if there is one.
-    AudioRecord::callback_t callback = nullptr;
-    void *callbackData = nullptr;
+    sp<AudioRecord::IAudioRecordCallback> callback;
     AudioRecord::transfer_type streamTransferType = AudioRecord::transfer_type::TRANSFER_SYNC;
     if (builder.getDataCallbackProc() != nullptr) {
         streamTransferType = AudioRecord::transfer_type::TRANSFER_CALLBACK;
-        callback = getLegacyCallback();
-        callbackData = this;
+        callback = sp<AudioRecord::IAudioRecordCallback>::fromExisting(this);
     }
     mCallbackBufferSize = builder.getFramesPerDataCallback();
 
@@ -177,7 +154,6 @@
                 channelMask,
                 frameCount,
                 callback,
-                callbackData,
                 notificationFrames,
                 false /*threadCanCallJava*/,
                 sessionId,
@@ -350,23 +326,6 @@
     }
 }
 
-void AudioStreamRecord::processCallback(int event, void *info) {
-    switch (event) {
-        case AudioRecord::EVENT_MORE_DATA:
-            processCallbackCommon(AAUDIO_CALLBACK_OPERATION_PROCESS_DATA, info);
-            break;
-
-            // Stream got rerouted so we disconnect.
-        case AudioRecord::EVENT_NEW_IAUDIORECORD:
-            processCallbackCommon(AAUDIO_CALLBACK_OPERATION_DISCONNECTED, info);
-            break;
-
-        default:
-            break;
-    }
-    return;
-}
-
 aaudio_result_t AudioStreamRecord::requestStart_l()
 {
     if (mAudioRecord.get() == nullptr) {
@@ -504,7 +463,7 @@
     return (aaudio_result_t) framesRead;
 }
 
-aaudio_result_t AudioStreamRecord::setBufferSize(int32_t requestedFrames)
+aaudio_result_t AudioStreamRecord::setBufferSize(int32_t /*requestedFrames*/)
 {
     return getBufferSize();
 }
@@ -552,7 +511,7 @@
         case AAUDIO_STREAM_STATE_STARTED:
             result = mAudioRecord->getPosition(&position);
             if (result == OK) {
-                mFramesWritten.update32(position);
+                mFramesWritten.update32((int32_t)position);
             }
             break;
         case AAUDIO_STREAM_STATE_STOPPING:
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index 692651d..5ce73f9 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -65,7 +65,9 @@
     }
 
     // This is public so it can be called from the C callback function.
-    void processCallback(int event, void *info) override;
+    void processCallback(int event, void *info);
+
+    void processCallbackRecord(aaudio_callback_operation_t opcode, void *info);
 
     int64_t incrementClientFrameCounter(int32_t frames) override {
         return incrementFramesRead(frames);
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 17736fc..6f1dc92 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -103,14 +103,12 @@
             : getFormat();
 
     // Setup the callback if there is one.
-    AudioTrack::callback_t callback = nullptr;
-    void *callbackData = nullptr;
+    wp<AudioTrack::IAudioTrackCallback> callback;
     // Note that TRANSFER_SYNC does not allow FAST track
     AudioTrack::transfer_type streamTransferType = AudioTrack::transfer_type::TRANSFER_SYNC;
     if (builder.getDataCallbackProc() != nullptr) {
         streamTransferType = AudioTrack::transfer_type::TRANSFER_CALLBACK;
-        callback = getLegacyCallback();
-        callbackData = this;
+        callback = wp<AudioTrack::IAudioTrackCallback>::fromExisting(this);
 
         // If the total buffer size is unspecified then base the size on the burst size.
         if (frameCount == 0
@@ -157,13 +155,12 @@
             frameCount,
             flags,
             callback,
-            callbackData,
             notificationFrames,
-            0,       // DEFAULT sharedBuffer*/,
+            nullptr,       // DEFAULT sharedBuffer*/,
             false,   // DEFAULT threadCanCallJava
             sessionId,
             streamTransferType,
-            NULL,    // DEFAULT audio_offload_info_t
+            nullptr,    // DEFAULT audio_offload_info_t
             AttributionSourceState(), // DEFAULT uid and pid
             &attributes,
             // WARNING - If doNotReconnect set true then audio stops after plugging and unplugging
@@ -217,7 +214,6 @@
         mBlockAdapter = nullptr;
     }
 
-    setState(AAUDIO_STREAM_STATE_OPEN);
     setDeviceId(mAudioTrack->getRoutedDeviceId());
 
     aaudio_session_id_t actualSessionId =
@@ -250,6 +246,19 @@
              "open() perfMode changed from %d to %d",
              perfMode, actualPerformanceMode);
 
+    if (getState() != AAUDIO_STREAM_STATE_UNINITIALIZED) {
+        ALOGE("%s - Open canceled since state = %d", __func__, getState());
+        if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+            ALOGE("%s - Opening while state is disconnected", __func__);
+            safeReleaseClose();
+            return AAUDIO_ERROR_DISCONNECTED;
+        }
+        safeReleaseClose();
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+
+    setState(AAUDIO_STREAM_STATE_OPEN);
     return AAUDIO_OK;
 }
 
@@ -281,31 +290,19 @@
     AudioStream::close_l();
 }
 
-void AudioStreamTrack::processCallback(int event, void *info) {
 
-    switch (event) {
-        case AudioTrack::EVENT_MORE_DATA:
-            processCallbackCommon(AAUDIO_CALLBACK_OPERATION_PROCESS_DATA, info);
-            break;
-
-            // Stream got rerouted so we disconnect.
-        case AudioTrack::EVENT_NEW_IAUDIOTRACK:
-            // request stream disconnect if the restored AudioTrack has properties not matching
-            // what was requested initially
-            if (mAudioTrack->channelCount() != getSamplesPerFrame()
-                    || mAudioTrack->format() != getFormat()
-                    || mAudioTrack->getSampleRate() != getSampleRate()
-                    || mAudioTrack->getRoutedDeviceId() != getDeviceId()
-                    || getBufferCapacityFromDevice() != getBufferCapacity()
-                    || getFramesPerBurstFromDevice() != getFramesPerBurst()) {
-                processCallbackCommon(AAUDIO_CALLBACK_OPERATION_DISCONNECTED, info);
-            }
-            break;
-
-        default:
-            break;
+void AudioStreamTrack::onNewIAudioTrack() {
+    // Stream got rerouted so we disconnect.
+    // request stream disconnect if the restored AudioTrack has properties not matching
+    // what was requested initially
+    if (mAudioTrack->channelCount() != getSamplesPerFrame()
+          || mAudioTrack->format() != getFormat()
+          || mAudioTrack->getSampleRate() != getSampleRate()
+          || mAudioTrack->getRoutedDeviceId() != getDeviceId()
+          || getBufferCapacityFromDevice() != getBufferCapacity()
+          || getFramesPerBurstFromDevice() != getFramesPerBurst()) {
+        AudioStreamLegacy::onNewIAudioTrack();
     }
-    return;
 }
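
The onNewIAudioTrack() override above only treats a re-created AudioTrack as a disconnect when the restored properties no longer match what was originally requested. A small illustrative sketch of that comparison pattern, using a hypothetical StreamConfig struct in place of the real accessor methods:

    #include <cstdint>

    // Hypothetical illustration of the "configuration drift" check above.
    struct StreamConfig {
        int32_t channelCount;
        int32_t format;
        int32_t sampleRate;
        int32_t deviceId;
        int32_t bufferCapacity;
        int32_t framesPerBurst;

        bool operator==(const StreamConfig &other) const {
            return channelCount == other.channelCount
                    && format == other.format
                    && sampleRate == other.sampleRate
                    && deviceId == other.deviceId
                    && bufferCapacity == other.bufferCapacity
                    && framesPerBurst == other.framesPerBurst;
        }
    };

    // Disconnect only if the restored track no longer matches the requested setup.
    void onTrackRecreated(const StreamConfig &requested, const StreamConfig &restored,
                          void (*forceDisconnect)()) {
        if (!(requested == restored)) {
            forceDisconnect();
        }
    }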
 
 aaudio_result_t AudioStreamTrack::requestStart_l() {
@@ -511,7 +508,7 @@
     case AAUDIO_STREAM_STATE_PAUSED:
         result = mAudioTrack->getPosition(&position);
         if (result == OK) {
-            mFramesRead.update32(position);
+            mFramesRead.update32((int32_t)position);
         }
         break;
     default:
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index f604871..0f4d72b 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -81,9 +81,6 @@
 
     aaudio_result_t updateStateMachine() override;
 
-    // This is public so it can be called from the C callback function.
-    void processCallback(int event, void *info) override;
-
     int64_t incrementClientFrameCounter(int32_t frames) override {
         return incrementFramesWritten(frames);
     }
@@ -100,6 +97,7 @@
 
     int32_t getFramesPerBurstFromDevice() const override;
     int32_t getBufferCapacityFromDevice() const override;
+    void onNewIAudioTrack() override;
 
 private:
 
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index e44ccee..872faca 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -544,45 +544,6 @@
     return AAUDIO_OK;
 }
 
-static int32_t AAudioProperty_getMMapProperty(const char *propName,
-                                              int32_t defaultValue,
-                                              const char * caller) {
-    int32_t prop = property_get_int32(propName, defaultValue);
-    switch (prop) {
-        case AAUDIO_UNSPECIFIED:
-        case AAUDIO_POLICY_NEVER:
-        case AAUDIO_POLICY_ALWAYS:
-        case AAUDIO_POLICY_AUTO:
-            break;
-        default:
-            ALOGE("%s: invalid = %d", caller, prop);
-            prop = defaultValue;
-            break;
-    }
-    return prop;
-}
-
-int32_t AAudioProperty_getMMapPolicy() {
-    return AAudioProperty_getMMapProperty(AAUDIO_PROP_MMAP_POLICY,
-                                          AAUDIO_UNSPECIFIED, __func__);
-}
-
-int32_t AAudioProperty_getMMapExclusivePolicy() {
-    return AAudioProperty_getMMapProperty(AAUDIO_PROP_MMAP_EXCLUSIVE_POLICY,
-                                          AAUDIO_UNSPECIFIED, __func__);
-}
-
-int32_t AAudioProperty_getMixerBursts() {
-    const int32_t defaultBursts = 2; // arbitrary, use 2 for double buffered
-    const int32_t maxBursts = 1024; // arbitrary
-    int32_t prop = property_get_int32(AAUDIO_PROP_MIXER_BURSTS, defaultBursts);
-    if (prop < 1 || prop > maxBursts) {
-        ALOGE("AAudioProperty_getMixerBursts: invalid = %d", prop);
-        prop = defaultBursts;
-    }
-    return prop;
-}
-
 int32_t AAudioProperty_getWakeupDelayMicros() {
     const int32_t minMicros = 0; // arbitrary
     const int32_t defaultMicros = 200; // arbitrary, based on some observed jitter
@@ -599,9 +560,12 @@
 }
 
 int32_t AAudioProperty_getMinimumSleepMicros() {
-    const int32_t minMicros = 20; // arbitrary
-    const int32_t defaultMicros = 200; // arbitrary
-    const int32_t maxMicros = 2000; // arbitrary
+    const int32_t minMicros = 1; // arbitrary
+    // Higher values can increase latency for moderate workloads.
+    // Lower values can cause the CPU to short-cycle if there is a bug in
+    // calculating the wakeup times.
+    const int32_t defaultMicros = 100; // arbitrary
+    const int32_t maxMicros = 200; // arbitrary
     int32_t prop = property_get_int32(AAUDIO_PROP_MINIMUM_SLEEP_USEC, defaultMicros);
     if (prop < minMicros) {
         ALOGW("AAudioProperty_getMinimumSleepMicros: clipped %d to %d", prop, minMicros);
@@ -613,18 +577,6 @@
     return prop;
 }
 
-int32_t AAudioProperty_getHardwareBurstMinMicros() {
-    const int32_t defaultMicros = 1000; // arbitrary
-    const int32_t maxMicros = 1000 * 1000; // arbitrary
-    int32_t prop = property_get_int32(AAUDIO_PROP_HW_BURST_MIN_USEC, defaultMicros);
-    if (prop < 1 || prop > maxMicros) {
-        ALOGE("AAudioProperty_getHardwareBurstMinMicros: invalid = %d, use %d",
-              prop, defaultMicros);
-        prop = defaultMicros;
-    }
-    return prop;
-}
-
 static int32_t AAudioProperty_getMMapOffsetMicros(const char *functionName,
         const char *propertyName) {
     const int32_t minMicros = -20000; // arbitrary
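
AAudioProperty_getMinimumSleepMicros() above reads a property and clips it into [minMicros, maxMicros]. Assuming property_get_int32() from <cutils/properties.h> with its usual (key, default) signature, the general pattern can be sketched as follows; the helper name and log tag are illustrative only:

    #define LOG_TAG "PropertyClampExample"
    #include <cutils/properties.h>  // property_get_int32(), assumed available
    #include <utils/Log.h>

    // Sketch: read an integer system property and clamp it into a sane range,
    // mirroring AAudioProperty_getMinimumSleepMicros() above.
    static int32_t readClampedProperty(const char *name, int32_t defaultValue,
                                       int32_t minValue, int32_t maxValue) {
        int32_t prop = property_get_int32(name, defaultValue);
        if (prop < minValue) {
            ALOGW("%s: clipped %d to %d", name, prop, minValue);
            prop = minValue;
        } else if (prop > maxValue) {
            ALOGW("%s: clipped %d to %d", name, prop, maxValue);
            prop = maxValue;
        }
        return prop;
    }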
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index f24df46..b59ce1c 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -128,27 +128,6 @@
 // Note that this code may be replaced by Settings or by some other system configuration tool.
 
 /**
- * Read system property.
- * @return AAUDIO_UNSPECIFIED, AAUDIO_POLICY_NEVER or AAUDIO_POLICY_AUTO or AAUDIO_POLICY_ALWAYS
- */
-int32_t AAudioProperty_getMMapPolicy();
-#define AAUDIO_PROP_MMAP_POLICY           "aaudio.mmap_policy"
-
-/**
- * Read system property.
- * @return AAUDIO_UNSPECIFIED, AAUDIO_POLICY_NEVER or AAUDIO_POLICY_AUTO or AAUDIO_POLICY_ALWAYS
- */
-int32_t AAudioProperty_getMMapExclusivePolicy();
-#define AAUDIO_PROP_MMAP_EXCLUSIVE_POLICY "aaudio.mmap_exclusive_policy"
-
-/**
- * Read system property.
- * @return number of bursts per AAudio service mixer cycle
- */
-int32_t AAudioProperty_getMixerBursts();
-#define AAUDIO_PROP_MIXER_BURSTS           "aaudio.mixer_bursts"
-
-/**
  * Read a system property that specifies the number of extra microseconds that a thread
  * should sleep when waiting for another thread to service a FIFO. This is used
  * to avoid the waking thread from being overly optimistic about the other threads
@@ -169,19 +148,6 @@
 #define AAUDIO_PROP_MINIMUM_SLEEP_USEC      "aaudio.minimum_sleep_usec"
 
 /**
- * Read system property.
- * This is handy in case the DMA is bursting too quickly for the CPU to keep up.
- * For example, there may be a DMA burst every 100 usec but you only
- * want to feed the MMAP buffer every 2000 usec.
- *
- * This will affect the framesPerBurst for an MMAP stream.
- *
- * @return minimum number of microseconds for a MMAP HW burst
- */
-int32_t AAudioProperty_getHardwareBurstMinMicros();
-#define AAUDIO_PROP_HW_BURST_MIN_USEC      "aaudio.hw_burst_min_usec"
-
-/**
  * Read a system property that specifies an offset that will be added to MMAP timestamps.
  * This can be used to correct bias in the timestamp.
  * It can also be used to analyze the time distribution of the timestamp
@@ -227,7 +193,7 @@
  * @return true if f() eventually returns true.
  */
 static inline bool AAudio_tryUntilTrue(
-        std::function<bool()> f, int times, int sleepMs) {
+        const std::function<bool()>& f, int times, int sleepMs) {
     static const useconds_t US_PER_MS = 1000;
 
     sleepMs = std::max(sleepMs, 0);
@@ -299,9 +265,7 @@
 
 class Timestamp {
 public:
-    Timestamp()
-            : mPosition(0)
-            , mNanoseconds(0) {}
+    Timestamp() = default;
     Timestamp(int64_t position, int64_t nanoseconds)
             : mPosition(position)
             , mNanoseconds(nanoseconds) {}
@@ -312,8 +276,8 @@
 
 private:
     // These cannot be const because we need to implement the copy assignment operator.
-    int64_t mPosition;
-    int64_t mNanoseconds;
+    int64_t mPosition{0};
+    int64_t mNanoseconds{0};
 };
 
 
diff --git a/media/libaaudio/src/utility/FixedBlockAdapter.h b/media/libaaudio/src/utility/FixedBlockAdapter.h
index 4dc7e68..290e473 100644
--- a/media/libaaudio/src/utility/FixedBlockAdapter.h
+++ b/media/libaaudio/src/utility/FixedBlockAdapter.h
@@ -35,7 +35,7 @@
 class FixedBlockAdapter
 {
 public:
-    FixedBlockAdapter(FixedBlockProcessor &fixedBlockProcessor)
+    explicit FixedBlockAdapter(FixedBlockProcessor &fixedBlockProcessor)
     : mFixedBlockProcessor(fixedBlockProcessor) {}
 
     virtual ~FixedBlockAdapter() = default;
diff --git a/media/libaaudio/src/utility/FixedBlockReader.h b/media/libaaudio/src/utility/FixedBlockReader.h
index 128dd52..dc82416 100644
--- a/media/libaaudio/src/utility/FixedBlockReader.h
+++ b/media/libaaudio/src/utility/FixedBlockReader.h
@@ -30,7 +30,7 @@
 class FixedBlockReader : public FixedBlockAdapter
 {
 public:
-    FixedBlockReader(FixedBlockProcessor &fixedBlockProcessor);
+    explicit FixedBlockReader(FixedBlockProcessor &fixedBlockProcessor);
 
     virtual ~FixedBlockReader() = default;
 
diff --git a/media/libaaudio/src/utility/FixedBlockWriter.h b/media/libaaudio/src/utility/FixedBlockWriter.h
index f1d917c..3e89b5d 100644
--- a/media/libaaudio/src/utility/FixedBlockWriter.h
+++ b/media/libaaudio/src/utility/FixedBlockWriter.h
@@ -28,7 +28,7 @@
 class FixedBlockWriter : public FixedBlockAdapter
 {
 public:
-    FixedBlockWriter(FixedBlockProcessor &fixedBlockProcessor);
+    explicit FixedBlockWriter(FixedBlockProcessor &fixedBlockProcessor);
 
     virtual ~FixedBlockWriter() = default;
 
diff --git a/media/libaaudio/src/utility/MonotonicCounter.h b/media/libaaudio/src/utility/MonotonicCounter.h
index 63add4e..51eb69b 100644
--- a/media/libaaudio/src/utility/MonotonicCounter.h
+++ b/media/libaaudio/src/utility/MonotonicCounter.h
@@ -30,8 +30,8 @@
 class MonotonicCounter {
 
 public:
-    MonotonicCounter() {};
-    virtual ~MonotonicCounter() {};
+    MonotonicCounter() = default;
+    virtual ~MonotonicCounter() = default;
 
     /**
      * @return current value of the counter
@@ -41,7 +41,12 @@
     }
 
     /**
-     * advance the current value to match the counter
+     * Advance the current value to match the counter.
+     *
+     * Note that it will take several million years for the 64-bit
+     * counters to wrap around.
+     * So we do not use __builtin_sub_overflow.
+     * We want to know if overflow happens because of a bug.
      */
     void catchUpTo(int64_t counter) {
         if ((counter - mCounter64) > 0) {
@@ -74,7 +79,8 @@
      * @return current value of the 64-bit counter
      */
     int64_t update32(int32_t counter32) {
-        int32_t delta = counter32 - mCounter32;
+        int32_t delta;
+        __builtin_sub_overflow(counter32, mCounter32, &delta);
         // protect against the mCounter64 going backwards
         if (delta > 0) {
             mCounter64 += delta;
@@ -108,5 +114,4 @@
     int32_t mCounter32 = 0;
 };
 
-
 #endif //UTILITY_MONOTONIC_COUNTER_H
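
The update32() change above computes the 32-bit delta with __builtin_sub_overflow (a GCC/Clang builtin) so that wrap-around is well defined instead of being signed-overflow undefined behavior, which matters now that the tests build with the integer_overflow sanitizer. A minimal sketch, mirroring the builtin_wrap test added later in this patch:

    #include <cassert>
    #include <cstdint>

    // Wrap-safe signed delta between two 32-bit counter readings.
    // With plain subtraction, (newer - older) would be signed overflow
    // when the counter crosses 0x7FFFFFFF; the builtin makes the wrap explicit.
    int32_t wrapSafeDelta32(int32_t newer, int32_t older) {
        int32_t delta;
        __builtin_sub_overflow(newer, older, &delta);
        return delta;
    }

    int main() {
        // Crossing the INT32_MAX boundary still yields the small positive delta.
        assert(wrapSafeDelta32(static_cast<int32_t>(0x80000010),
                               static_cast<int32_t>(0x7FFFFFF0)) == 0x20);
        return 0;
    }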
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 98e9727..4b45909 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -13,6 +13,11 @@
         "-Wall",
         "-Werror",
     ],
+
+    sanitize: {
+        integer_overflow: true,
+        misc_undefined: ["bounds"],
+    },
 }
 
 cc_test {
@@ -48,7 +53,7 @@
     shared_libs: ["libaaudio_internal"],
 }
 
-cc_test {
+cc_binary {
     name: "test_timestamps",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_timestamps.cpp"],
@@ -60,121 +65,71 @@
     name: "test_open_params",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_open_params.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
-cc_test {
+cc_binary {
     name: "test_no_close",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_no_close.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
-cc_test {
+cc_binary {
     name: "test_aaudio_recovery",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_recovery.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
-cc_test {
+cc_binary {
     name: "test_n_streams",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_n_streams.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
-cc_test {
+cc_binary {
     name: "test_bad_disconnect",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_bad_disconnect.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
 cc_test {
     name: "test_various",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_various.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
 cc_test {
     name: "test_session_id",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_session_id.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
-cc_test {
+cc_binary {
     name: "test_aaudio_monkey",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_aaudio_monkey.cpp"],
     header_libs: ["libaaudio_example_utils"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
 cc_test {
     name: "test_attributes",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_attributes.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
 cc_test {
     name: "test_interference",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_interference.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
 cc_test {
@@ -197,27 +152,29 @@
 }
 
 cc_test {
-    name: "test_return_stop",
+    name: "test_monotonic_counter",
     defaults: ["libaaudio_tests_defaults"],
-    srcs: ["test_return_stop.cpp"],
+    srcs: ["test_monotonic_counter.cpp"],
     shared_libs: [
-        "libaaudio",
+        "libaaudio_internal",
         "libbinder",
         "libcutils",
         "libutils",
     ],
 }
 
+cc_binary {
+    name: "test_return_stop",
+    defaults: ["libaaudio_tests_defaults"],
+    srcs: ["test_return_stop.cpp"],
+    shared_libs: ["libaaudio"],
+}
+
 cc_test {
     name: "test_callback_race",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_callback_race.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
 
 cc_test {
@@ -238,7 +195,7 @@
     ],
 }
 
-cc_test {
+cc_binary {
     name: "test_steal_exclusive",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_steal_exclusive.cpp"],
@@ -251,15 +208,9 @@
     ],
 }
 
-
-cc_test {
+cc_binary {
     name: "test_disconnect_race",
     defaults: ["libaaudio_tests_defaults"],
     srcs: ["test_disconnect_race.cpp"],
-    shared_libs: [
-        "libaaudio",
-        "libbinder",
-        "libcutils",
-        "libutils",
-    ],
+    shared_libs: ["libaaudio"],
 }
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index d540866..b88d562 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -16,6 +16,10 @@
 
 // Test AAudio attributes such as Usage, ContentType and InputPreset.
 
+// TODO Many of these tests are duplicates of CTS tests in
+// "test_aaudio_attributes.cpp". That other file is more current.
+// So these tests could be deleted.
+
 #include <stdio.h>
 #include <unistd.h>
 
@@ -91,7 +95,7 @@
     aaudio_allowed_capture_policy_t expectedCapturePolicy =
             (capturePolicy == DONT_SET || capturePolicy == AAUDIO_UNSPECIFIED)
             ? AAUDIO_ALLOW_CAPTURE_BY_ALL // default
-            : preset;
+            : capturePolicy;
     EXPECT_EQ(expectedCapturePolicy, AAudioStream_getAllowedCapturePolicy(aaudioStream));
 
     bool expectedPrivacyMode =
@@ -132,10 +136,7 @@
     AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
     AAUDIO_USAGE_GAME,
     AAUDIO_USAGE_ASSISTANT,
-    AAUDIO_SYSTEM_USAGE_EMERGENCY,
-    AAUDIO_SYSTEM_USAGE_SAFETY,
-    AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
-    AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT
+    // Note that the AAUDIO_SYSTEM_USAGE_* values require special permission.
 };
 
 static const aaudio_content_type_t sContentypes[] = {
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index d563a7e..66b77eb 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -16,6 +16,9 @@
 
 /*
  * Test FlowGraph
+ *
+ * This file also tests a few different conversion techniques because
+ * they have sometimes caused compiler bugs.
  */
 
 #include <iostream>
@@ -23,35 +26,161 @@
 #include <gtest/gtest.h>
 
 #include "flowgraph/ClipToRange.h"
+#include "flowgraph/MonoBlend.h"
 #include "flowgraph/MonoToMultiConverter.h"
 #include "flowgraph/SourceFloat.h"
 #include "flowgraph/RampLinear.h"
 #include "flowgraph/SinkFloat.h"
 #include "flowgraph/SinkI16.h"
 #include "flowgraph/SinkI24.h"
+#include "flowgraph/SinkI32.h"
 #include "flowgraph/SourceI16.h"
 #include "flowgraph/SourceI24.h"
 
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
 
 constexpr int kBytesPerI24Packed = 3;
 
+constexpr int kNumSamples = 8;
+constexpr std::array<float, kNumSamples> kInputFloat = {
+    1.0f, 0.5f, -0.25f, -1.0f,
+    0.0f, 53.9f, -87.2f, -1.02f};
+
+// Corresponding PCM values as integers.
+constexpr std::array<int16_t, kNumSamples>  kExpectedI16 = {
+    INT16_MAX, 1 << 14, INT16_MIN / 4, INT16_MIN,
+    0, INT16_MAX, INT16_MIN, INT16_MIN};
+
+constexpr std::array<int32_t, kNumSamples>  kExpectedI32 = {
+    INT32_MAX, 1 << 30, INT32_MIN / 4, INT32_MIN,
+    0, INT32_MAX, INT32_MIN, INT32_MIN};
+
+// =================================== FLOAT to I16 ==============
+
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+void local_convert_float_to_int16(const float *input,
+                                  int16_t *output,
+                                  int count) {
+    for (int i = 0; i < count; i++) {
+        int32_t n = (int32_t) (*input++ * 32768.0f);
+        *output++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
+    }
+}
+
+TEST(test_flowgraph, local_convert_float_int16) {
+    std::array<int16_t, kNumSamples> output;
+
+    // Do it inline, which will probably work even with the buggy compiler.
+    // This validates the expected data.
+    const float *in = kInputFloat.data();
+    int16_t *out = output.data();
+    output.fill(777);
+    for (int i = 0; i < kNumSamples; i++) {
+        int32_t n = (int32_t) (*in++ * 32768.0f);
+        *out++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
+    }
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+
+    // Convert audio signal using the function.
+    output.fill(777);
+    local_convert_float_to_int16(kInputFloat.data(), output.data(), kNumSamples);
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
 TEST(test_flowgraph, module_sinki16) {
-    static const float input[] = {1.0f, 0.5f, -0.25f, -1.0f, 0.0f, 53.9f, -87.2f};
-    static const int16_t expected[] = {32767, 16384, -8192, -32768, 0, 32767, -32768};
-    int16_t output[20];
+    static constexpr int kNumSamples = 8;
+    std::array<int16_t, kNumSamples + 10> output; // larger than input
+
     SourceFloat sourceFloat{1};
     SinkI16 sinkI16{1};
 
-    int numInputFrames = sizeof(input) / sizeof(input[0]);
-    sourceFloat.setData(input, numInputFrames);
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
     sourceFloat.output.connect(&sinkI16.input);
 
-    int numOutputFrames = sizeof(output) / sizeof(int16_t);
-    int32_t numRead = sinkI16.read(output, numOutputFrames);
-    ASSERT_EQ(numInputFrames, numRead);
+    output.fill(777);
+    int32_t numRead = sinkI16.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples, numRead);
     for (int i = 0; i < numRead; i++) {
-        EXPECT_EQ(expected[i], output[i]);
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+// =================================== FLOAT to I32 ==============
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+static int32_t clamp32FromFloat(float f)
+{
+    static const float scale = (float)(1UL << 31);
+    static const float limpos = 1.;
+    static const float limneg = -1.;
+
+    if (f <= limneg) {
+        return INT32_MIN;
+    } else if (f >= limpos) {
+        return INT32_MAX;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+void local_convert_float_to_int32(const float *input,
+                                  int32_t *output,
+                                  int count) {
+    for (int i = 0; i < count; i++) {
+        *output++ = clamp32FromFloat(*input++);
+    }
+}
+
+TEST(test_flowgraph, simple_convert_float_int32) {
+    std::array<int32_t, kNumSamples> output;
+
+    // Do it inline, which will probably work even with a buggy compiler.
+    // This validates the expected data.
+    const float *in = kInputFloat.data();
+    output.fill(777);
+    int32_t *out = output.data();
+    for (int i = 0; i < kNumSamples; i++) {
+        int64_t n = (int64_t) (*in++ * 2147483648.0f);
+        *out++ = (int32_t)std::min((int64_t)INT32_MAX,
+                                   std::max((int64_t)INT32_MIN, n)); // clip
+    }
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+TEST(test_flowgraph, local_convert_float_int32) {
+    std::array<int32_t, kNumSamples> output;
+    // Convert audio signal using the function.
+    output.fill(777);
+    local_convert_float_to_int32(kInputFloat.data(), output.data(), kNumSamples);
+    for (int i = 0; i < kNumSamples; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+    }
+}
+
+TEST(test_flowgraph, module_sinki32) {
+    std::array<int32_t, kNumSamples + 10> output; // larger than input
+
+    SourceFloat sourceFloat{1};
+    SinkI32 sinkI32{1};
+
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
+    sourceFloat.output.connect(&sinkI32.input);
+
+    output.fill(777);
+    int32_t numRead = sinkI32.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
     }
 }
 
@@ -76,31 +205,40 @@
 }
 
 TEST(test_flowgraph, module_ramp_linear) {
+    constexpr int singleNumOutput = 1;
     constexpr int rampSize = 5;
     constexpr int numOutput = 100;
     constexpr float value = 1.0f;
-    constexpr float target = 100.0f;
+    constexpr float initialTarget = 10.0f;
+    constexpr float finalTarget = 100.0f;
+    constexpr float tolerance = 0.0001f; // arbitrary
     float output[numOutput] = {};
     RampLinear rampLinear{1};
     SinkFloat sinkFloat{1};
 
     rampLinear.input.setValue(value);
     rampLinear.setLengthInFrames(rampSize);
-    rampLinear.setTarget(target);
-    rampLinear.forceCurrent(0.0f);
-
     rampLinear.output.connect(&sinkFloat.input);
 
+    // Check that the values go to the initial target instantly.
+    rampLinear.setTarget(initialTarget);
+    int32_t singleNumRead = sinkFloat.read(output, singleNumOutput);
+    ASSERT_EQ(singleNumRead, singleNumOutput);
+    EXPECT_NEAR(value * initialTarget, output[0], tolerance);
+
+    // Now set target and check that the linear ramp works as expected.
+    rampLinear.setTarget(finalTarget);
     int32_t numRead = sinkFloat.read(output, numOutput);
+    const float incrementSize = (finalTarget - initialTarget) / rampSize;
     ASSERT_EQ(numOutput, numRead);
-    constexpr float tolerance = 0.0001f; // arbitrary
+
     int i = 0;
     for (; i < rampSize; i++) {
-        float expected = i * value * target / rampSize;
+        float expected = value * (initialTarget + i * incrementSize);
         EXPECT_NEAR(expected, output[i], tolerance);
     }
     for (; i < numOutput; i++) {
-        float expected = value * target;
+        float expected = value * finalTarget;
         EXPECT_NEAR(expected, output[i], tolerance);
     }
 }
@@ -155,3 +293,29 @@
         EXPECT_NEAR(expected[i], output[i], tolerance);
     }
 }
+
+TEST(test_flowgraph, module_mono_blend) {
+    // Two channel to two channel with 3 inputs and outputs.
+    constexpr int numChannels = 2;
+    constexpr int numFrames = 3;
+
+    static const float input[] = {-0.7, 0.5, -0.25, 1.25, 1000, 2000};
+    static const float expected[] = {-0.1, -0.1, 0.5, 0.5, 1500, 1500};
+    float output[100];
+    SourceFloat sourceFloat{numChannels};
+    MonoBlend monoBlend{numChannels};
+    SinkFloat sinkFloat{numChannels};
+
+    sourceFloat.setData(input, numFrames);
+
+    sourceFloat.output.connect(&monoBlend.input);
+    monoBlend.output.connect(&sinkFloat.input);
+
+    int32_t numRead = sinkFloat.read(output, numFrames);
+    ASSERT_EQ(numRead, numFrames);
+    constexpr float tolerance = 0.000001f; // arbitrary
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_NEAR(expected[i], output[i], tolerance);
+    }
+}
+
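
Judging from the expected values in module_mono_blend above (each stereo pair comes out as its average on both channels), the blend stage can be pictured as the per-frame averaging below. This is an illustration inferred from the test data, not the actual MonoBlend implementation:

    #include <cstdint>

    // Illustration only: average all channels of one frame and write the
    // average back to every channel, matching the test's expectations
    // (e.g. {-0.7, 0.5} -> {-0.1, -0.1}).
    void monoBlendFrame(float *frame, int32_t channelCount) {
        float sum = 0.0f;
        for (int32_t ch = 0; ch < channelCount; ch++) {
            sum += frame[ch];
        }
        const float average = sum / channelCount;
        for (int32_t ch = 0; ch < channelCount; ch++) {
            frame[ch] = average;
        }
    }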
diff --git a/media/libaaudio/tests/test_monotonic_counter.cpp b/media/libaaudio/tests/test_monotonic_counter.cpp
new file mode 100644
index 0000000..5cbbaf7
--- /dev/null
+++ b/media/libaaudio/tests/test_monotonic_counter.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Test MonotonicCounter
+ */
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include "utility/MonotonicCounter.h"
+
+TEST(test_monotonic_counter, builtin_wrap) {
+    int32_t x = 0x7FFFFFF0;
+    int32_t y = 0x80000010;
+    int32_t delta;
+    // delta = y - x; // This would cause a numeric overflow!
+    __builtin_sub_overflow(y, x, &delta);
+    ASSERT_EQ(0x20, delta);
+}
+
+// test updating past some overflow points
+TEST(test_monotonic_counter, mono_counter_update32_wrap) {
+    MonotonicCounter counter;
+    ASSERT_EQ(0, counter.get());
+
+    static constexpr uint32_t x = (uint32_t) 0x7FFFFFF0;
+    counter.update32(x);
+    ASSERT_EQ((int64_t)0x7FFFFFF0, counter.get());
+
+    static constexpr uint32_t y = (uint32_t) 0x80000010;
+    counter.update32(y);
+    ASSERT_EQ((int64_t)0x80000010, counter.get());
+
+    counter.update32(0);
+    ASSERT_EQ((int64_t)0x100000000, counter.get());
+}
+
+TEST(test_monotonic_counter, mono_counter_roundup) {
+    MonotonicCounter counter;
+    static constexpr uint32_t x = 2345;
+    counter.update32(x);
+    ASSERT_EQ((int64_t)x, counter.get());
+
+    counter.roundUp64(100);
+    ASSERT_EQ((int64_t)2400, counter.get());
+}
+
+TEST(test_monotonic_counter, mono_counter_catchup) {
+    MonotonicCounter counter;
+    counter.update32(7654);
+    counter.catchUpTo(5000); // already past 5000 so no change
+    ASSERT_EQ((int64_t)7654, counter.get());
+    counter.catchUpTo(9876); // jumps
+    ASSERT_EQ((int64_t)9876, counter.get());
+}
+
+TEST(test_monotonic_counter, mono_counter_increment) {
+    MonotonicCounter counter;
+    counter.update32(1000);
+    counter.increment(-234); // will not go backwards
+    ASSERT_EQ((int64_t)1000, counter.get());
+    counter.increment(96); // advances
+    ASSERT_EQ((int64_t)1096, counter.get());
+}
+
+TEST(test_monotonic_counter, mono_counter_reset) {
+    MonotonicCounter counter;
+    counter.update32(1000);
+    // Counter is monotonic and should not go backwards.
+    counter.update32(500); // No change because 32-bit counter is already past 1000.
+    ASSERT_EQ((int64_t)1000, counter.get());
+
+    counter.reset32();
+    counter.update32(500);
+    ASSERT_EQ((int64_t)1500, counter.get());
+}
diff --git a/media/libaaudio/tests/test_steal_exclusive.cpp b/media/libaaudio/tests/test_steal_exclusive.cpp
index 05c560d..ca4f3d6 100644
--- a/media/libaaudio/tests/test_steal_exclusive.cpp
+++ b/media/libaaudio/tests/test_steal_exclusive.cpp
@@ -110,7 +110,11 @@
         mOpenDelayMillis = openDelayMillis;
     }
 
-    void restartStream() {
+    void setCloseEnabled(bool enabled) {
+        mCloseEnabled = enabled;
+    }
+
+    aaudio_result_t restartStream() {
         int retriesLeft = mMaxRetries;
         aaudio_result_t result;
         do {
@@ -126,6 +130,7 @@
                     mName.c_str(),
                     AAudio_convertResultToText(result));
         } while (retriesLeft-- > 0 && result != AAUDIO_OK);
+        return result;
     }
 
     aaudio_data_callback_result_t onAudioReady(
@@ -189,10 +194,12 @@
         std::lock_guard<std::mutex> lock(mLock);
         aaudio_result_t result = AAUDIO_OK;
         if (mStream != nullptr) {
-            result = AAudioStream_close(mStream);
-            if (result != AAUDIO_OK) {
-                printf("AAudioStream_close returned %s\n",
-                       AAudio_convertResultToText(result));
+            if (mCloseEnabled) {
+                result = AAudioStream_close(mStream);
+                printf("AAudioStream_close() returned %s\n",
+                        AAudio_convertResultToText(result));
+            } else {
+                printf("AAudioStream_close() DISABLED!\n");
             }
             mStream = nullptr;
         }
@@ -232,6 +239,12 @@
         return AAudioStream_requestStart(mStream);
     }
 
+    aaudio_result_t pause() {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStream == nullptr) return 0;
+        return AAudioStream_requestPause(mStream);
+    }
+
     aaudio_result_t stop() {
         std::lock_guard<std::mutex> lock(mLock);
         if (mStream == nullptr) return 0;
@@ -287,6 +300,7 @@
     std::string         mName;
     int                 mMaxRetries = 1;
     int                 mOpenDelayMillis = 0;
+    bool                mCloseEnabled = true;
 };
 
 // Callback function that fills the audio output buffer.
@@ -319,11 +333,13 @@
 }
 
 static void s_usage() {
-    printf("test_steal_exclusive [-i] [-r{maxRetries}] [-d{delay}] -s\n");
+    printf("test_steal_exclusive [-i] [-r{maxRetries}] [-d{delay}] [-p{pausedTime}]-s -c{flag}\n");
     printf("     -i direction INPUT, otherwise OUTPUT\n");
-    printf("     -d delay open by milliseconds, default = 0\n");
-    printf("     -r max retries in the error callback, default = 1\n");
+    printf("     -d Delay open by milliseconds, default = 0\n");
+    printf("     -p Pause first stream then sleep for msec before opening second streams, default = 0\n");
+    printf("     -r max Retries in the error callback, default = 1\n");
     printf("     -s try to open in SHARED mode\n");
+    printf("     -c enable or disabling Closing of the stream with 0/1, default = 1\n");
 }
 
 int main(int argc, char ** argv) {
@@ -334,7 +350,9 @@
     int errorCount = 0;
     int maxRetries = 1;
     int openDelayMillis = 0;
+    bool closeEnabled = true;
     aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
+    int pausedMillis = 0;
 
     // Make printf print immediately so that debug info is not stuck
     // in a buffer if we hang or crash.
@@ -348,12 +366,18 @@
         if (arg[0] == '-') {
             char option = arg[1];
             switch (option) {
+                case 'c':
+                    closeEnabled = atoi(&arg[2]) != 0;
+                    break;
                 case 'd':
                     openDelayMillis = atoi(&arg[2]);
                     break;
                 case 'i':
                     direction = AAUDIO_DIRECTION_INPUT;
                     break;
+                case 'p':
+                    pausedMillis = atoi(&arg[2]);
+                    break;
                 case 'r':
                     maxRetries = atoi(&arg[2]);
                     break;
@@ -376,6 +400,8 @@
     thief.setOpenDelayMillis(openDelayMillis);
     victim.setMaxRetries(maxRetries);
     thief.setMaxRetries(maxRetries);
+    victim.setCloseEnabled(closeEnabled);
+    thief.setCloseEnabled(closeEnabled);
 
     result = victim.openAudioStream(direction, requestedSharingMode);
     if (result != AAUDIO_OK) {
@@ -414,6 +440,12 @@
         }
     }
 
+    if (pausedMillis > 0) {
+        printf("Pausing the VICTIM for %d millis before starting THIEF -----\n", pausedMillis);
+        victim.pause();
+        usleep(pausedMillis * 1000);
+    }
+
     printf("Trying to start the THIEF stream, which may steal the VICTIM MMAP resource -----\n");
     result = thief.openAudioStream(direction, requestedSharingMode);
     if (result != AAUDIO_OK) {
@@ -429,6 +461,25 @@
         errorCount++;
     }
 
+    if (pausedMillis > 0) {
+        result = victim.start();
+        printf("Restarting VICTIM, AAudioStream_requestStart(VICTIM) returned %d "
+               ">>>>>>>>>>>>>>>>>>>>>>\n", result);
+        if (result == AAUDIO_ERROR_DISCONNECTED) {
+            // The stream was disconnected because the THIEF stole the MMAP resource.
+            printf("VICTIM was disconnected while hanging as the THIEF "
+                   "stole the resource >>>>>>>>>>>>>>>>>>>>>>\n");
+            result = victim.restartStream();
+            printf("Restarting VICTIM, AAudioStream_requestStart(VICTIM) returned %d "
+                   ">>>>>>>>>>>>>>>>>>>>>>\n", result);
+            if (result != AAUDIO_OK) {
+                errorCount++;
+            }
+        } else {
+            errorCount++;
+        }
+    }
+
     // Give stream time to advance.
     usleep(SLEEP_DURATION_MSEC * 1000);
 
@@ -442,7 +493,7 @@
     }
 
     LOGI("Both streams running. Ask user to plug in headset. ====");
-    printf("\n====\nPlease PLUG IN A HEADSET now!\n====\n\n");
+    printf("\n====\nPlease PLUG IN A HEADSET now! - OPTIONAL\n====\n\n");
 
     if (result == AAUDIO_OK) {
         const int watchLoops = DUET_DURATION_MSEC / SLEEP_DURATION_MSEC;
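The hunk above makes restartStream() report its final aaudio_result_t so main() can recover a paused
VICTIM whose MMAP resource was stolen. A minimal sketch of that recovery pattern follows; it assumes
only the public AAudio C API plus a hypothetical wrapper exposing start() and restartStream() the way
the test helper does (the wrapper name and template are illustrative, not the test's actual class):

#include <aaudio/AAudio.h>
#include <cstdio>

// Hedged sketch: any wrapper exposing start() and restartStream() returning
// aaudio_result_t will do; this is not the test's real helper class.
template <typename StreamWrapper>
int recoverVictimAfterSteal(StreamWrapper& victim) {
    aaudio_result_t result = victim.start();          // resume after the pause
    if (result == AAUDIO_ERROR_DISCONNECTED) {
        // The THIEF stole the MMAP resource while the VICTIM was paused;
        // close, reopen and restart the stream.
        result = victim.restartStream();
    }
    if (result != AAUDIO_OK) {
        printf("recovery failed: %s\n", AAudio_convertResultToText(result));
        return 1;
    }
    return 0;
}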
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index acab774..11724e0 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -14,6 +14,11 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <unordered_map>
+#include <utility>
+#include <vector>
+
 #define LOG_TAG "AidlConversion"
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
@@ -21,6 +26,7 @@
 #include "media/AidlConversion.h"
 
 #include <media/ShmemCompat.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // Utilities
@@ -28,6 +34,40 @@
 namespace android {
 
 using base::unexpected;
+using media::audio::common::AudioChannelLayout;
+using media::audio::common::AudioConfig;
+using media::audio::common::AudioConfigBase;
+using media::audio::common::AudioContentType;
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioDeviceType;
+using media::audio::common::AudioEncapsulationMetadataType;
+using media::audio::common::AudioEncapsulationMode;
+using media::audio::common::AudioEncapsulationType;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioFormatType;
+using media::audio::common::AudioGain;
+using media::audio::common::AudioGainConfig;
+using media::audio::common::AudioGainMode;
+using media::audio::common::AudioInputFlags;
+using media::audio::common::AudioIoFlags;
+using media::audio::common::AudioMode;
+using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioOutputFlags;
+using media::audio::common::AudioPortDeviceExt;
+using media::audio::common::AudioPortExt;
+using media::audio::common::AudioPortMixExt;
+using media::audio::common::AudioPortMixExtUseCase;
+using media::audio::common::AudioProfile;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStandard;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioUsage;
+using media::audio::common::AudioUuid;
+using media::audio::common::ExtraAudioDescriptor;
+using media::audio::common::Int;
+using media::audio::common::PcmType;
 
 namespace {
 
@@ -219,75 +259,7 @@
     return std::string(legacy.c_str());
 }
 
-// The legacy enum is unnamed. Thus, we use int32_t.
-ConversionResult<int32_t> aidl2legacy_AudioPortConfigType_int32_t(
-        media::AudioPortConfigType aidl) {
-    switch (aidl) {
-        case media::AudioPortConfigType::SAMPLE_RATE:
-            return AUDIO_PORT_CONFIG_SAMPLE_RATE;
-        case media::AudioPortConfigType::CHANNEL_MASK:
-            return AUDIO_PORT_CONFIG_CHANNEL_MASK;
-        case media::AudioPortConfigType::FORMAT:
-            return AUDIO_PORT_CONFIG_FORMAT;
-        case media::AudioPortConfigType::GAIN:
-            return AUDIO_PORT_CONFIG_GAIN;
-        case media::AudioPortConfigType::FLAGS:
-            return AUDIO_PORT_CONFIG_FLAGS;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-// The legacy enum is unnamed. Thus, we use int32_t.
-ConversionResult<media::AudioPortConfigType> legacy2aidl_int32_t_AudioPortConfigType(
-        int32_t legacy) {
-    switch (legacy) {
-        case AUDIO_PORT_CONFIG_SAMPLE_RATE:
-            return media::AudioPortConfigType::SAMPLE_RATE;
-        case AUDIO_PORT_CONFIG_CHANNEL_MASK:
-            return media::AudioPortConfigType::CHANNEL_MASK;
-        case AUDIO_PORT_CONFIG_FORMAT:
-            return media::AudioPortConfigType::FORMAT;
-        case AUDIO_PORT_CONFIG_GAIN:
-            return media::AudioPortConfigType::GAIN;
-        case AUDIO_PORT_CONFIG_FLAGS:
-            return media::AudioPortConfigType::FLAGS;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl) {
-    return convertBitmask<unsigned int, int32_t, int, media::AudioPortConfigType>(
-            aidl, aidl2legacy_AudioPortConfigType_int32_t,
-            // AudioPortConfigType enum is index-based.
-            indexToEnum_index<media::AudioPortConfigType>,
-            // AUDIO_PORT_CONFIG_* flags are mask-based.
-            enumToMask_bitmask<unsigned int, int>);
-}
-
-ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy) {
-    return convertBitmask<int32_t, unsigned int, media::AudioPortConfigType, int>(
-            legacy, legacy2aidl_int32_t_AudioPortConfigType,
-            // AUDIO_PORT_CONFIG_* flags are mask-based.
-            indexToEnum_bitmask<unsigned>,
-            // AudioPortConfigType enum is index-based.
-            enumToMask_index<int32_t, media::AudioPortConfigType>);
-}
-
-ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl) {
-    // TODO(ytai): should we convert bit-by-bit?
-    // One problem here is that the representation is both opaque and is different based on the
-    // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
-    return convertReinterpret<audio_channel_mask_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy) {
-    // TODO(ytai): should we convert bit-by-bit?
-    // One problem here is that the representation is both opaque and is different based on the
-    // context (input vs. output). Can determine based on type and role, as per useInChannelMask().
-    return convertReinterpret<int32_t>(legacy);
-}
-
-ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
         media::AudioIoConfigEvent aidl) {
     switch (aidl) {
         case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
@@ -312,8 +284,8 @@
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
-        audio_io_config_event legacy) {
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
+        audio_io_config_event_t legacy) {
     switch (legacy) {
         case AUDIO_OUTPUT_REGISTERED:
             return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
@@ -393,81 +365,1029 @@
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
-        media::audio::common::AudioFormat aidl) {
-    // This relies on AudioFormat being kept in sync with audio_format_t.
-    static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
-    return static_cast<audio_format_t>(aidl);
+namespace {
+
+namespace detail {
+using AudioChannelBitPair = std::pair<audio_channel_mask_t, int>;
+using AudioChannelBitPairs = std::vector<AudioChannelBitPair>;
+using AudioChannelPair = std::pair<audio_channel_mask_t, AudioChannelLayout>;
+using AudioChannelPairs = std::vector<AudioChannelPair>;
+using AudioDevicePair = std::pair<audio_devices_t, AudioDeviceDescription>;
+using AudioDevicePairs = std::vector<AudioDevicePair>;
+using AudioFormatPair = std::pair<audio_format_t, AudioFormatDescription>;
+using AudioFormatPairs = std::vector<AudioFormatPair>;
 }
 
-ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+const detail::AudioChannelBitPairs& getInAudioChannelBits() {
+    static const detail::AudioChannelBitPairs pairs = {
+        { AUDIO_CHANNEL_IN_LEFT, AudioChannelLayout::CHANNEL_FRONT_LEFT },
+        { AUDIO_CHANNEL_IN_RIGHT, AudioChannelLayout::CHANNEL_FRONT_RIGHT },
+        // AUDIO_CHANNEL_IN_FRONT is at the end
+        { AUDIO_CHANNEL_IN_BACK, AudioChannelLayout::CHANNEL_BACK_CENTER },
+        // AUDIO_CHANNEL_IN_*_PROCESSED not supported
+        // AUDIO_CHANNEL_IN_PRESSURE not supported
+        // AUDIO_CHANNEL_IN_*_AXIS not supported
+        // AUDIO_CHANNEL_IN_VOICE_* not supported
+        { AUDIO_CHANNEL_IN_BACK_LEFT, AudioChannelLayout::CHANNEL_BACK_LEFT },
+        { AUDIO_CHANNEL_IN_BACK_RIGHT, AudioChannelLayout::CHANNEL_BACK_RIGHT },
+        { AUDIO_CHANNEL_IN_CENTER, AudioChannelLayout::CHANNEL_FRONT_CENTER },
+        { AUDIO_CHANNEL_IN_LOW_FREQUENCY, AudioChannelLayout::CHANNEL_LOW_FREQUENCY },
+        { AUDIO_CHANNEL_IN_TOP_LEFT, AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT },
+        { AUDIO_CHANNEL_IN_TOP_RIGHT, AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT },
+        // When going from aidl to legacy, IN_CENTER is used
+        { AUDIO_CHANNEL_IN_FRONT, AudioChannelLayout::CHANNEL_FRONT_CENTER }
+    };
+    return pairs;
+}
+
+const detail::AudioChannelPairs& getInAudioChannelPairs() {
+    static const detail::AudioChannelPairs pairs = {
+#define DEFINE_INPUT_LAYOUT(n)                                                 \
+            {                                                                  \
+                AUDIO_CHANNEL_IN_##n,                                          \
+                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>( \
+                        AudioChannelLayout::LAYOUT_##n)                        \
+            }
+
+        DEFINE_INPUT_LAYOUT(MONO),
+        DEFINE_INPUT_LAYOUT(STEREO),
+        DEFINE_INPUT_LAYOUT(FRONT_BACK),
+        // AUDIO_CHANNEL_IN_6 not supported
+        DEFINE_INPUT_LAYOUT(2POINT0POINT2),
+        DEFINE_INPUT_LAYOUT(2POINT1POINT2),
+        DEFINE_INPUT_LAYOUT(3POINT0POINT2),
+        DEFINE_INPUT_LAYOUT(3POINT1POINT2),
+        DEFINE_INPUT_LAYOUT(5POINT1)
+#undef DEFINE_INPUT_LAYOUT
+    };
+    return pairs;
+}
+
+const detail::AudioChannelBitPairs& getOutAudioChannelBits() {
+    static const detail::AudioChannelBitPairs pairs = {
+#define DEFINE_OUTPUT_BITS(n)                                                  \
+            { AUDIO_CHANNEL_OUT_##n, AudioChannelLayout::CHANNEL_##n }
+
+        DEFINE_OUTPUT_BITS(FRONT_LEFT),
+        DEFINE_OUTPUT_BITS(FRONT_RIGHT),
+        DEFINE_OUTPUT_BITS(FRONT_CENTER),
+        DEFINE_OUTPUT_BITS(LOW_FREQUENCY),
+        DEFINE_OUTPUT_BITS(BACK_LEFT),
+        DEFINE_OUTPUT_BITS(BACK_RIGHT),
+        DEFINE_OUTPUT_BITS(FRONT_LEFT_OF_CENTER),
+        DEFINE_OUTPUT_BITS(FRONT_RIGHT_OF_CENTER),
+        DEFINE_OUTPUT_BITS(BACK_CENTER),
+        DEFINE_OUTPUT_BITS(SIDE_LEFT),
+        DEFINE_OUTPUT_BITS(SIDE_RIGHT),
+        DEFINE_OUTPUT_BITS(TOP_CENTER),
+        DEFINE_OUTPUT_BITS(TOP_FRONT_LEFT),
+        DEFINE_OUTPUT_BITS(TOP_FRONT_CENTER),
+        DEFINE_OUTPUT_BITS(TOP_FRONT_RIGHT),
+        DEFINE_OUTPUT_BITS(TOP_BACK_LEFT),
+        DEFINE_OUTPUT_BITS(TOP_BACK_CENTER),
+        DEFINE_OUTPUT_BITS(TOP_BACK_RIGHT),
+        DEFINE_OUTPUT_BITS(TOP_SIDE_LEFT),
+        DEFINE_OUTPUT_BITS(TOP_SIDE_RIGHT),
+        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_LEFT),
+        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_CENTER),
+        DEFINE_OUTPUT_BITS(BOTTOM_FRONT_RIGHT),
+        DEFINE_OUTPUT_BITS(LOW_FREQUENCY_2),
+        DEFINE_OUTPUT_BITS(FRONT_WIDE_LEFT),
+        DEFINE_OUTPUT_BITS(FRONT_WIDE_RIGHT),
+#undef DEFINE_OUTPUT_BITS
+        { AUDIO_CHANNEL_OUT_HAPTIC_A, AudioChannelLayout::CHANNEL_HAPTIC_A },
+        { AUDIO_CHANNEL_OUT_HAPTIC_B, AudioChannelLayout::CHANNEL_HAPTIC_B }
+    };
+    return pairs;
+}
+
+const detail::AudioChannelPairs& getOutAudioChannelPairs() {
+    static const detail::AudioChannelPairs pairs = {
+#define DEFINE_OUTPUT_LAYOUT(n)                                                \
+            {                                                                  \
+                AUDIO_CHANNEL_OUT_##n,                                         \
+                AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>( \
+                        AudioChannelLayout::LAYOUT_##n)                        \
+            }
+
+        DEFINE_OUTPUT_LAYOUT(MONO),
+        DEFINE_OUTPUT_LAYOUT(STEREO),
+        DEFINE_OUTPUT_LAYOUT(2POINT1),
+        DEFINE_OUTPUT_LAYOUT(TRI),
+        DEFINE_OUTPUT_LAYOUT(TRI_BACK),
+        DEFINE_OUTPUT_LAYOUT(3POINT1),
+        DEFINE_OUTPUT_LAYOUT(2POINT0POINT2),
+        DEFINE_OUTPUT_LAYOUT(2POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(3POINT0POINT2),
+        DEFINE_OUTPUT_LAYOUT(3POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(QUAD),
+        DEFINE_OUTPUT_LAYOUT(QUAD_SIDE),
+        DEFINE_OUTPUT_LAYOUT(SURROUND),
+        DEFINE_OUTPUT_LAYOUT(PENTA),
+        DEFINE_OUTPUT_LAYOUT(5POINT1),
+        DEFINE_OUTPUT_LAYOUT(5POINT1_SIDE),
+        DEFINE_OUTPUT_LAYOUT(5POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(5POINT1POINT4),
+        DEFINE_OUTPUT_LAYOUT(6POINT1),
+        DEFINE_OUTPUT_LAYOUT(7POINT1),
+        DEFINE_OUTPUT_LAYOUT(7POINT1POINT2),
+        DEFINE_OUTPUT_LAYOUT(7POINT1POINT4),
+        DEFINE_OUTPUT_LAYOUT(13POINT_360RA),
+        DEFINE_OUTPUT_LAYOUT(22POINT2),
+        DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_A),
+        DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_A),
+        DEFINE_OUTPUT_LAYOUT(HAPTIC_AB),
+        DEFINE_OUTPUT_LAYOUT(MONO_HAPTIC_AB),
+        DEFINE_OUTPUT_LAYOUT(STEREO_HAPTIC_AB)
+#undef DEFINE_OUTPUT_LAYOUT
+    };
+    return pairs;
+}
+
+const detail::AudioChannelPairs& getVoiceAudioChannelPairs() {
+    static const detail::AudioChannelPairs pairs = {
+#define DEFINE_VOICE_LAYOUT(n)                                                 \
+            {                                                                  \
+                AUDIO_CHANNEL_IN_VOICE_##n,                                    \
+                AudioChannelLayout::make<AudioChannelLayout::Tag::voiceMask>(  \
+                        AudioChannelLayout::VOICE_##n)                         \
+            }
+        DEFINE_VOICE_LAYOUT(UPLINK_MONO),
+        DEFINE_VOICE_LAYOUT(DNLINK_MONO),
+        DEFINE_VOICE_LAYOUT(CALL_MONO)
+#undef DEFINE_VOICE_LAYOUT
+    };
+    return pairs;
+}
+
+AudioDeviceDescription make_AudioDeviceDescription(AudioDeviceType type,
+        const std::string& connection = "") {
+    AudioDeviceDescription result;
+    result.type = type;
+    result.connection = connection;
+    return result;
+}
+
+void append_AudioDeviceDescription(detail::AudioDevicePairs& pairs,
+        audio_devices_t inputType, audio_devices_t outputType,
+        AudioDeviceType inType, AudioDeviceType outType,
+        const std::string& connection = "") {
+    pairs.push_back(std::make_pair(inputType, make_AudioDeviceDescription(inType, connection)));
+    pairs.push_back(std::make_pair(outputType, make_AudioDeviceDescription(outType, connection)));
+}
+
+const detail::AudioDevicePairs& getAudioDevicePairs() {
+    static const detail::AudioDevicePairs pairs = []() {
+        detail::AudioDevicePairs pairs = {{
+            {
+                AUDIO_DEVICE_NONE, AudioDeviceDescription{}
+            },
+            {
+                AUDIO_DEVICE_OUT_EARPIECE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER_EARPIECE)
+            },
+            {
+                AUDIO_DEVICE_OUT_SPEAKER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER)
+            },
+            {
+                AUDIO_DEVICE_OUT_WIRED_HEADPHONE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_HEADPHONE,
+                        AudioDeviceDescription::CONNECTION_ANALOG())
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_SCO, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_DEVICE,
+                        AudioDeviceDescription::CONNECTION_BT_SCO())
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_CARKIT,
+                        AudioDeviceDescription::CONNECTION_BT_SCO())
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_HEADPHONE,
+                        AudioDeviceDescription::CONNECTION_BT_A2DP())
+            },
+            {
+                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER,
+                        AudioDeviceDescription::CONNECTION_BT_A2DP())
+            },
+            {
+                AUDIO_DEVICE_OUT_TELEPHONY_TX, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_TELEPHONY_TX)
+            },
+            {
+                AUDIO_DEVICE_OUT_AUX_LINE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_LINE_AUX)
+            },
+            {
+                AUDIO_DEVICE_OUT_SPEAKER_SAFE, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER_SAFE)
+            },
+            {
+                AUDIO_DEVICE_OUT_HEARING_AID, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_HEARING_AID,
+                        AudioDeviceDescription::CONNECTION_WIRELESS())
+            },
+            {
+                AUDIO_DEVICE_OUT_ECHO_CANCELLER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_ECHO_CANCELLER)
+            },
+            {
+                AUDIO_DEVICE_OUT_BLE_SPEAKER, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_SPEAKER,
+                        AudioDeviceDescription::CONNECTION_BT_LE())
+            },
+            {
+                AUDIO_DEVICE_OUT_BLE_BROADCAST, make_AudioDeviceDescription(
+                        AudioDeviceType::OUT_BROADCAST,
+                        AudioDeviceDescription::CONNECTION_BT_LE())
+            },
+            // AUDIO_DEVICE_IN_AMBIENT and IN_COMMUNICATION are removed since they were deprecated.
+            {
+                AUDIO_DEVICE_IN_BUILTIN_MIC, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_MICROPHONE)
+            },
+            {
+                AUDIO_DEVICE_IN_BACK_MIC, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_MICROPHONE_BACK)
+            },
+            {
+                AUDIO_DEVICE_IN_TELEPHONY_RX, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_TELEPHONY_RX)
+            },
+            {
+                AUDIO_DEVICE_IN_TV_TUNER, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_TV_TUNER)
+            },
+            {
+                AUDIO_DEVICE_IN_LOOPBACK, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_LOOPBACK)
+            },
+            {
+                AUDIO_DEVICE_IN_BLUETOOTH_BLE, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_DEVICE,
+                        AudioDeviceDescription::CONNECTION_BT_LE())
+            },
+            {
+                AUDIO_DEVICE_IN_ECHO_REFERENCE, make_AudioDeviceDescription(
+                        AudioDeviceType::IN_ECHO_REFERENCE)
+            }
+        }};
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_DEFAULT, AUDIO_DEVICE_OUT_DEFAULT,
+                AudioDeviceType::IN_DEFAULT, AudioDeviceType::OUT_DEFAULT);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                AudioDeviceDescription::CONNECTION_ANALOG());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                AudioDeviceDescription::CONNECTION_BT_SCO());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_HDMI, AUDIO_DEVICE_OUT_HDMI,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_HDMI());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+                AudioDeviceType::IN_SUBMIX, AudioDeviceType::OUT_SUBMIX);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET, AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET,
+                AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
+                AudioDeviceDescription::CONNECTION_ANALOG());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET,
+                AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
+                AudioDeviceDescription::CONNECTION_USB());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_USB_ACCESSORY, AUDIO_DEVICE_OUT_USB_ACCESSORY,
+                AudioDeviceType::IN_ACCESSORY, AudioDeviceType::OUT_ACCESSORY,
+                AudioDeviceDescription::CONNECTION_USB());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_OUT_USB_DEVICE,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_USB());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_FM_TUNER, AUDIO_DEVICE_OUT_FM,
+                AudioDeviceType::IN_FM_TUNER, AudioDeviceType::OUT_FM);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_LINE, AUDIO_DEVICE_OUT_LINE,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_ANALOG());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_SPDIF, AUDIO_DEVICE_OUT_SPDIF,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_SPDIF());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BLUETOOTH_A2DP, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_BT_A2DP());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_IP, AUDIO_DEVICE_OUT_IP,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_IP_V4());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BUS, AUDIO_DEVICE_OUT_BUS,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_BUS());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_PROXY, AUDIO_DEVICE_OUT_PROXY,
+                AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY);
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_OUT_USB_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                AudioDeviceDescription::CONNECTION_USB());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_HDMI_ARC, AUDIO_DEVICE_OUT_HDMI_ARC,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_HDMI_ARC());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_HDMI_EARC, AUDIO_DEVICE_OUT_HDMI_EARC,
+                AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
+                AudioDeviceDescription::CONNECTION_HDMI_EARC());
+        append_AudioDeviceDescription(pairs,
+                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_HEADSET,
+                AudioDeviceType::IN_HEADSET, AudioDeviceType::OUT_HEADSET,
+                AudioDeviceDescription::CONNECTION_BT_LE());
+        return pairs;
+    }();
+    return pairs;
+}
+
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
+    AudioFormatDescription result;
+    result.type = type;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
+    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
+    result.pcm = pcm;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
+    AudioFormatDescription result;
+    result.encoding = encoding;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType transport,
+        const std::string& encoding) {
+    auto result = make_AudioFormatDescription(encoding);
+    result.pcm = transport;
+    return result;
+}
+
+const detail::AudioFormatPairs& getAudioFormatPairs() {
+    static const detail::AudioFormatPairs pairs = {{
+        {
+            AUDIO_FORMAT_INVALID,
+            make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID)
+        },
+        {
+            AUDIO_FORMAT_DEFAULT, AudioFormatDescription{}
+        },
+        {
+            AUDIO_FORMAT_PCM_16_BIT, make_AudioFormatDescription(PcmType::INT_16_BIT)
+        },
+        {
+            AUDIO_FORMAT_PCM_8_BIT, make_AudioFormatDescription(PcmType::UINT_8_BIT)
+        },
+        {
+            AUDIO_FORMAT_PCM_32_BIT, make_AudioFormatDescription(PcmType::INT_32_BIT)
+        },
+        {
+            AUDIO_FORMAT_PCM_8_24_BIT, make_AudioFormatDescription(PcmType::FIXED_Q_8_24)
+        },
+        {
+            AUDIO_FORMAT_PCM_FLOAT, make_AudioFormatDescription(PcmType::FLOAT_32_BIT)
+        },
+        {
+            AUDIO_FORMAT_PCM_24_BIT_PACKED, make_AudioFormatDescription(PcmType::INT_24_BIT)
+        },
+        {
+            AUDIO_FORMAT_MP3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEG)
+        },
+        {
+            AUDIO_FORMAT_AMR_NB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AMR_NB)
+        },
+        {
+            AUDIO_FORMAT_AMR_WB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AMR_WB)
+        },
+        {
+            AUDIO_FORMAT_AAC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_MP4)
+        },
+        {
+            AUDIO_FORMAT_AAC_MAIN, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_MAIN)
+        },
+        {
+            AUDIO_FORMAT_AAC_LC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LC)
+        },
+        {
+            AUDIO_FORMAT_AAC_SSR, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_SSR)
+        },
+        {
+            AUDIO_FORMAT_AAC_LTP, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LTP)
+        },
+        {
+            AUDIO_FORMAT_AAC_HE_V1, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_HE_V1)
+        },
+        {
+            AUDIO_FORMAT_AAC_SCALABLE,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE)
+        },
+        {
+            AUDIO_FORMAT_AAC_ERLC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ERLC)
+        },
+        {
+            AUDIO_FORMAT_AAC_LD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LD)
+        },
+        {
+            AUDIO_FORMAT_AAC_HE_V2, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_HE_V2)
+        },
+        {
+            AUDIO_FORMAT_AAC_ELD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ELD)
+        },
+        {
+            AUDIO_FORMAT_AAC_XHE, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_XHE)
+        },
+        // AUDIO_FORMAT_HE_AAC_V1 and HE_AAC_V2 are removed since they were deprecated a long
+        // time ago.
+        {
+            AUDIO_FORMAT_VORBIS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_VORBIS)
+        },
+        {
+            AUDIO_FORMAT_OPUS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_OPUS)
+        },
+        {
+            AUDIO_FORMAT_AC3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AC3)
+        },
+        {
+            AUDIO_FORMAT_E_AC3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EAC3)
+        },
+        {
+            AUDIO_FORMAT_E_AC3_JOC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EAC3_JOC)
+        },
+        {
+            AUDIO_FORMAT_DTS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DTS)
+        },
+        {
+            AUDIO_FORMAT_DTS_HD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DTS_HD)
+        },
+        // In the future, we would like to represent encapsulated bitstreams as
+        // nested AudioFormatDescriptions. The legacy 'AUDIO_FORMAT_IEC61937' type doesn't
+        // specify the format of the encapsulated bitstream.
+        {
+            AUDIO_FORMAT_IEC61937,
+            make_AudioFormatDescription(PcmType::INT_16_BIT, MEDIA_MIMETYPE_AUDIO_IEC61937)
+        },
+        {
+            AUDIO_FORMAT_DOLBY_TRUEHD,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD)
+        },
+        {
+            AUDIO_FORMAT_EVRC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRC)
+        },
+        {
+            AUDIO_FORMAT_EVRCB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRCB)
+        },
+        {
+            AUDIO_FORMAT_EVRCWB, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRCWB)
+        },
+        {
+            AUDIO_FORMAT_EVRCNW, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_EVRCNW)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADIF, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADIF)
+        },
+        {
+            AUDIO_FORMAT_WMA, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_WMA)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_WMA_PRO, make_AudioFormatDescription("audio/x-ms-wma.pro")
+        },
+        {
+            AUDIO_FORMAT_AMR_WB_PLUS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS)
+        },
+        {
+            AUDIO_FORMAT_MP2, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II)
+        },
+        {
+            AUDIO_FORMAT_QCELP, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_QCELP)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_DSD, make_AudioFormatDescription("audio/vnd.sony.dsd")
+        },
+        {
+            AUDIO_FORMAT_FLAC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_FLAC)
+        },
+        {
+            AUDIO_FORMAT_ALAC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_ALAC)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_APE, make_AudioFormatDescription("audio/x-ape")
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_MAIN,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_LC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_SSR,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_LTP,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_HE_V1,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_SCALABLE,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_ERLC,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_LD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_HE_V2,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_ELD,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD)
+        },
+        {
+            AUDIO_FORMAT_AAC_ADTS_XHE,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE)
+        },
+        {
+            // Note: not in the IANA registry. "vnd.octel.sbc" is not BT SBC.
+            AUDIO_FORMAT_SBC, make_AudioFormatDescription("audio/x-sbc")
+        },
+        {
+            AUDIO_FORMAT_APTX, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_APTX)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_APTX_HD, make_AudioFormatDescription("audio/vnd.qcom.aptx.hd")
+        },
+        {
+            AUDIO_FORMAT_AC4, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AC4)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_LDAC, make_AudioFormatDescription("audio/vnd.sony.ldac")
+        },
+        {
+            AUDIO_FORMAT_MAT, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_MAT_1_0,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT + std::string(".1.0"))
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_MAT_2_0,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT + std::string(".2.0"))
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_MAT_2_1,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DOLBY_MAT + std::string(".2.1"))
+        },
+        {
+            AUDIO_FORMAT_AAC_LATM, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC)
+        },
+        {
+            AUDIO_FORMAT_AAC_LATM_LC, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC)
+        },
+        {
+            AUDIO_FORMAT_AAC_LATM_HE_V1,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1)
+        },
+        {
+            AUDIO_FORMAT_AAC_LATM_HE_V2,
+            make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2)
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_CELT, make_AudioFormatDescription("audio/x-celt")
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_APTX_ADAPTIVE, make_AudioFormatDescription("audio/vnd.qcom.aptx.adaptive")
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_LHDC, make_AudioFormatDescription("audio/vnd.savitech.lhdc")
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_LHDC_LL, make_AudioFormatDescription("audio/vnd.savitech.lhdc.ll")
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_APTX_TWSP, make_AudioFormatDescription("audio/vnd.qcom.aptx.twsp")
+        },
+        {
+            // Note: not in the IANA registry.
+            AUDIO_FORMAT_LC3, make_AudioFormatDescription("audio/x-lc3")
+        },
+        {
+            AUDIO_FORMAT_MPEGH, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1)
+        },
+        {
+            AUDIO_FORMAT_MPEGH_BL_L3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3)
+        },
+        {
+            AUDIO_FORMAT_MPEGH_BL_L4, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4)
+        },
+        {
+            AUDIO_FORMAT_MPEGH_LC_L3, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3)
+        },
+        {
+            AUDIO_FORMAT_MPEGH_LC_L4, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4)
+        },
+        {
+            AUDIO_FORMAT_IEC60958,
+            make_AudioFormatDescription(PcmType::INT_24_BIT, MEDIA_MIMETYPE_AUDIO_IEC60958)
+        },
+        {
+            AUDIO_FORMAT_DTS_UHD, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DTS_UHD)
+        },
+        {
+            AUDIO_FORMAT_DRA, make_AudioFormatDescription(MEDIA_MIMETYPE_AUDIO_DRA)
+        },
+    }};
+    return pairs;
+}
+
+template<typename S, typename T>
+std::unordered_map<S, T> make_DirectMap(const std::vector<std::pair<S, T>>& v) {
+    std::unordered_map<S, T> result(v.begin(), v.end());
+    LOG_ALWAYS_FATAL_IF(result.size() != v.size(), "Duplicate key elements detected");
+    return result;
+}
+
+template<typename S, typename T>
+std::unordered_map<S, T> make_DirectMap(
+        const std::vector<std::pair<S, T>>& v1, const std::vector<std::pair<S, T>>& v2) {
+    std::unordered_map<S, T> result(v1.begin(), v1.end());
+    LOG_ALWAYS_FATAL_IF(result.size() != v1.size(), "Duplicate key elements detected in v1");
+    result.insert(v2.begin(), v2.end());
+    LOG_ALWAYS_FATAL_IF(result.size() != v1.size() + v2.size(),
+            "Duplicate key elements detected in v1+v2");
+    return result;
+}
+
+template<typename S, typename T>
+std::unordered_map<T, S> make_ReverseMap(const std::vector<std::pair<S, T>>& v) {
+    std::unordered_map<T, S> result;
+    std::transform(v.begin(), v.end(), std::inserter(result, result.begin()),
+            [](const std::pair<S, T>& p) {
+                return std::make_pair(p.second, p.first);
+            });
+    LOG_ALWAYS_FATAL_IF(result.size() != v.size(), "Duplicate key elements detected");
+    return result;
+}
+
+}  // namespace
+
+audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+        int aidlLayout, bool isInput) {
+    auto& bitMapping = isInput ? getInAudioChannelBits() : getOutAudioChannelBits();
+    const int aidlLayoutInitial = aidlLayout; // for error message
+    audio_channel_mask_t legacy = AUDIO_CHANNEL_NONE;
+    for (const auto& bitPair : bitMapping) {
+        if ((aidlLayout & bitPair.second) == bitPair.second) {
+            legacy = static_cast<audio_channel_mask_t>(legacy | bitPair.first);
+            aidlLayout &= ~bitPair.second;
+            if (aidlLayout == 0) {
+                return legacy;
+            }
+        }
+    }
+    ALOGE("%s: aidl layout 0x%x contains bits 0x%x that have no match to legacy %s bits",
+            __func__, aidlLayoutInitial, aidlLayout, isInput ? "input" : "output");
+    return AUDIO_CHANNEL_NONE;
+}
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+        const AudioChannelLayout& aidl, bool isInput) {
+    using ReverseMap = std::unordered_map<AudioChannelLayout, audio_channel_mask_t>;
+    using Tag = AudioChannelLayout::Tag;
+    static const ReverseMap mIn = make_ReverseMap(getInAudioChannelPairs());
+    static const ReverseMap mOut = make_ReverseMap(getOutAudioChannelPairs());
+    static const ReverseMap mVoice = make_ReverseMap(getVoiceAudioChannelPairs());
+
+    auto convert = [](const AudioChannelLayout& aidl, const ReverseMap& m,
+            const char* func, const char* type) -> ConversionResult<audio_channel_mask_t> {
+        if (auto it = m.find(aidl); it != m.end()) {
+            return it->second;
+        } else {
+            ALOGW("%s: no legacy %s audio_channel_mask_t found for %s", func, type,
+                    aidl.toString().c_str());
+            return unexpected(BAD_VALUE);
+        }
+    };
+
+    switch (aidl.getTag()) {
+        case Tag::none:
+            return AUDIO_CHANNEL_NONE;
+        case Tag::invalid:
+            return AUDIO_CHANNEL_INVALID;
+        case Tag::indexMask:
+            // Index masks do not have pre-defined values.
+            if (const int bits = aidl.get<Tag::indexMask>();
+                    __builtin_popcount(bits) != 0 &&
+                    __builtin_popcount(bits) <= AUDIO_CHANNEL_COUNT_MAX) {
+                return audio_channel_mask_from_representation_and_bits(
+                        AUDIO_CHANNEL_REPRESENTATION_INDEX, bits);
+            } else {
+                ALOGE("%s: invalid indexMask value 0x%x in %s",
+                        __func__, bits, aidl.toString().c_str());
+                return unexpected(BAD_VALUE);
+            }
+        case Tag::layoutMask:
+            // The fast path is to find a direct match for some known layout mask.
+            if (const auto layoutMatch = convert(aidl, isInput ? mIn : mOut, __func__,
+                    isInput ? "input" : "output");
+                    layoutMatch.ok()) {
+                return layoutMatch;
+            }
+            // If a match for a predefined layout wasn't found, make a custom one from bits.
+            if (audio_channel_mask_t bitMask =
+                    aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+                            aidl.get<Tag::layoutMask>(), isInput);
+                    bitMask != AUDIO_CHANNEL_NONE) {
+                return bitMask;
+            }
+            return unexpected(BAD_VALUE);
+        case Tag::voiceMask:
+            return convert(aidl, mVoice, __func__, "voice");
+    }
+    ALOGE("%s: unexpected tag value %d", __func__, aidl.getTag());
+    return unexpected(BAD_VALUE);
+}
+
+int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+        audio_channel_mask_t legacy, bool isInput) {
+    auto& bitMapping = isInput ? getInAudioChannelBits() : getOutAudioChannelBits();
+    const int legacyInitial = legacy; // for error message
+    int aidlLayout = 0;
+    for (const auto& bitPair : bitMapping) {
+        if ((legacy & bitPair.first) == bitPair.first) {
+            aidlLayout |= bitPair.second;
+            legacy = static_cast<audio_channel_mask_t>(legacy & ~bitPair.first);
+            if (legacy == 0) {
+                return aidlLayout;
+            }
+        }
+    }
+    ALOGE("%s: legacy %s audio_channel_mask_t 0x%x contains unrecognized bits 0x%x",
+            __func__, isInput ? "input" : "output", legacyInitial, legacy);
+    return 0;
+}
+
+ConversionResult<AudioChannelLayout> legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+        audio_channel_mask_t legacy, bool isInput) {
+    using DirectMap = std::unordered_map<audio_channel_mask_t, AudioChannelLayout>;
+    using Tag = AudioChannelLayout::Tag;
+    static const DirectMap mInAndVoice = make_DirectMap(
+            getInAudioChannelPairs(), getVoiceAudioChannelPairs());
+    static const DirectMap mOut = make_DirectMap(getOutAudioChannelPairs());
+
+    auto convert = [](const audio_channel_mask_t legacy, const DirectMap& m,
+            const char* func, const char* type) -> ConversionResult<AudioChannelLayout> {
+        if (auto it = m.find(legacy); it != m.end()) {
+            return it->second;
+        } else {
+            ALOGW("%s: no AudioChannelLayout found for legacy %s audio_channel_mask_t value 0x%x",
+                    func, type, legacy);
+            return unexpected(BAD_VALUE);
+        }
+    };
+
+    if (legacy == AUDIO_CHANNEL_NONE) {
+        return AudioChannelLayout{};
+    } else if (legacy == AUDIO_CHANNEL_INVALID) {
+        return AudioChannelLayout::make<Tag::invalid>(0);
+    }
+
+    const audio_channel_representation_t repr = audio_channel_mask_get_representation(legacy);
+    if (repr == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+        if (audio_channel_mask_is_valid(legacy)) {
+            const int indexMask = VALUE_OR_RETURN(
+                    convertIntegral<int>(audio_channel_mask_get_bits(legacy)));
+            return AudioChannelLayout::make<Tag::indexMask>(indexMask);
+        } else {
+            ALOGE("%s: legacy audio_channel_mask_t value 0x%x is invalid", __func__, legacy);
+            return unexpected(BAD_VALUE);
+        }
+    } else if (repr == AUDIO_CHANNEL_REPRESENTATION_POSITION) {
+        // The fast path is to find a direct match for some known layout mask.
+        if (const auto layoutMatch = convert(legacy, isInput ? mInAndVoice : mOut, __func__,
+                isInput ? "input / voice" : "output");
+                layoutMatch.ok()) {
+            return layoutMatch;
+        }
+        // If a match for a predefined layout wasn't found, make a custom one from bits,
+        // rejecting those with voice channel bits.
+        if (!isInput ||
+                (legacy & (AUDIO_CHANNEL_IN_VOICE_UPLINK | AUDIO_CHANNEL_IN_VOICE_DNLINK)) == 0) {
+            if (int bitMaskLayout =
+                    legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+                            legacy, isInput);
+                    bitMaskLayout != 0) {
+                return AudioChannelLayout::make<Tag::layoutMask>(bitMaskLayout);
+            }
+        } else {
+            ALOGE("%s: legacy audio_channel_mask_t value 0x%x contains voice bits",
+                    __func__, legacy);
+        }
+        return unexpected(BAD_VALUE);
+    }
+
+    ALOGE("%s: unknown representation %d in audio_channel_mask_t value 0x%x",
+            __func__, repr, legacy);
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
+        const AudioDeviceDescription& aidl) {
+    static const std::unordered_map<AudioDeviceDescription, audio_devices_t> m =
+            make_ReverseMap(getAudioDevicePairs());
+    if (auto it = m.find(aidl); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no legacy audio_devices_t found for %s", __func__, aidl.toString().c_str());
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<AudioDeviceDescription> legacy2aidl_audio_devices_t_AudioDeviceDescription(
+        audio_devices_t legacy) {
+    static const std::unordered_map<audio_devices_t, AudioDeviceDescription> m =
+            make_DirectMap(getAudioDevicePairs());
+    if (auto it = m.find(legacy); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no AudioDeviceDescription found for legacy audio_devices_t value 0x%x",
+                __func__, legacy);
+        return unexpected(BAD_VALUE);
+    }
+}
+
+status_t aidl2legacy_AudioDevice_audio_device(
+        const AudioDevice& aidl,
+        audio_devices_t* legacyType, char* legacyAddress) {
+    *legacyType = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
+    return aidl2legacy_string(
+                    aidl.address.get<AudioDeviceAddress::id>(),
+                    legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
+}
+
+status_t aidl2legacy_AudioDevice_audio_device(
+        const AudioDevice& aidl,
+        audio_devices_t* legacyType, String8* legacyAddress) {
+    *legacyType = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
+    *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(
+                    aidl.address.get<AudioDeviceAddress::id>()));
+    return OK;
+}
+
+status_t aidl2legacy_AudioDevice_audio_device(
+        const AudioDevice& aidl,
+        audio_devices_t* legacyType, std::string* legacyAddress) {
+    *legacyType = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
+    *legacyAddress = aidl.address.get<AudioDeviceAddress::id>();
+    return OK;
+}
+
+ConversionResult<AudioDevice> legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const char* legacyAddress) {
+    AudioDevice aidl;
+    aidl.type = VALUE_OR_RETURN(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
+    const std::string aidl_id = VALUE_OR_RETURN(
+            legacy2aidl_string(legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
+    return aidl;
+}
+
+ConversionResult<AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const String8& legacyAddress) {
+    AudioDevice aidl;
+    aidl.type = VALUE_OR_RETURN(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
+    const std::string aidl_id = VALUE_OR_RETURN(
+            legacy2aidl_String8_string(legacyAddress));
+    aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
+    return aidl;
+}
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
+        const AudioFormatDescription& aidl) {
+    static const std::unordered_map<AudioFormatDescription, audio_format_t> m =
+            make_ReverseMap(getAudioFormatPairs());
+    if (auto it = m.find(aidl); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no legacy audio_format_t found for %s", __func__, aidl.toString().c_str());
+        return unexpected(BAD_VALUE);
+    }
+}
+
+ConversionResult<AudioFormatDescription> legacy2aidl_audio_format_t_AudioFormatDescription(
         audio_format_t legacy) {
-    // This relies on AudioFormat being kept in sync with audio_format_t.
-    static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
-    return static_cast<media::audio::common::AudioFormat>(legacy);
+    static const std::unordered_map<audio_format_t, AudioFormatDescription> m =
+            make_DirectMap(getAudioFormatPairs());
+    if (auto it = m.find(legacy); it != m.end()) {
+        return it->second;
+    } else {
+        ALOGE("%s: no AudioFormatDescription found for legacy audio_format_t value 0x%x",
+                __func__, legacy);
+        return unexpected(BAD_VALUE);
+    }
 }
 
-ConversionResult<audio_gain_mode_t> aidl2legacy_AudioGainMode_audio_gain_mode_t(media::AudioGainMode aidl) {
+ConversionResult<audio_gain_mode_t> aidl2legacy_AudioGainMode_audio_gain_mode_t(
+        AudioGainMode aidl) {
     switch (aidl) {
-        case media::AudioGainMode::JOINT:
+        case AudioGainMode::JOINT:
             return AUDIO_GAIN_MODE_JOINT;
-        case media::AudioGainMode::CHANNELS:
+        case AudioGainMode::CHANNELS:
             return AUDIO_GAIN_MODE_CHANNELS;
-        case media::AudioGainMode::RAMP:
+        case AudioGainMode::RAMP:
             return AUDIO_GAIN_MODE_RAMP;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioGainMode> legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy) {
+ConversionResult<AudioGainMode> legacy2aidl_audio_gain_mode_t_AudioGainMode(
+        audio_gain_mode_t legacy) {
     switch (legacy) {
         case AUDIO_GAIN_MODE_JOINT:
-            return media::AudioGainMode::JOINT;
+            return AudioGainMode::JOINT;
         case AUDIO_GAIN_MODE_CHANNELS:
-            return media::AudioGainMode::CHANNELS;
+            return AudioGainMode::CHANNELS;
         case AUDIO_GAIN_MODE_RAMP:
-            return media::AudioGainMode::RAMP;
+            return AudioGainMode::RAMP;
     }
     return unexpected(BAD_VALUE);
 }
 
 ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl) {
-    return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, media::AudioGainMode>(
+    return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, AudioGainMode>(
             aidl, aidl2legacy_AudioGainMode_audio_gain_mode_t,
             // AudioGainMode is index-based.
-            indexToEnum_index<media::AudioGainMode>,
+            indexToEnum_index<AudioGainMode>,
             // AUDIO_GAIN_MODE_* constants are mask-based.
             enumToMask_bitmask<audio_gain_mode_t, audio_gain_mode_t>);
 }
 
 ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy) {
-    return convertBitmask<int32_t, audio_gain_mode_t, media::AudioGainMode, audio_gain_mode_t>(
+    return convertBitmask<int32_t, audio_gain_mode_t, AudioGainMode, audio_gain_mode_t>(
             legacy, legacy2aidl_audio_gain_mode_t_AudioGainMode,
             // AUDIO_GAIN_MODE_* constants are mask-based.
             indexToEnum_bitmask<audio_gain_mode_t>,
             // AudioGainMode is index-based.
-            enumToMask_index<int32_t, media::AudioGainMode>);
-}
-
-ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl) {
-    // TODO(ytai): bitfield?
-    return convertReinterpret<audio_devices_t>(aidl);
-}
-
-ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy) {
-    // TODO(ytai): bitfield?
-    return convertReinterpret<int32_t>(legacy);
+            enumToMask_index<int32_t, AudioGainMode>);
 }
 
 ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
-        const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type) {
+        const AudioGainConfig& aidl, bool isInput) {
     audio_gain_config legacy;
     legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
     legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
-    legacy.channel_mask =
-            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
-    const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
-    const bool isJoint = bitmaskIsSet(aidl.mode, media::AudioGainMode::JOINT);
+    legacy.channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+    const bool isJoint = bitmaskIsSet(aidl.mode, AudioGainMode::JOINT);
     size_t numValues = isJoint ? 1
                                : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
                                          : audio_channel_count_from_out_mask(legacy.channel_mask);
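All of the structured conversions introduced above (channel masks, device descriptions, format
descriptions) share one table-driven idiom: a single list of (legacy, AIDL) pairs is turned into a
forward lookup with make_DirectMap() and a reverse lookup with make_ReverseMap(), and duplicate keys
abort at startup. A standalone sketch of that idiom, using illustrative stand-in types rather than
the real framework types:

#include <cassert>
#include <unordered_map>
#include <utility>
#include <vector>

// Stand-in types; the real code maps audio_*_t values to AIDL parcelables.
enum class Legacy { Speaker, Headset };
enum class Aidl { OutSpeaker, OutHeadset };

template <typename S, typename T>
std::unordered_map<S, T> makeDirectMap(const std::vector<std::pair<S, T>>& v) {
    std::unordered_map<S, T> m(v.begin(), v.end());
    assert(m.size() == v.size() && "duplicate keys");   // mirrors LOG_ALWAYS_FATAL_IF
    return m;
}

template <typename S, typename T>
std::unordered_map<T, S> makeReverseMap(const std::vector<std::pair<S, T>>& v) {
    std::unordered_map<T, S> m;
    for (const auto& [legacy, aidl] : v) m.emplace(aidl, legacy);
    assert(m.size() == v.size() && "duplicate values");
    return m;
}

int main() {
    const std::vector<std::pair<Legacy, Aidl>> pairs = {
        {Legacy::Speaker, Aidl::OutSpeaker},
        {Legacy::Headset, Aidl::OutHeadset},
    };
    const auto legacyToAidl = makeDirectMap(pairs);
    const auto aidlToLegacy = makeReverseMap(pairs);
    return (legacyToAidl.at(Legacy::Speaker) == Aidl::OutSpeaker &&
            aidlToLegacy.at(Aidl::OutHeadset) == Legacy::Headset) ? 0 : 1;
}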
@@ -481,14 +1401,13 @@
     return legacy;
 }
 
-ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
-        const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type) {
-    media::AudioGainConfig aidl;
+ConversionResult<AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+        const audio_gain_config& legacy, bool isInput) {
+    AudioGainConfig aidl;
     aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
     aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
-    aidl.channelMask =
-            VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
-    const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+    aidl.channelMask = VALUE_OR_RETURN(
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
     const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
     size_t numValues = isJoint ? 1
                                : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
@@ -502,129 +1421,141 @@
 }
 
 ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
-        media::AudioInputFlags aidl) {
+        AudioInputFlags aidl) {
     switch (aidl) {
-        case media::AudioInputFlags::FAST:
+        case AudioInputFlags::FAST:
             return AUDIO_INPUT_FLAG_FAST;
-        case media::AudioInputFlags::HW_HOTWORD:
+        case AudioInputFlags::HW_HOTWORD:
             return AUDIO_INPUT_FLAG_HW_HOTWORD;
-        case media::AudioInputFlags::RAW:
+        case AudioInputFlags::RAW:
             return AUDIO_INPUT_FLAG_RAW;
-        case media::AudioInputFlags::SYNC:
+        case AudioInputFlags::SYNC:
             return AUDIO_INPUT_FLAG_SYNC;
-        case media::AudioInputFlags::MMAP_NOIRQ:
+        case AudioInputFlags::MMAP_NOIRQ:
             return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
-        case media::AudioInputFlags::VOIP_TX:
+        case AudioInputFlags::VOIP_TX:
             return AUDIO_INPUT_FLAG_VOIP_TX;
-        case media::AudioInputFlags::HW_AV_SYNC:
+        case AudioInputFlags::HW_AV_SYNC:
             return AUDIO_INPUT_FLAG_HW_AV_SYNC;
-        case media::AudioInputFlags::DIRECT:
+        case AudioInputFlags::DIRECT:
             return AUDIO_INPUT_FLAG_DIRECT;
+        case AudioInputFlags::ULTRASOUND:
+            return AUDIO_INPUT_FLAG_ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+ConversionResult<AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
         audio_input_flags_t legacy) {
     switch (legacy) {
         case AUDIO_INPUT_FLAG_NONE:
             break; // shouldn't get here; all values must be listed for -Werror,-Wswitch
         case AUDIO_INPUT_FLAG_FAST:
-            return media::AudioInputFlags::FAST;
+            return AudioInputFlags::FAST;
         case AUDIO_INPUT_FLAG_HW_HOTWORD:
-            return media::AudioInputFlags::HW_HOTWORD;
+            return AudioInputFlags::HW_HOTWORD;
         case AUDIO_INPUT_FLAG_RAW:
-            return media::AudioInputFlags::RAW;
+            return AudioInputFlags::RAW;
         case AUDIO_INPUT_FLAG_SYNC:
-            return media::AudioInputFlags::SYNC;
+            return AudioInputFlags::SYNC;
         case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
-            return media::AudioInputFlags::MMAP_NOIRQ;
+            return AudioInputFlags::MMAP_NOIRQ;
         case AUDIO_INPUT_FLAG_VOIP_TX:
-            return media::AudioInputFlags::VOIP_TX;
+            return AudioInputFlags::VOIP_TX;
         case AUDIO_INPUT_FLAG_HW_AV_SYNC:
-            return media::AudioInputFlags::HW_AV_SYNC;
+            return AudioInputFlags::HW_AV_SYNC;
         case AUDIO_INPUT_FLAG_DIRECT:
-            return media::AudioInputFlags::DIRECT;
+            return AudioInputFlags::DIRECT;
+        case AUDIO_INPUT_FLAG_ULTRASOUND:
+            return AudioInputFlags::ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
 
 ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
-        media::AudioOutputFlags aidl) {
+        AudioOutputFlags aidl) {
     switch (aidl) {
-        case media::AudioOutputFlags::DIRECT:
+        case AudioOutputFlags::DIRECT:
             return AUDIO_OUTPUT_FLAG_DIRECT;
-        case media::AudioOutputFlags::PRIMARY:
+        case AudioOutputFlags::PRIMARY:
             return AUDIO_OUTPUT_FLAG_PRIMARY;
-        case media::AudioOutputFlags::FAST:
+        case AudioOutputFlags::FAST:
             return AUDIO_OUTPUT_FLAG_FAST;
-        case media::AudioOutputFlags::DEEP_BUFFER:
+        case AudioOutputFlags::DEEP_BUFFER:
             return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
-        case media::AudioOutputFlags::COMPRESS_OFFLOAD:
+        case AudioOutputFlags::COMPRESS_OFFLOAD:
             return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
-        case media::AudioOutputFlags::NON_BLOCKING:
+        case AudioOutputFlags::NON_BLOCKING:
             return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
-        case media::AudioOutputFlags::HW_AV_SYNC:
+        case AudioOutputFlags::HW_AV_SYNC:
             return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
-        case media::AudioOutputFlags::TTS:
+        case AudioOutputFlags::TTS:
             return AUDIO_OUTPUT_FLAG_TTS;
-        case media::AudioOutputFlags::RAW:
+        case AudioOutputFlags::RAW:
             return AUDIO_OUTPUT_FLAG_RAW;
-        case media::AudioOutputFlags::SYNC:
+        case AudioOutputFlags::SYNC:
             return AUDIO_OUTPUT_FLAG_SYNC;
-        case media::AudioOutputFlags::IEC958_NONAUDIO:
+        case AudioOutputFlags::IEC958_NONAUDIO:
             return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
-        case media::AudioOutputFlags::DIRECT_PCM:
+        case AudioOutputFlags::DIRECT_PCM:
             return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
-        case media::AudioOutputFlags::MMAP_NOIRQ:
+        case AudioOutputFlags::MMAP_NOIRQ:
             return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
-        case media::AudioOutputFlags::VOIP_RX:
+        case AudioOutputFlags::VOIP_RX:
             return AUDIO_OUTPUT_FLAG_VOIP_RX;
-        case media::AudioOutputFlags::INCALL_MUSIC:
+        case AudioOutputFlags::INCALL_MUSIC:
             return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
-        case media::AudioOutputFlags::GAPLESS_OFFLOAD:
+        case AudioOutputFlags::GAPLESS_OFFLOAD:
             return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
+        case AudioOutputFlags::ULTRASOUND:
+            return AUDIO_OUTPUT_FLAG_ULTRASOUND;
+        case AudioOutputFlags::SPATIALIZER:
+            return AUDIO_OUTPUT_FLAG_SPATIALIZER;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+ConversionResult<AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
         audio_output_flags_t legacy) {
     switch (legacy) {
         case AUDIO_OUTPUT_FLAG_NONE:
             break; // shouldn't get here; all values must be listed for -Werror,-Wswitch
         case AUDIO_OUTPUT_FLAG_DIRECT:
-            return media::AudioOutputFlags::DIRECT;
+            return AudioOutputFlags::DIRECT;
         case AUDIO_OUTPUT_FLAG_PRIMARY:
-            return media::AudioOutputFlags::PRIMARY;
+            return AudioOutputFlags::PRIMARY;
         case AUDIO_OUTPUT_FLAG_FAST:
-            return media::AudioOutputFlags::FAST;
+            return AudioOutputFlags::FAST;
         case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
-            return media::AudioOutputFlags::DEEP_BUFFER;
+            return AudioOutputFlags::DEEP_BUFFER;
         case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
-            return media::AudioOutputFlags::COMPRESS_OFFLOAD;
+            return AudioOutputFlags::COMPRESS_OFFLOAD;
         case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
-            return media::AudioOutputFlags::NON_BLOCKING;
+            return AudioOutputFlags::NON_BLOCKING;
         case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
-            return media::AudioOutputFlags::HW_AV_SYNC;
+            return AudioOutputFlags::HW_AV_SYNC;
         case AUDIO_OUTPUT_FLAG_TTS:
-            return media::AudioOutputFlags::TTS;
+            return AudioOutputFlags::TTS;
         case AUDIO_OUTPUT_FLAG_RAW:
-            return media::AudioOutputFlags::RAW;
+            return AudioOutputFlags::RAW;
         case AUDIO_OUTPUT_FLAG_SYNC:
-            return media::AudioOutputFlags::SYNC;
+            return AudioOutputFlags::SYNC;
         case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
-            return media::AudioOutputFlags::IEC958_NONAUDIO;
+            return AudioOutputFlags::IEC958_NONAUDIO;
         case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
-            return media::AudioOutputFlags::DIRECT_PCM;
+            return AudioOutputFlags::DIRECT_PCM;
         case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
-            return media::AudioOutputFlags::MMAP_NOIRQ;
+            return AudioOutputFlags::MMAP_NOIRQ;
         case AUDIO_OUTPUT_FLAG_VOIP_RX:
-            return media::AudioOutputFlags::VOIP_RX;
+            return AudioOutputFlags::VOIP_RX;
         case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
-            return media::AudioOutputFlags::INCALL_MUSIC;
+            return AudioOutputFlags::INCALL_MUSIC;
         case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
-            return media::AudioOutputFlags::GAPLESS_OFFLOAD;
+            return AudioOutputFlags::GAPLESS_OFFLOAD;
+        case AUDIO_OUTPUT_FLAG_ULTRASOUND:
+            return AudioOutputFlags::ULTRASOUND;
+        case AUDIO_OUTPUT_FLAG_SPATIALIZER:
+            return AudioOutputFlags::SPATIALIZER;
     }
     return unexpected(BAD_VALUE);
 }
@@ -634,9 +1565,9 @@
     using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
 
     LegacyMask converted = VALUE_OR_RETURN(
-            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, media::AudioInputFlags>(
+            (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, AudioInputFlags>(
                     aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
-                    indexToEnum_index<media::AudioInputFlags>,
+                    indexToEnum_index<AudioInputFlags>,
                     enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
     return static_cast<audio_input_flags_t>(converted);
 }
@@ -646,10 +1577,10 @@
     using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
 
     LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
-    return convertBitmask<int32_t, LegacyMask, media::AudioInputFlags, audio_input_flags_t>(
+    return convertBitmask<int32_t, LegacyMask, AudioInputFlags, audio_input_flags_t>(
             legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
             indexToEnum_bitmask<audio_input_flags_t>,
-            enumToMask_index<int32_t, media::AudioInputFlags>);
+            enumToMask_index<int32_t, AudioInputFlags>);
 }
 
 ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
@@ -657,9 +1588,9 @@
     return convertBitmask<audio_output_flags_t,
             int32_t,
             audio_output_flags_t,
-            media::AudioOutputFlags>(
+            AudioOutputFlags>(
             aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
-            indexToEnum_index<media::AudioOutputFlags>,
+            indexToEnum_index<AudioOutputFlags>,
             enumToMask_bitmask<audio_output_flags_t, audio_output_flags_t>);
 }
 
@@ -668,225 +1599,215 @@
     using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
 
     LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
-    return convertBitmask<int32_t, LegacyMask, media::AudioOutputFlags, audio_output_flags_t>(
+    return convertBitmask<int32_t, LegacyMask, AudioOutputFlags, audio_output_flags_t>(
             legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
             indexToEnum_bitmask<audio_output_flags_t>,
-            enumToMask_index<int32_t, media::AudioOutputFlags>);
+            enumToMask_index<int32_t, AudioOutputFlags>);
 }
 
 ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
-        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
+        const AudioIoFlags& aidl, bool isInput) {
     audio_io_flags legacy;
-    Direction dir = VALUE_OR_RETURN(direction(role, type));
-    switch (dir) {
-        case Direction::INPUT: {
-            legacy.input = VALUE_OR_RETURN(
-                    aidl2legacy_int32_t_audio_input_flags_t_mask(
-                            VALUE_OR_RETURN(UNION_GET(aidl, input))));
-        }
-            break;
-
-        case Direction::OUTPUT: {
-            legacy.output = VALUE_OR_RETURN(
-                    aidl2legacy_int32_t_audio_output_flags_t_mask(
-                            VALUE_OR_RETURN(UNION_GET(aidl, output))));
-        }
-            break;
+    if (isInput) {
+        legacy.input = VALUE_OR_RETURN(
+                aidl2legacy_int32_t_audio_input_flags_t_mask(
+                        VALUE_OR_RETURN(UNION_GET(aidl, input))));
+    } else {
+        legacy.output = VALUE_OR_RETURN(
+                aidl2legacy_int32_t_audio_output_flags_t_mask(
+                        VALUE_OR_RETURN(UNION_GET(aidl, output))));
     }
-
     return legacy;
 }
 
-ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
-        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type) {
-    media::AudioIoFlags aidl;
-
-    Direction dir = VALUE_OR_RETURN(direction(role, type));
-    switch (dir) {
-        case Direction::INPUT:
-            UNION_SET(aidl, input,
-                      VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(
-                              legacy.input)));
-            break;
-        case Direction::OUTPUT:
-            UNION_SET(aidl, output,
-                      VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(
-                              legacy.output)));
-            break;
+ConversionResult<AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, bool isInput) {
+    AudioIoFlags aidl;
+    if (isInput) {
+        UNION_SET(aidl, input,
+                VALUE_OR_RETURN(legacy2aidl_audio_input_flags_t_int32_t_mask(legacy.input)));
+    } else {
+        UNION_SET(aidl, output,
+                VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(legacy.output)));
     }
     return aidl;
 }
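
AudioIoFlags is an AIDL union that holds either an input-flags or an output-flags bitfield, and the two conversions above now pick the active member from the caller-supplied isInput instead of deriving the direction from port role and type. Below is a minimal sketch of that tagged-union handling, using std::variant with illustrative wrapper types (InputFlags/OutputFlags) in place of the generated AIDL union and the UNION_SET/UNION_GET macros.

    // Standalone sketch of the AudioIoFlags handling above: the direction (isInput)
    // decides which member of the union is active. InputFlags/OutputFlags are
    // illustrative wrappers, not the generated AIDL types.
    #include <cassert>
    #include <cstdint>
    #include <variant>

    struct InputFlags  { int32_t mask; };
    struct OutputFlags { int32_t mask; };
    using IoFlags = std::variant<InputFlags, OutputFlags>;

    IoFlags makeIoFlags(bool isInput, int32_t mask) {
        if (isInput) {
            return InputFlags{mask};   // analogous to UNION_SET(aidl, input, ...)
        }
        return OutputFlags{mask};      // analogous to UNION_SET(aidl, output, ...)
    }

    int32_t getMask(const IoFlags& flags, bool isInput) {
        // Analogous to UNION_GET(aidl, input/output): asking for the inactive member fails
        // (std::get throws std::bad_variant_access).
        if (isInput) {
            return std::get<InputFlags>(flags).mask;
        }
        return std::get<OutputFlags>(flags).mask;
    }

    int main() {
        IoFlags in = makeIoFlags(true, 0x4);  // e.g. a single input flag bit
        assert(getMask(in, true) == 0x4);
        return 0;
    }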
 
 ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
-        const media::AudioPortConfigDeviceExt& aidl) {
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+        const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlDeviceExt) {
     audio_port_config_device_ext legacy;
-    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
-    legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
-    RETURN_IF_ERROR(aidl2legacy_string(aidl.address, legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+    legacy.hw_module = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidlDeviceExt.hwModule));
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl.device, &legacy.type, legacy.address));
     return legacy;
 }
 
-ConversionResult<media::AudioPortConfigDeviceExt>
-legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
-        const audio_port_config_device_ext& legacy) {
-    media::AudioPortConfigDeviceExt aidl;
-    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
-    aidl.address = VALUE_OR_RETURN(
-            legacy2aidl_string(legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN));
-    return aidl;
+status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+        const audio_port_config_device_ext& legacy,
+        AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+    aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl->device = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    return OK;
 }
 
 ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
-        media::AudioStreamType aidl) {
+        AudioStreamType aidl) {
     switch (aidl) {
-        case media::AudioStreamType::DEFAULT:
+        case AudioStreamType::INVALID:
+            break;  // return error
+        case AudioStreamType::SYS_RESERVED_DEFAULT:
             return AUDIO_STREAM_DEFAULT;
-        case media::AudioStreamType::VOICE_CALL:
+        case AudioStreamType::VOICE_CALL:
             return AUDIO_STREAM_VOICE_CALL;
-        case media::AudioStreamType::SYSTEM:
+        case AudioStreamType::SYSTEM:
             return AUDIO_STREAM_SYSTEM;
-        case media::AudioStreamType::RING:
+        case AudioStreamType::RING:
             return AUDIO_STREAM_RING;
-        case media::AudioStreamType::MUSIC:
+        case AudioStreamType::MUSIC:
             return AUDIO_STREAM_MUSIC;
-        case media::AudioStreamType::ALARM:
+        case AudioStreamType::ALARM:
             return AUDIO_STREAM_ALARM;
-        case media::AudioStreamType::NOTIFICATION:
+        case AudioStreamType::NOTIFICATION:
             return AUDIO_STREAM_NOTIFICATION;
-        case media::AudioStreamType::BLUETOOTH_SCO:
+        case AudioStreamType::BLUETOOTH_SCO:
             return AUDIO_STREAM_BLUETOOTH_SCO;
-        case media::AudioStreamType::ENFORCED_AUDIBLE:
+        case AudioStreamType::ENFORCED_AUDIBLE:
             return AUDIO_STREAM_ENFORCED_AUDIBLE;
-        case media::AudioStreamType::DTMF:
+        case AudioStreamType::DTMF:
             return AUDIO_STREAM_DTMF;
-        case media::AudioStreamType::TTS:
+        case AudioStreamType::TTS:
             return AUDIO_STREAM_TTS;
-        case media::AudioStreamType::ACCESSIBILITY:
+        case AudioStreamType::ACCESSIBILITY:
             return AUDIO_STREAM_ACCESSIBILITY;
-        case media::AudioStreamType::ASSISTANT:
+        case AudioStreamType::ASSISTANT:
             return AUDIO_STREAM_ASSISTANT;
-        case media::AudioStreamType::REROUTING:
+        case AudioStreamType::SYS_RESERVED_REROUTING:
             return AUDIO_STREAM_REROUTING;
-        case media::AudioStreamType::PATCH:
+        case AudioStreamType::SYS_RESERVED_PATCH:
             return AUDIO_STREAM_PATCH;
-        case media::AudioStreamType::CALL_ASSISTANT:
+        case AudioStreamType::CALL_ASSISTANT:
             return AUDIO_STREAM_CALL_ASSISTANT;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+ConversionResult<AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
         audio_stream_type_t legacy) {
     switch (legacy) {
         case AUDIO_STREAM_DEFAULT:
-            return media::AudioStreamType::DEFAULT;
+            return AudioStreamType::SYS_RESERVED_DEFAULT;
         case AUDIO_STREAM_VOICE_CALL:
-            return media::AudioStreamType::VOICE_CALL;
+            return AudioStreamType::VOICE_CALL;
         case AUDIO_STREAM_SYSTEM:
-            return media::AudioStreamType::SYSTEM;
+            return AudioStreamType::SYSTEM;
         case AUDIO_STREAM_RING:
-            return media::AudioStreamType::RING;
+            return AudioStreamType::RING;
         case AUDIO_STREAM_MUSIC:
-            return media::AudioStreamType::MUSIC;
+            return AudioStreamType::MUSIC;
         case AUDIO_STREAM_ALARM:
-            return media::AudioStreamType::ALARM;
+            return AudioStreamType::ALARM;
         case AUDIO_STREAM_NOTIFICATION:
-            return media::AudioStreamType::NOTIFICATION;
+            return AudioStreamType::NOTIFICATION;
         case AUDIO_STREAM_BLUETOOTH_SCO:
-            return media::AudioStreamType::BLUETOOTH_SCO;
+            return AudioStreamType::BLUETOOTH_SCO;
         case AUDIO_STREAM_ENFORCED_AUDIBLE:
-            return media::AudioStreamType::ENFORCED_AUDIBLE;
+            return AudioStreamType::ENFORCED_AUDIBLE;
         case AUDIO_STREAM_DTMF:
-            return media::AudioStreamType::DTMF;
+            return AudioStreamType::DTMF;
         case AUDIO_STREAM_TTS:
-            return media::AudioStreamType::TTS;
+            return AudioStreamType::TTS;
         case AUDIO_STREAM_ACCESSIBILITY:
-            return media::AudioStreamType::ACCESSIBILITY;
+            return AudioStreamType::ACCESSIBILITY;
         case AUDIO_STREAM_ASSISTANT:
-            return media::AudioStreamType::ASSISTANT;
+            return AudioStreamType::ASSISTANT;
         case AUDIO_STREAM_REROUTING:
-            return media::AudioStreamType::REROUTING;
+            return AudioStreamType::SYS_RESERVED_REROUTING;
         case AUDIO_STREAM_PATCH:
-            return media::AudioStreamType::PATCH;
+            return AudioStreamType::SYS_RESERVED_PATCH;
         case AUDIO_STREAM_CALL_ASSISTANT:
-            return media::AudioStreamType::CALL_ASSISTANT;
+            return AudioStreamType::CALL_ASSISTANT;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
-        media::AudioSourceType aidl) {
+ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
+        AudioSource aidl) {
     switch (aidl) {
-        case media::AudioSourceType::INVALID:
-            // This value does not have an enum
+        case AudioSource::SYS_RESERVED_INVALID:
             return AUDIO_SOURCE_INVALID;
-        case media::AudioSourceType::DEFAULT:
+        case AudioSource::DEFAULT:
             return AUDIO_SOURCE_DEFAULT;
-        case media::AudioSourceType::MIC:
+        case AudioSource::MIC:
             return AUDIO_SOURCE_MIC;
-        case media::AudioSourceType::VOICE_UPLINK:
+        case AudioSource::VOICE_UPLINK:
             return AUDIO_SOURCE_VOICE_UPLINK;
-        case media::AudioSourceType::VOICE_DOWNLINK:
+        case AudioSource::VOICE_DOWNLINK:
             return AUDIO_SOURCE_VOICE_DOWNLINK;
-        case media::AudioSourceType::VOICE_CALL:
+        case AudioSource::VOICE_CALL:
             return AUDIO_SOURCE_VOICE_CALL;
-        case media::AudioSourceType::CAMCORDER:
+        case AudioSource::CAMCORDER:
             return AUDIO_SOURCE_CAMCORDER;
-        case media::AudioSourceType::VOICE_RECOGNITION:
+        case AudioSource::VOICE_RECOGNITION:
             return AUDIO_SOURCE_VOICE_RECOGNITION;
-        case media::AudioSourceType::VOICE_COMMUNICATION:
+        case AudioSource::VOICE_COMMUNICATION:
             return AUDIO_SOURCE_VOICE_COMMUNICATION;
-        case media::AudioSourceType::REMOTE_SUBMIX:
+        case AudioSource::REMOTE_SUBMIX:
             return AUDIO_SOURCE_REMOTE_SUBMIX;
-        case media::AudioSourceType::UNPROCESSED:
+        case AudioSource::UNPROCESSED:
             return AUDIO_SOURCE_UNPROCESSED;
-        case media::AudioSourceType::VOICE_PERFORMANCE:
+        case AudioSource::VOICE_PERFORMANCE:
             return AUDIO_SOURCE_VOICE_PERFORMANCE;
-        case media::AudioSourceType::ECHO_REFERENCE:
+        case AudioSource::ULTRASOUND:
+            return AUDIO_SOURCE_ULTRASOUND;
+        case AudioSource::ECHO_REFERENCE:
             return AUDIO_SOURCE_ECHO_REFERENCE;
-        case media::AudioSourceType::FM_TUNER:
+        case AudioSource::FM_TUNER:
             return AUDIO_SOURCE_FM_TUNER;
-        case media::AudioSourceType::HOTWORD:
+        case AudioSource::HOTWORD:
             return AUDIO_SOURCE_HOTWORD;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+ConversionResult<AudioSource> legacy2aidl_audio_source_t_AudioSource(
         audio_source_t legacy) {
     switch (legacy) {
         case AUDIO_SOURCE_INVALID:
-            return media::AudioSourceType::INVALID;
+            return AudioSource::SYS_RESERVED_INVALID;
         case AUDIO_SOURCE_DEFAULT:
-            return media::AudioSourceType::DEFAULT;
+            return AudioSource::DEFAULT;
         case AUDIO_SOURCE_MIC:
-            return media::AudioSourceType::MIC;
+            return AudioSource::MIC;
         case AUDIO_SOURCE_VOICE_UPLINK:
-            return media::AudioSourceType::VOICE_UPLINK;
+            return AudioSource::VOICE_UPLINK;
         case AUDIO_SOURCE_VOICE_DOWNLINK:
-            return media::AudioSourceType::VOICE_DOWNLINK;
+            return AudioSource::VOICE_DOWNLINK;
         case AUDIO_SOURCE_VOICE_CALL:
-            return media::AudioSourceType::VOICE_CALL;
+            return AudioSource::VOICE_CALL;
         case AUDIO_SOURCE_CAMCORDER:
-            return media::AudioSourceType::CAMCORDER;
+            return AudioSource::CAMCORDER;
         case AUDIO_SOURCE_VOICE_RECOGNITION:
-            return media::AudioSourceType::VOICE_RECOGNITION;
+            return AudioSource::VOICE_RECOGNITION;
         case AUDIO_SOURCE_VOICE_COMMUNICATION:
-            return media::AudioSourceType::VOICE_COMMUNICATION;
+            return AudioSource::VOICE_COMMUNICATION;
         case AUDIO_SOURCE_REMOTE_SUBMIX:
-            return media::AudioSourceType::REMOTE_SUBMIX;
+            return AudioSource::REMOTE_SUBMIX;
         case AUDIO_SOURCE_UNPROCESSED:
-            return media::AudioSourceType::UNPROCESSED;
+            return AudioSource::UNPROCESSED;
         case AUDIO_SOURCE_VOICE_PERFORMANCE:
-            return media::AudioSourceType::VOICE_PERFORMANCE;
+            return AudioSource::VOICE_PERFORMANCE;
+        case AUDIO_SOURCE_ULTRASOUND:
+            return AudioSource::ULTRASOUND;
         case AUDIO_SOURCE_ECHO_REFERENCE:
-            return media::AudioSourceType::ECHO_REFERENCE;
+            return AudioSource::ECHO_REFERENCE;
         case AUDIO_SOURCE_FM_TUNER:
-            return media::AudioSourceType::FM_TUNER;
+            return AudioSource::FM_TUNER;
         case AUDIO_SOURCE_HOTWORD:
-            return media::AudioSourceType::HOTWORD;
+            return AudioSource::HOTWORD;
     }
     return unexpected(BAD_VALUE);
 }
@@ -902,8 +1823,8 @@
 // This type is unnamed in the original definition, thus we name it here.
 using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
 
-ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortConfigMixExtUseCase(
-        const media::AudioPortConfigMixExtUseCase& aidl, media::AudioPortRole role) {
+ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortMixExtUseCase(
+        const AudioPortMixExtUseCase& aidl, media::AudioPortRole role) {
     audio_port_config_mix_ext_usecase legacy;
 
     switch (role) {
@@ -920,16 +1841,16 @@
 
         case media::AudioPortRole::SINK:
             // This is not a bug. A SINK role corresponds to the source field.
-            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(
+            legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
                     VALUE_OR_RETURN(UNION_GET(aidl, source))));
             return legacy;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch catches missing cases
 }
 
-ConversionResult<media::AudioPortConfigMixExtUseCase> legacy2aidl_AudioPortConfigMixExtUseCase(
+ConversionResult<AudioPortMixExtUseCase> legacy2aidl_AudioPortMixExtUseCase(
         const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
-    media::AudioPortConfigMixExtUseCase aidl;
+    AudioPortMixExtUseCase aidl;
 
     switch (role) {
         case AUDIO_PORT_ROLE_NONE:
@@ -943,52 +1864,53 @@
         case AUDIO_PORT_ROLE_SINK:
             // This is not a bug. A SINK role corresponds to the source field.
             UNION_SET(aidl, source,
-                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source)));
+                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source)));
             return aidl;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch catches missing cases
 }
 
-ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
-        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role) {
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
+        const AudioPortMixExt& aidl, media::AudioPortRole role,
+        const media::AudioPortMixExtSys& aidlMixExt) {
     audio_port_config_mix_ext legacy;
-    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+    legacy.hw_module = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidlMixExt.hwModule));
     legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
-    legacy.usecase = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigMixExtUseCase(aidl.usecase, role));
+    legacy.usecase = VALUE_OR_RETURN(aidl2legacy_AudioPortMixExtUseCase(aidl.usecase, role));
     return legacy;
 }
 
-ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
-        const audio_port_config_mix_ext& legacy, audio_port_role_t role) {
-    media::AudioPortConfigMixExt aidl;
-    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
-    aidl.usecase = VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExtUseCase(legacy.usecase, role));
-    return aidl;
+status_t legacy2aidl_AudioPortMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role,
+        AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt) {
+    aidlMixExt->hwModule = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl->handle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    aidl->usecase = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioPortMixExtUseCase(legacy.usecase, role));
+    return OK;
 }
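
Several of the rewritten conversions, including legacy2aidl_AudioPortMixExt above, no longer return a single parcelable: one legacy struct is split into a stable AIDL half and a framework-only *Sys half, both filled through out-pointers, with a status_t result. A minimal sketch of that split-output pattern follows; the struct names and the status values are placeholders, not the real media types.

    // Standalone sketch of the split-output pattern above: one legacy struct is
    // decomposed into a "stable AIDL" half and a framework-only "*Sys" half via
    // out-parameters. All names and status values here are placeholders.
    #include <cstdint>

    using status_t = int32_t;
    constexpr status_t OK = 0;
    constexpr status_t BAD_VALUE = -22;  // placeholder error code

    struct LegacyMixExt  { int32_t hw_module; int32_t handle; };
    struct AidlMixExt    { int32_t handle; };    // stands in for the stable AIDL part
    struct AidlMixExtSys { int32_t hwModule; };  // stands in for media::AudioPortMixExtSys

    status_t legacyToAidlSplit(const LegacyMixExt& legacy,
                               AidlMixExt* aidl, AidlMixExtSys* aidlSys) {
        if (aidl == nullptr || aidlSys == nullptr) return BAD_VALUE;
        aidlSys->hwModule = legacy.hw_module;  // system-only detail goes to the *Sys half
        aidl->handle = legacy.handle;          // portable detail goes to the stable half
        return OK;
    }

    int main() {
        AidlMixExt aidl{};
        AidlMixExtSys sys{};
        return legacyToAidlSplit(LegacyMixExt{3, 42}, &aidl, &sys) == OK ? 0 : 1;
    }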
 
 ConversionResult<audio_port_config_session_ext>
-aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
-        const media::AudioPortConfigSessionExt& aidl) {
+aidl2legacy_int32_t_audio_port_config_session_ext(int32_t aidl) {
     audio_port_config_session_ext legacy;
-    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl));
     return legacy;
 }
 
-ConversionResult<media::AudioPortConfigSessionExt>
-legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+ConversionResult<int32_t>
+legacy2aidl_audio_port_config_session_ext_int32_t(
         const audio_port_config_session_ext& legacy) {
-    media::AudioPortConfigSessionExt aidl;
-    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
-    return aidl;
+    return legacy2aidl_audio_session_t_int32_t(legacy.session);
 }
 
 // This type is unnamed in the original definition, thus we name it here.
 using audio_port_config_ext = decltype(audio_port_config::ext);
 
-ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortConfigExt(
-        const media::AudioPortConfigExt& aidl, media::AudioPortType type,
-        media::AudioPortRole role) {
+ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortExt_audio_port_config_ext(
+        const AudioPortExt& aidl, media::AudioPortType type,
+        media::AudioPortRole role, const media::AudioPortExtSys& aidlSys) {
     audio_port_config_ext legacy;
     switch (type) {
         case media::AudioPortType::NONE:
@@ -997,16 +1919,19 @@
             return legacy;
         case media::AudioPortType::DEVICE:
             legacy.device = VALUE_OR_RETURN(
-                    aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, device))));
+                    aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, device)),
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, device))));
             return legacy;
         case media::AudioPortType::MIX:
             legacy.mix = VALUE_OR_RETURN(
-                    aidl2legacy_AudioPortConfigMixExt(VALUE_OR_RETURN(UNION_GET(aidl, mix)), role));
+                    aidl2legacy_AudioPortMixExt(
+                            VALUE_OR_RETURN(UNION_GET(aidl, mix)), role,
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, mix))));
             return legacy;
         case media::AudioPortType::SESSION:
             legacy.session = VALUE_OR_RETURN(
-                    aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
+                    aidl2legacy_int32_t_audio_port_config_session_ext(
                             VALUE_OR_RETURN(UNION_GET(aidl, session))));
             return legacy;
 
@@ -1014,90 +1939,113 @@
     LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch catches missing cases
 }
 
-ConversionResult<media::AudioPortConfigExt> legacy2aidl_AudioPortConfigExt(
-        const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role) {
-    media::AudioPortConfigExt aidl;
-
+status_t legacy2aidl_AudioPortExt(
+        const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role,
+        AudioPortExt* aidl, media::AudioPortExtSys* aidlSys) {
     switch (type) {
         case AUDIO_PORT_TYPE_NONE:
-            UNION_SET(aidl, unspecified, false);
-            return aidl;
-        case AUDIO_PORT_TYPE_DEVICE:
-            UNION_SET(aidl, device,
-                      VALUE_OR_RETURN(
-                        legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
-                          legacy.device)));
-            return aidl;
-        case AUDIO_PORT_TYPE_MIX:
-            UNION_SET(aidl, mix,
-                      VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExt(legacy.mix, role)));
-            return aidl;
+            UNION_SET(*aidl, unspecified, false);
+            UNION_SET(*aidlSys, unspecified, false);
+            return OK;
+        case AUDIO_PORT_TYPE_DEVICE: {
+            AudioPortDeviceExt device;
+            media::AudioPortDeviceExtSys deviceSys;
+            RETURN_STATUS_IF_ERROR(
+                    legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+                            legacy.device, &device, &deviceSys));
+            UNION_SET(*aidl, device, device);
+            UNION_SET(*aidlSys, device, deviceSys);
+            return OK;
+        }
+        case AUDIO_PORT_TYPE_MIX: {
+            AudioPortMixExt mix;
+            media::AudioPortMixExtSys mixSys;
+            RETURN_STATUS_IF_ERROR(legacy2aidl_AudioPortMixExt(legacy.mix, role, &mix, &mixSys));
+            UNION_SET(*aidl, mix, mix);
+            UNION_SET(*aidlSys, mix, mixSys);
+            return OK;
+        }
         case AUDIO_PORT_TYPE_SESSION:
-            UNION_SET(aidl, session,
-                      VALUE_OR_RETURN(
-                        legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
-                          legacy.session)));
-            return aidl;
+            UNION_SET(*aidl, session, VALUE_OR_RETURN_STATUS(
+                            legacy2aidl_audio_port_config_session_ext_int32_t(legacy.session)));
+            UNION_SET(*aidlSys, unspecified, false);
+            return OK;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // unreachable: -Werror,-Wswitch catches missing cases
 }
 
 ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
         const media::AudioPortConfig& aidl) {
-    audio_port_config legacy;
-    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
-    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
-    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
-    legacy.config_mask = VALUE_OR_RETURN(aidl2legacy_int32_t_config_mask(aidl.configMask));
-    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::SAMPLE_RATE)) {
-        legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sampleRate));
+    audio_port_config legacy{};
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
+    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.sys.type));
+    const bool isInput =
+            VALUE_OR_RETURN(direction(aidl.sys.role, aidl.sys.type)) == Direction::INPUT;
+    if (aidl.hal.sampleRate.has_value()) {
+        legacy.sample_rate = VALUE_OR_RETURN(
+                convertIntegral<unsigned int>(aidl.hal.sampleRate.value().value));
+        legacy.config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
     }
-    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::CHANNEL_MASK)) {
+    if (aidl.hal.channelMask.has_value()) {
         legacy.channel_mask =
-                VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+                VALUE_OR_RETURN(
+                        aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                                aidl.hal.channelMask.value(), isInput));
+        legacy.config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
     }
-    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FORMAT)) {
-        legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    if (aidl.hal.format.has_value()) {
+        legacy.format = VALUE_OR_RETURN(
+                aidl2legacy_AudioFormatDescription_audio_format_t(aidl.hal.format.value()));
+        legacy.config_mask |= AUDIO_PORT_CONFIG_FORMAT;
     }
-    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::GAIN)) {
-        legacy.gain = VALUE_OR_RETURN(
-                aidl2legacy_AudioGainConfig_audio_gain_config(aidl.gain, aidl.role, aidl.type));
+    if (aidl.hal.gain.has_value()) {
+        legacy.gain = VALUE_OR_RETURN(aidl2legacy_AudioGainConfig_audio_gain_config(
+                        aidl.hal.gain.value(), isInput));
+        legacy.config_mask |= AUDIO_PORT_CONFIG_GAIN;
     }
-    if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FLAGS)) {
+    if (aidl.hal.flags.has_value()) {
         legacy.flags = VALUE_OR_RETURN(
-                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags, aidl.role, aidl.type));
+                aidl2legacy_AudioIoFlags_audio_io_flags(aidl.hal.flags.value(), isInput));
+        legacy.config_mask |= AUDIO_PORT_CONFIG_FLAGS;
     }
-    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigExt(aidl.ext, aidl.type, aidl.role));
+    legacy.ext = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortExt_audio_port_config_ext(
+                    aidl.hal.ext, aidl.sys.type, aidl.sys.role, aidl.sys.ext));
     return legacy;
 }
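
In the new AudioPortConfig conversion, the explicit configMask is gone: each std::optional field of aidl.hal that is present both fills the corresponding legacy field and ORs the matching AUDIO_PORT_CONFIG_* bit into config_mask of the zero-initialized struct. A minimal standalone sketch of that optional-to-mask pattern, with placeholder names and bit values:

    // Standalone sketch of the optional-to-config_mask pattern above: a field present
    // in the AIDL struct sets both the legacy value and the matching "this field is
    // valid" bit. Names and bit values are placeholders.
    #include <cassert>
    #include <cstdint>
    #include <optional>

    constexpr uint32_t CONFIG_SAMPLE_RATE  = 0x1u;
    constexpr uint32_t CONFIG_CHANNEL_MASK = 0x2u;

    struct AidlConfig   { std::optional<int32_t> sampleRate; std::optional<uint32_t> channelMask; };
    struct LegacyConfig { uint32_t config_mask = 0; uint32_t sample_rate = 0; uint32_t channel_mask = 0; };

    LegacyConfig toLegacy(const AidlConfig& aidl) {
        LegacyConfig legacy{};  // zero-initialized, like `audio_port_config legacy{}` above
        if (aidl.sampleRate.has_value()) {
            legacy.sample_rate = static_cast<uint32_t>(*aidl.sampleRate);
            legacy.config_mask |= CONFIG_SAMPLE_RATE;
        }
        if (aidl.channelMask.has_value()) {
            legacy.channel_mask = *aidl.channelMask;
            legacy.config_mask |= CONFIG_CHANNEL_MASK;
        }
        return legacy;
    }

    int main() {
        LegacyConfig legacy = toLegacy(AidlConfig{48000, std::nullopt});
        assert(legacy.config_mask == CONFIG_SAMPLE_RATE);  // only the provided field is flagged
        return 0;
    }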
 
 ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
         const audio_port_config& legacy) {
     media::AudioPortConfig aidl;
-    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
-    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
-    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
-    aidl.configMask = VALUE_OR_RETURN(legacy2aidl_config_mask_int32_t(legacy.config_mask));
+    aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+    aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+    const bool isInput = VALUE_OR_RETURN(
+            direction(legacy.role, legacy.type)) == Direction::INPUT;
     if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
-        aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+        Int aidl_sampleRate;
+        aidl_sampleRate.value = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+        aidl.hal.sampleRate = aidl_sampleRate;
     }
     if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
-        aidl.channelMask =
-                VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+        aidl.hal.channelMask = VALUE_OR_RETURN(
+                legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
     }
     if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
-        aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+        aidl.hal.format = VALUE_OR_RETURN(
+                legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
     }
     if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
-        aidl.gain = VALUE_OR_RETURN(legacy2aidl_audio_gain_config_AudioGainConfig(
-                legacy.gain, legacy.role, legacy.type));
+        aidl.hal.gain = VALUE_OR_RETURN(
+                legacy2aidl_audio_gain_config_AudioGainConfig(legacy.gain, isInput));
     }
     if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
-        aidl.flags = VALUE_OR_RETURN(
-                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, legacy.role, legacy.type));
+        aidl.hal.flags = VALUE_OR_RETURN(
+                legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, isInput));
     }
-    aidl.ext =
-            VALUE_OR_RETURN(legacy2aidl_AudioPortConfigExt(legacy.ext, legacy.type, legacy.role));
+    RETURN_IF_ERROR(legacy2aidl_AudioPortExt(legacy.ext, legacy.type, legacy.role,
+                    &aidl.hal.ext, &aidl.sys.ext));
     return aidl;
 }
 
@@ -1148,33 +2096,40 @@
 
 ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
         const media::AudioIoDescriptor& aidl) {
-    sp<AudioIoDescriptor> legacy(new AudioIoDescriptor());
-    legacy->mIoHandle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
-    legacy->mPatch = VALUE_OR_RETURN(aidl2legacy_AudioPatch_audio_patch(aidl.patch));
-    legacy->mSamplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
-    legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
-    legacy->mChannelMask =
-            VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
-    legacy->mFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
-    legacy->mFrameCountHAL = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCountHAL));
-    legacy->mLatency = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.latency));
-    legacy->mPortId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
-    return legacy;
+    const audio_io_handle_t io_handle = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
+    const struct audio_patch patch = VALUE_OR_RETURN(
+            aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+    const bool isInput = aidl.isInput;
+    const uint32_t sampling_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
+    const audio_format_t format = VALUE_OR_RETURN(
+            aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
+    const audio_channel_mask_t channel_mask = VALUE_OR_RETURN(
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+    const size_t frame_count = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+    const size_t frame_count_hal = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCountHAL));
+    const uint32_t latency = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.latency));
+    const audio_port_handle_t port_id = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
+    return sp<AudioIoDescriptor>::make(io_handle, patch, isInput, sampling_rate, format,
+            channel_mask, frame_count, frame_count_hal, latency, port_id);
 }
 
 ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
         const sp<AudioIoDescriptor>& legacy) {
     media::AudioIoDescriptor aidl;
-    aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->mIoHandle));
-    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->mPatch));
-    aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mSamplingRate));
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy->mFormat));
-    aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_int32_t(legacy->mChannelMask));
-    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCount));
-    aidl.frameCountHAL = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCountHAL));
-    aidl.latency = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mLatency));
-    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy->mPortId));
+    aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->getIoHandle()));
+    aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->getPatch()));
+    aidl.isInput = legacy->getIsInput();
+    aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->getSamplingRate()));
+    aidl.format = VALUE_OR_RETURN(
+            legacy2aidl_audio_format_t_AudioFormatDescription(legacy->getFormat()));
+    aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+                    legacy->getChannelMask(), legacy->getIsInput()));
+    aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->getFrameCount()));
+    aidl.frameCountHAL = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->getFrameCountHAL()));
+    aidl.latency = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->getLatency()));
+    aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy->getPortId()));
     return aidl;
 }
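
The AudioIoDescriptor conversion above no longer assigns public member fields one by one; it converts every value first (so any failure happens before the object exists) and then builds the descriptor fully initialized via sp<AudioIoDescriptor>::make. A rough sketch of that construct-once approach, using std::shared_ptr and an illustrative Descriptor class in place of the real strong-pointer and descriptor types:

    // Standalone sketch of the construct-once pattern above: all fields are converted
    // (and can be rejected) before the object is created in a single call.
    // Descriptor and makeDescriptor are placeholders, not the real classes.
    #include <cstdint>
    #include <memory>

    class Descriptor {
      public:
        Descriptor(int32_t ioHandle, bool isInput, uint32_t samplingRate)
            : mIoHandle(ioHandle), mIsInput(isInput), mSamplingRate(samplingRate) {}
        int32_t getIoHandle() const { return mIoHandle; }
        bool getIsInput() const { return mIsInput; }
        uint32_t getSamplingRate() const { return mSamplingRate; }

      private:
        const int32_t mIoHandle;
        const bool mIsInput;
        const uint32_t mSamplingRate;
    };

    std::shared_ptr<Descriptor> makeDescriptor(int32_t ioHandle, bool isInput,
                                               int32_t samplingRate) {
        if (samplingRate < 0) return nullptr;  // reject before anything is constructed
        return std::make_shared<Descriptor>(ioHandle, isInput,
                                            static_cast<uint32_t>(samplingRate));
    }

    int main() {
        auto desc = makeDescriptor(7, /*isInput=*/false, 48000);
        return (desc && desc->getSamplingRate() == 48000u) ? 0 : 1;
    }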
 
@@ -1195,137 +2150,143 @@
 }
 
 ConversionResult<audio_content_type_t>
-aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl) {
+aidl2legacy_AudioContentType_audio_content_type_t(AudioContentType aidl) {
     switch (aidl) {
-        case media::AudioContentType::UNKNOWN:
+        case AudioContentType::UNKNOWN:
             return AUDIO_CONTENT_TYPE_UNKNOWN;
-        case media::AudioContentType::SPEECH:
+        case AudioContentType::SPEECH:
             return AUDIO_CONTENT_TYPE_SPEECH;
-        case media::AudioContentType::MUSIC:
+        case AudioContentType::MUSIC:
             return AUDIO_CONTENT_TYPE_MUSIC;
-        case media::AudioContentType::MOVIE:
+        case AudioContentType::MOVIE:
             return AUDIO_CONTENT_TYPE_MOVIE;
-        case media::AudioContentType::SONIFICATION:
+        case AudioContentType::SONIFICATION:
             return AUDIO_CONTENT_TYPE_SONIFICATION;
+        case AudioContentType::ULTRASOUND:
+            return AUDIO_CONTENT_TYPE_ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioContentType>
+ConversionResult<AudioContentType>
 legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy) {
     switch (legacy) {
         case AUDIO_CONTENT_TYPE_UNKNOWN:
-            return media::AudioContentType::UNKNOWN;
+            return AudioContentType::UNKNOWN;
         case AUDIO_CONTENT_TYPE_SPEECH:
-            return media::AudioContentType::SPEECH;
+            return AudioContentType::SPEECH;
         case AUDIO_CONTENT_TYPE_MUSIC:
-            return media::AudioContentType::MUSIC;
+            return AudioContentType::MUSIC;
         case AUDIO_CONTENT_TYPE_MOVIE:
-            return media::AudioContentType::MOVIE;
+            return AudioContentType::MOVIE;
         case AUDIO_CONTENT_TYPE_SONIFICATION:
-            return media::AudioContentType::SONIFICATION;
+            return AudioContentType::SONIFICATION;
+        case AUDIO_CONTENT_TYPE_ULTRASOUND:
+            return AudioContentType::ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
 
 ConversionResult<audio_usage_t>
-aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl) {
+aidl2legacy_AudioUsage_audio_usage_t(AudioUsage aidl) {
     switch (aidl) {
-        case media::AudioUsage::UNKNOWN:
+        case AudioUsage::INVALID:
+            break;  // return error
+        case AudioUsage::UNKNOWN:
             return AUDIO_USAGE_UNKNOWN;
-        case media::AudioUsage::MEDIA:
+        case AudioUsage::MEDIA:
             return AUDIO_USAGE_MEDIA;
-        case media::AudioUsage::VOICE_COMMUNICATION:
+        case AudioUsage::VOICE_COMMUNICATION:
             return AUDIO_USAGE_VOICE_COMMUNICATION;
-        case media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
+        case AudioUsage::VOICE_COMMUNICATION_SIGNALLING:
             return AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
-        case media::AudioUsage::ALARM:
+        case AudioUsage::ALARM:
             return AUDIO_USAGE_ALARM;
-        case media::AudioUsage::NOTIFICATION:
+        case AudioUsage::NOTIFICATION:
             return AUDIO_USAGE_NOTIFICATION;
-        case media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
+        case AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE:
             return AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
-        case media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST:
+        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST:
             return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
-        case media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT:
+        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT:
             return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
-        case media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED:
+        case AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED:
             return AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
-        case media::AudioUsage::NOTIFICATION_EVENT:
+        case AudioUsage::NOTIFICATION_EVENT:
             return AUDIO_USAGE_NOTIFICATION_EVENT;
-        case media::AudioUsage::ASSISTANCE_ACCESSIBILITY:
+        case AudioUsage::ASSISTANCE_ACCESSIBILITY:
             return AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
-        case media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
+        case AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE:
             return AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
-        case media::AudioUsage::ASSISTANCE_SONIFICATION:
+        case AudioUsage::ASSISTANCE_SONIFICATION:
             return AUDIO_USAGE_ASSISTANCE_SONIFICATION;
-        case media::AudioUsage::GAME:
+        case AudioUsage::GAME:
             return AUDIO_USAGE_GAME;
-        case media::AudioUsage::VIRTUAL_SOURCE:
+        case AudioUsage::VIRTUAL_SOURCE:
             return AUDIO_USAGE_VIRTUAL_SOURCE;
-        case media::AudioUsage::ASSISTANT:
+        case AudioUsage::ASSISTANT:
             return AUDIO_USAGE_ASSISTANT;
-        case media::AudioUsage::CALL_ASSISTANT:
+        case AudioUsage::CALL_ASSISTANT:
             return AUDIO_USAGE_CALL_ASSISTANT;
-        case media::AudioUsage::EMERGENCY:
+        case AudioUsage::EMERGENCY:
             return AUDIO_USAGE_EMERGENCY;
-        case media::AudioUsage::SAFETY:
+        case AudioUsage::SAFETY:
             return AUDIO_USAGE_SAFETY;
-        case media::AudioUsage::VEHICLE_STATUS:
+        case AudioUsage::VEHICLE_STATUS:
             return AUDIO_USAGE_VEHICLE_STATUS;
-        case media::AudioUsage::ANNOUNCEMENT:
+        case AudioUsage::ANNOUNCEMENT:
             return AUDIO_USAGE_ANNOUNCEMENT;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioUsage>
+ConversionResult<AudioUsage>
 legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy) {
     switch (legacy) {
         case AUDIO_USAGE_UNKNOWN:
-            return media::AudioUsage::UNKNOWN;
+            return AudioUsage::UNKNOWN;
         case AUDIO_USAGE_MEDIA:
-            return media::AudioUsage::MEDIA;
+            return AudioUsage::MEDIA;
         case AUDIO_USAGE_VOICE_COMMUNICATION:
-            return media::AudioUsage::VOICE_COMMUNICATION;
+            return AudioUsage::VOICE_COMMUNICATION;
         case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
-            return media::AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
+            return AudioUsage::VOICE_COMMUNICATION_SIGNALLING;
         case AUDIO_USAGE_ALARM:
-            return media::AudioUsage::ALARM;
+            return AudioUsage::ALARM;
         case AUDIO_USAGE_NOTIFICATION:
-            return media::AudioUsage::NOTIFICATION;
+            return AudioUsage::NOTIFICATION;
         case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
-            return media::AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
+            return AudioUsage::NOTIFICATION_TELEPHONY_RINGTONE;
         case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
-            return media::AudioUsage::NOTIFICATION_COMMUNICATION_REQUEST;
+            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST;
         case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
-            return media::AudioUsage::NOTIFICATION_COMMUNICATION_INSTANT;
+            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT;
         case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
-            return media::AudioUsage::NOTIFICATION_COMMUNICATION_DELAYED;
+            return AudioUsage::SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED;
         case AUDIO_USAGE_NOTIFICATION_EVENT:
-            return media::AudioUsage::NOTIFICATION_EVENT;
+            return AudioUsage::NOTIFICATION_EVENT;
         case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
-            return media::AudioUsage::ASSISTANCE_ACCESSIBILITY;
+            return AudioUsage::ASSISTANCE_ACCESSIBILITY;
         case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
-            return media::AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
+            return AudioUsage::ASSISTANCE_NAVIGATION_GUIDANCE;
         case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
-            return media::AudioUsage::ASSISTANCE_SONIFICATION;
+            return AudioUsage::ASSISTANCE_SONIFICATION;
         case AUDIO_USAGE_GAME:
-            return media::AudioUsage::GAME;
+            return AudioUsage::GAME;
         case AUDIO_USAGE_VIRTUAL_SOURCE:
-            return media::AudioUsage::VIRTUAL_SOURCE;
+            return AudioUsage::VIRTUAL_SOURCE;
         case AUDIO_USAGE_ASSISTANT:
-            return media::AudioUsage::ASSISTANT;
+            return AudioUsage::ASSISTANT;
         case AUDIO_USAGE_CALL_ASSISTANT:
-            return media::AudioUsage::CALL_ASSISTANT;
+            return AudioUsage::CALL_ASSISTANT;
         case AUDIO_USAGE_EMERGENCY:
-            return media::AudioUsage::EMERGENCY;
+            return AudioUsage::EMERGENCY;
         case AUDIO_USAGE_SAFETY:
-            return media::AudioUsage::SAFETY;
+            return AudioUsage::SAFETY;
         case AUDIO_USAGE_VEHICLE_STATUS:
-            return media::AudioUsage::VEHICLE_STATUS;
+            return AudioUsage::VEHICLE_STATUS;
         case AUDIO_USAGE_ANNOUNCEMENT:
-            return media::AudioUsage::ANNOUNCEMENT;
+            return AudioUsage::ANNOUNCEMENT;
     }
     return unexpected(BAD_VALUE);
 }
@@ -1365,6 +2326,8 @@
             return AUDIO_FLAG_CONTENT_SPATIALIZED;
         case media::AudioFlag::NEVER_SPATIALIZE:
             return AUDIO_FLAG_NEVER_SPATIALIZE;
+        case media::AudioFlag::CALL_REDIRECTION:
+            return AUDIO_FLAG_CALL_REDIRECTION;
     }
     return unexpected(BAD_VALUE);
 }
@@ -1406,6 +2369,8 @@
             return media::AudioFlag::CONTENT_SPATIALIZED;
         case AUDIO_FLAG_NEVER_SPATIALIZE:
             return media::AudioFlag::NEVER_SPATIALIZE;
+        case AUDIO_FLAG_CALL_REDIRECTION:
+            return media::AudioFlag::CALL_REDIRECTION;
     }
     return unexpected(BAD_VALUE);
 }
@@ -1431,7 +2396,7 @@
     legacy.content_type = VALUE_OR_RETURN(
             aidl2legacy_AudioContentType_audio_content_type_t(aidl.contentType));
     legacy.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
-    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
     legacy.flags = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_flags_mask_t_mask(aidl.flags));
     RETURN_IF_ERROR(aidl2legacy_string(aidl.tags, legacy.tags, sizeof(legacy.tags)));
     return legacy;
@@ -1443,51 +2408,51 @@
     aidl.contentType = VALUE_OR_RETURN(
             legacy2aidl_audio_content_type_t_AudioContentType(legacy.content_type));
     aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.usage));
-    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source));
     aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_flags_mask_t_int32_t_mask(legacy.flags));
     aidl.tags = VALUE_OR_RETURN(legacy2aidl_string(legacy.tags, sizeof(legacy.tags)));
     return aidl;
 }
 
 ConversionResult<audio_encapsulation_mode_t>
-aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(media::AudioEncapsulationMode aidl) {
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(AudioEncapsulationMode aidl) {
     switch (aidl) {
-        case media::AudioEncapsulationMode::NONE:
+        case AudioEncapsulationMode::INVALID:
+            break;  // return error
+        case AudioEncapsulationMode::NONE:
             return AUDIO_ENCAPSULATION_MODE_NONE;
-        case media::AudioEncapsulationMode::ELEMENTARY_STREAM:
+        case AudioEncapsulationMode::ELEMENTARY_STREAM:
             return AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM;
-        case media::AudioEncapsulationMode::HANDLE:
+        case AudioEncapsulationMode::HANDLE:
             return AUDIO_ENCAPSULATION_MODE_HANDLE;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioEncapsulationMode>
+ConversionResult<AudioEncapsulationMode>
 legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy) {
     switch (legacy) {
         case AUDIO_ENCAPSULATION_MODE_NONE:
-            return media::AudioEncapsulationMode::NONE;
+            return AudioEncapsulationMode::NONE;
         case AUDIO_ENCAPSULATION_MODE_ELEMENTARY_STREAM:
-            return media::AudioEncapsulationMode::ELEMENTARY_STREAM;
+            return AudioEncapsulationMode::ELEMENTARY_STREAM;
         case AUDIO_ENCAPSULATION_MODE_HANDLE:
-            return media::AudioEncapsulationMode::HANDLE;
+            return AudioEncapsulationMode::HANDLE;
     }
     return unexpected(BAD_VALUE);
 }
 
 ConversionResult<audio_offload_info_t>
-aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl) {
-    audio_offload_info_t legacy;
-    legacy.version = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.version));
-    legacy.size = sizeof(audio_offload_info_t);
-    audio_config_base_t config = VALUE_OR_RETURN(
-            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
-    legacy.sample_rate = config.sample_rate;
-    legacy.channel_mask = config.channel_mask;
-    legacy.format = config.format;
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const AudioOffloadInfo& aidl) {
+    audio_offload_info_t legacy = AUDIO_INFO_INITIALIZER;
+    audio_config_base_t base = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.base, false /*isInput*/));
+    legacy.sample_rate = base.sample_rate;
+    legacy.channel_mask = base.channel_mask;
+    legacy.format = base.format;
     legacy.stream_type = VALUE_OR_RETURN(
             aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
-    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitRate));
+    legacy.bit_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.bitRatePerSecond));
     legacy.duration_us = VALUE_OR_RETURN(convertIntegral<int64_t>(aidl.durationUs));
     legacy.has_video = aidl.hasVideo;
     legacy.is_streaming = aidl.isStreaming;
@@ -1501,21 +2466,20 @@
     return legacy;
 }
 
-ConversionResult<media::AudioOffloadInfo>
+ConversionResult<AudioOffloadInfo>
 legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy) {
-    media::AudioOffloadInfo aidl;
+    AudioOffloadInfo aidl;
     // Version 0.1 fields.
     if (legacy.size < offsetof(audio_offload_info_t, usage) + sizeof(audio_offload_info_t::usage)) {
         return unexpected(BAD_VALUE);
     }
-    aidl.version = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.version));
-    aidl.config.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
-    aidl.config.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
-    aidl.config.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+    const audio_config_base_t base = { .sample_rate = legacy.sample_rate,
+        .channel_mask = legacy.channel_mask, .format = legacy.format };
+    aidl.base = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(
+                    base, false /*isInput*/));
     aidl.streamType = VALUE_OR_RETURN(
             legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream_type));
-    aidl.bitRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
+    aidl.bitRatePerSecond = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.bit_rate));
     aidl.durationUs = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.duration_us));
     aidl.hasVideo = legacy.has_video;
     aidl.isStreaming = legacy.is_streaming;
@@ -1539,25 +2503,25 @@
 }
 
 ConversionResult<audio_config_t>
-aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl) {
-    audio_config_t legacy;
-    legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
-    legacy.channel_mask = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+aidl2legacy_AudioConfig_audio_config_t(const AudioConfig& aidl, bool isInput) {
+    const audio_config_base_t legacyBase = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.base, isInput));
+    audio_config_t legacy = AUDIO_CONFIG_INITIALIZER;
+    legacy.sample_rate = legacyBase.sample_rate;
+    legacy.channel_mask = legacyBase.channel_mask;
+    legacy.format = legacyBase.format;
     legacy.offload_info = VALUE_OR_RETURN(
             aidl2legacy_AudioOffloadInfo_audio_offload_info_t(aidl.offloadInfo));
     legacy.frame_count = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.frameCount));
     return legacy;
 }
 
-ConversionResult<media::AudioConfig>
-legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy) {
-    media::AudioConfig aidl;
-    aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
-    aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+ConversionResult<AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput) {
+    const audio_config_base_t base = { .sample_rate = legacy.sample_rate,
+        .channel_mask = legacy.channel_mask, .format = legacy.format };
+    AudioConfig aidl;
+    aidl.base = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(base, isInput));
     aidl.offloadInfo = VALUE_OR_RETURN(
             legacy2aidl_audio_offload_info_t_AudioOffloadInfo(legacy.offload_info));
     aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy.frame_count));
@@ -1565,22 +2529,22 @@
 }
 
 ConversionResult<audio_config_base_t>
-aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl) {
+aidl2legacy_AudioConfigBase_audio_config_base_t(const AudioConfigBase& aidl, bool isInput) {
     audio_config_base_t legacy;
     legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
     legacy.channel_mask = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
     return legacy;
 }
 
-ConversionResult<media::AudioConfigBase>
-legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy) {
-    media::AudioConfigBase aidl;
+ConversionResult<AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput) {
+    AudioConfigBase aidl;
     aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
     aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
     return aidl;
 }
 
@@ -1639,7 +2603,7 @@
 }
 
 ConversionResult<audio_uuid_t>
-aidl2legacy_AudioUuid_audio_uuid_t(const media::AudioUuid& aidl) {
+aidl2legacy_AudioUuid_audio_uuid_t(const AudioUuid& aidl) {
     audio_uuid_t legacy;
     legacy.timeLow = VALUE_OR_RETURN(convertReinterpret<uint32_t>(aidl.timeLow));
     legacy.timeMid = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.timeMid));
@@ -1652,9 +2616,9 @@
     return legacy;
 }
 
-ConversionResult<media::AudioUuid>
+ConversionResult<AudioUuid>
 legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy) {
-    media::AudioUuid aidl;
+    AudioUuid aidl;
     aidl.timeLow = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.timeLow));
     aidl.timeMid = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeMid));
     aidl.timeHiAndVersion = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.timeHiAndVersion));
@@ -1695,28 +2659,28 @@
 
 ConversionResult<audio_encapsulation_metadata_type_t>
 aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
-        media::AudioEncapsulationMetadataType aidl) {
+        AudioEncapsulationMetadataType aidl) {
     switch (aidl) {
-        case media::AudioEncapsulationMetadataType::NONE:
+        case AudioEncapsulationMetadataType::NONE:
             return AUDIO_ENCAPSULATION_METADATA_TYPE_NONE;
-        case media::AudioEncapsulationMetadataType::FRAMEWORK_TUNER:
+        case AudioEncapsulationMetadataType::FRAMEWORK_TUNER:
             return AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER;
-        case media::AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR:
+        case AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR:
             return AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioEncapsulationMetadataType>
+ConversionResult<AudioEncapsulationMetadataType>
 legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
         audio_encapsulation_metadata_type_t legacy) {
     switch (legacy) {
         case AUDIO_ENCAPSULATION_METADATA_TYPE_NONE:
-            return media::AudioEncapsulationMetadataType::NONE;
+            return AudioEncapsulationMetadataType::NONE;
         case AUDIO_ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER:
-            return media::AudioEncapsulationMetadataType::FRAMEWORK_TUNER;
+            return AudioEncapsulationMetadataType::FRAMEWORK_TUNER;
         case AUDIO_ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR:
-            return media::AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR;
+            return AudioEncapsulationMetadataType::DVB_AD_DESCRIPTOR;
     }
     return unexpected(BAD_VALUE);
 }
@@ -1726,9 +2690,9 @@
     return convertBitmask<uint32_t,
             int32_t,
             audio_encapsulation_mode_t,
-            media::AudioEncapsulationMode>(
+            AudioEncapsulationMode>(
             aidl, aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t,
-            indexToEnum_index<media::AudioEncapsulationMode>,
+            indexToEnum_index<AudioEncapsulationMode>,
             enumToMask_index<uint32_t, audio_encapsulation_mode_t>);
 }
 
@@ -1736,11 +2700,11 @@
 legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy) {
     return convertBitmask<int32_t,
             uint32_t,
-            media::AudioEncapsulationMode,
+            AudioEncapsulationMode,
             audio_encapsulation_mode_t>(
             legacy, legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode,
             indexToEnum_index<audio_encapsulation_mode_t>,
-            enumToMask_index<int32_t, media::AudioEncapsulationMode>);
+            enumToMask_index<int32_t, AudioEncapsulationMode>);
 }
 
 ConversionResult<uint32_t>
@@ -1748,9 +2712,9 @@
     return convertBitmask<uint32_t,
             int32_t,
             audio_encapsulation_metadata_type_t,
-            media::AudioEncapsulationMetadataType>(
+            AudioEncapsulationMetadataType>(
             aidl, aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t,
-            indexToEnum_index<media::AudioEncapsulationMetadataType>,
+            indexToEnum_index<AudioEncapsulationMetadataType>,
             enumToMask_index<uint32_t, audio_encapsulation_metadata_type_t>);
 }
 
@@ -1758,104 +2722,79 @@
 legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy) {
     return convertBitmask<int32_t,
             uint32_t,
-            media::AudioEncapsulationMetadataType,
+            AudioEncapsulationMetadataType,
             audio_encapsulation_metadata_type_t>(
             legacy, legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType,
             indexToEnum_index<audio_encapsulation_metadata_type_t>,
-            enumToMask_index<int32_t, media::AudioEncapsulationMetadataType>);
-}
-
-ConversionResult<audio_mix_latency_class_t>
-aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(
-        media::AudioMixLatencyClass aidl) {
-    switch (aidl) {
-        case media::AudioMixLatencyClass::LOW:
-            return AUDIO_LATENCY_LOW;
-        case media::AudioMixLatencyClass::NORMAL:
-            return AUDIO_LATENCY_NORMAL;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioMixLatencyClass>
-legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(
-        audio_mix_latency_class_t legacy) {
-    switch (legacy) {
-        case AUDIO_LATENCY_LOW:
-            return media::AudioMixLatencyClass::LOW;
-        case AUDIO_LATENCY_NORMAL:
-            return media::AudioMixLatencyClass::NORMAL;
-    }
-    return unexpected(BAD_VALUE);
+            enumToMask_index<int32_t, AudioEncapsulationMetadataType>);
 }
 
 ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const media::AudioPortDeviceExt& aidl) {
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+        const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlSys) {
     audio_port_device_ext legacy;
-    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
-    legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.device.type));
-    RETURN_IF_ERROR(
-            aidl2legacy_string(aidl.device.address, legacy.address, sizeof(legacy.address)));
+    legacy.hw_module = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl.device, &legacy.type, legacy.address));
     legacy.encapsulation_modes = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMode_mask(aidl.encapsulationModes));
+            aidl2legacy_AudioEncapsulationMode_mask(aidlSys.encapsulationModes));
     legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
-            aidl2legacy_AudioEncapsulationMetadataType_mask(aidl.encapsulationMetadataTypes));
+            aidl2legacy_AudioEncapsulationMetadataType_mask(
+                    aidlSys.encapsulationMetadataTypes));
     return legacy;
 }
 
-ConversionResult<media::AudioPortDeviceExt>
-legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(const audio_port_device_ext& legacy) {
-    media::AudioPortDeviceExt aidl;
-    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl.device.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
-    aidl.device.address = VALUE_OR_RETURN(
-            legacy2aidl_string(legacy.address, sizeof(legacy.address)));
-    aidl.encapsulationModes = VALUE_OR_RETURN(
+status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+        const audio_port_device_ext& legacy,
+        AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+    aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl->device = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+    aidlDeviceExt->encapsulationModes = VALUE_OR_RETURN_STATUS(
             legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
-    aidl.encapsulationMetadataTypes = VALUE_OR_RETURN(
+    aidlDeviceExt->encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
             legacy2aidl_AudioEncapsulationMetadataType_mask(legacy.encapsulation_metadata_types));
-    return aidl;
+    return OK;
 }
 
 ConversionResult<audio_port_mix_ext>
-aidl2legacy_AudioPortMixExt_audio_port_mix_ext(const media::AudioPortMixExt& aidl) {
-    audio_port_mix_ext legacy;
-    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidl.hwModule));
+aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+        const AudioPortMixExt& aidl, const media::AudioPortMixExtSys& aidlSys) {
+    audio_port_mix_ext legacy{};
+    legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
     legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
-    legacy.latency_class = VALUE_OR_RETURN(
-            aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(aidl.latencyClass));
     return legacy;
 }
 
-ConversionResult<media::AudioPortMixExt>
-legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy) {
-    media::AudioPortMixExt aidl;
-    aidl.hwModule = VALUE_OR_RETURN(legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
-    aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
-    aidl.latencyClass = VALUE_OR_RETURN(
-            legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(legacy.latency_class));
-    return aidl;
+status_t
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy,
+        AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt) {
+    aidlMixExt->hwModule = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
+    aidl->handle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+    return OK;
 }
 
 ConversionResult<audio_port_session_ext>
-aidl2legacy_AudioPortSessionExt_audio_port_session_ext(const media::AudioPortSessionExt& aidl) {
+aidl2legacy_int32_t_audio_port_session_ext(int32_t aidl) {
     audio_port_session_ext legacy;
-    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
+    legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl));
     return legacy;
 }
 
-ConversionResult<media::AudioPortSessionExt>
-legacy2aidl_audio_port_session_ext_AudioPortSessionExt(const audio_port_session_ext& legacy) {
-    media::AudioPortSessionExt aidl;
-    aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
-    return aidl;
+ConversionResult<int32_t>
+legacy2aidl_audio_port_session_ext_int32_t(const audio_port_session_ext& legacy) {
+    return legacy2aidl_audio_session_t_int32_t(legacy.session);
 }
 
 // This type is unnamed in the original definition, thus we name it here.
 using audio_port_v7_ext = decltype(audio_port_v7::ext);
 
-ConversionResult<audio_port_v7_ext> aidl2legacy_AudioPortExt(
-        const media::AudioPortExt& aidl, media::AudioPortType type) {
+ConversionResult<audio_port_v7_ext> aidl2legacy_AudioPortExt_audio_port_v7_ext(
+        const AudioPortExt& aidl, media::AudioPortType type,
+        const media::AudioPortExtSys& aidlSys) {
     audio_port_v7_ext legacy;
     switch (type) {
         case media::AudioPortType::NONE:
@@ -1865,66 +2804,83 @@
         case media::AudioPortType::DEVICE:
             legacy.device = VALUE_OR_RETURN(
                     aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, device))));
+                            VALUE_OR_RETURN(UNION_GET(aidl, device)),
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, device))));
             return legacy;
         case media::AudioPortType::MIX:
             legacy.mix = VALUE_OR_RETURN(
                     aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
-                            VALUE_OR_RETURN(UNION_GET(aidl, mix))));
+                            VALUE_OR_RETURN(UNION_GET(aidl, mix)),
+                            VALUE_OR_RETURN(UNION_GET(aidlSys, mix))));
             return legacy;
         case media::AudioPortType::SESSION:
-            legacy.session = VALUE_OR_RETURN(aidl2legacy_AudioPortSessionExt_audio_port_session_ext(
-                    VALUE_OR_RETURN(UNION_GET(aidl, session))));
+            legacy.session = VALUE_OR_RETURN(
+                    aidl2legacy_int32_t_audio_port_session_ext(
+                            VALUE_OR_RETURN(UNION_GET(aidl, session))));
             return legacy;
 
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
 }
 
-ConversionResult<media::AudioPortExt> legacy2aidl_AudioPortExt(
-        const audio_port_v7_ext& legacy, audio_port_type_t type) {
-    media::AudioPortExt aidl;
+status_t legacy2aidl_AudioPortExt(
+        const audio_port_v7_ext& legacy, audio_port_type_t type,
+        AudioPortExt* aidl, media::AudioPortExtSys* aidlSys) {
     switch (type) {
         case AUDIO_PORT_TYPE_NONE:
-            UNION_SET(aidl, unspecified, false);
-            return aidl;
-        case AUDIO_PORT_TYPE_DEVICE:
-            UNION_SET(aidl, device,
-                      VALUE_OR_RETURN(
-                              legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy.device)));
-            return aidl;
-        case AUDIO_PORT_TYPE_MIX:
-            UNION_SET(aidl, mix,
-                      VALUE_OR_RETURN(legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy.mix)));
-            return aidl;
+            UNION_SET(*aidl, unspecified, false);
+            UNION_SET(*aidlSys, unspecified, false);
+            return OK;
+        case AUDIO_PORT_TYPE_DEVICE: {
+            AudioPortDeviceExt device;
+            media::AudioPortDeviceExtSys deviceSys;
+            RETURN_STATUS_IF_ERROR(
+                    legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+                            legacy.device, &device, &deviceSys));
+            UNION_SET(*aidl, device, device);
+            UNION_SET(*aidlSys, device, deviceSys);
+            return OK;
+        }
+        case AUDIO_PORT_TYPE_MIX: {
+            AudioPortMixExt mix;
+            media::AudioPortMixExtSys mixSys;
+            RETURN_STATUS_IF_ERROR(
+                    legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+                            legacy.mix, &mix, &mixSys));
+            UNION_SET(*aidl, mix, mix);
+            UNION_SET(*aidlSys, mix, mixSys);
+            return OK;
+        }
         case AUDIO_PORT_TYPE_SESSION:
-            UNION_SET(aidl, session,
-                      VALUE_OR_RETURN(legacy2aidl_audio_port_session_ext_AudioPortSessionExt(
-                              legacy.session)));
-            return aidl;
+            UNION_SET(*aidl, session, VALUE_OR_RETURN_STATUS(
+                            legacy2aidl_audio_port_session_ext_int32_t(legacy.session)));
+            UNION_SET(*aidlSys, unspecified, false);
+            return OK;
     }
     LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
 }
 
 ConversionResult<audio_profile>
-aidl2legacy_AudioProfile_audio_profile(const media::AudioProfile& aidl) {
+aidl2legacy_AudioProfile_audio_profile(const AudioProfile& aidl, bool isInput) {
     audio_profile legacy;
-    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+    legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
 
-    if (aidl.samplingRates.size() > std::size(legacy.sample_rates)) {
+    if (aidl.sampleRates.size() > std::size(legacy.sample_rates)) {
         return unexpected(BAD_VALUE);
     }
     RETURN_IF_ERROR(
-            convertRange(aidl.samplingRates.begin(), aidl.samplingRates.end(), legacy.sample_rates,
+            convertRange(aidl.sampleRates.begin(), aidl.sampleRates.end(), legacy.sample_rates,
                          convertIntegral<int32_t, unsigned int>));
-    legacy.num_sample_rates = aidl.samplingRates.size();
+    legacy.num_sample_rates = aidl.sampleRates.size();
 
     if (aidl.channelMasks.size() > std::size(legacy.channel_masks)) {
         return unexpected(BAD_VALUE);
     }
     RETURN_IF_ERROR(
             convertRange(aidl.channelMasks.begin(), aidl.channelMasks.end(), legacy.channel_masks,
-                         aidl2legacy_int32_t_audio_channel_mask_t));
+                    [isInput](const AudioChannelLayout& l) {
+                        return aidl2legacy_AudioChannelLayout_audio_channel_mask_t(l, isInput);
+                    }));
     legacy.num_channel_masks = aidl.channelMasks.size();
 
     legacy.encapsulation_type = VALUE_OR_RETURN(
@@ -1932,17 +2888,17 @@
     return legacy;
 }
 
-ConversionResult<media::AudioProfile>
-legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy) {
-    media::AudioProfile aidl;
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+ConversionResult<AudioProfile>
+legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy, bool isInput) {
+    AudioProfile aidl;
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
 
     if (legacy.num_sample_rates > std::size(legacy.sample_rates)) {
         return unexpected(BAD_VALUE);
     }
     RETURN_IF_ERROR(
             convertRange(legacy.sample_rates, legacy.sample_rates + legacy.num_sample_rates,
-                         std::back_inserter(aidl.samplingRates),
+                         std::back_inserter(aidl.sampleRates),
                          convertIntegral<unsigned int, int32_t>));
 
     if (legacy.num_channel_masks > std::size(legacy.channel_masks)) {
@@ -1951,7 +2907,9 @@
     RETURN_IF_ERROR(
             convertRange(legacy.channel_masks, legacy.channel_masks + legacy.num_channel_masks,
                          std::back_inserter(aidl.channelMasks),
-                         legacy2aidl_audio_channel_mask_t_int32_t));
+                    [isInput](audio_channel_mask_t m) {
+                        return legacy2aidl_audio_channel_mask_t_AudioChannelLayout(m, isInput);
+                    }));
 
     aidl.encapsulationType = VALUE_OR_RETURN(
             legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
@@ -1960,11 +2918,11 @@
 }
 
 ConversionResult<audio_gain>
-aidl2legacy_AudioGain_audio_gain(const media::AudioGain& aidl) {
+aidl2legacy_AudioGain_audio_gain(const AudioGain& aidl, bool isInput) {
     audio_gain legacy;
     legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t_mask(aidl.mode));
-    legacy.channel_mask = VALUE_OR_RETURN(
-            aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+    legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                    aidl.channelMask, isInput));
     legacy.min_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.minValue));
     legacy.max_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.maxValue));
     legacy.default_value = VALUE_OR_RETURN(convertIntegral<int>(aidl.defaultValue));
@@ -1974,12 +2932,12 @@
     return legacy;
 }
 
-ConversionResult<media::AudioGain>
-legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy) {
-    media::AudioGain aidl;
+ConversionResult<AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput) {
+    AudioGain aidl;
     aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t_mask(legacy.mode));
     aidl.channelMask = VALUE_OR_RETURN(
-            legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
     aidl.minValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.min_value));
     aidl.maxValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.max_value));
     aidl.defaultValue = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.default_value));
@@ -1992,63 +2950,76 @@
 ConversionResult<audio_port_v7>
 aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl) {
     audio_port_v7 legacy;
-    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
-    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
-    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
-    RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));
+    legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
+    legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
+    legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.sys.type));
+    RETURN_IF_ERROR(aidl2legacy_string(aidl.hal.name, legacy.name, sizeof(legacy.name)));
 
-    if (aidl.profiles.size() > std::size(legacy.audio_profiles)) {
+    if (aidl.hal.profiles.size() > std::size(legacy.audio_profiles)) {
         return unexpected(BAD_VALUE);
     }
-    RETURN_IF_ERROR(convertRange(aidl.profiles.begin(), aidl.profiles.end(), legacy.audio_profiles,
-                                 aidl2legacy_AudioProfile_audio_profile));
-    legacy.num_audio_profiles = aidl.profiles.size();
+    const bool isInput =
+            VALUE_OR_RETURN(direction(aidl.sys.role, aidl.sys.type)) == Direction::INPUT;
+    RETURN_IF_ERROR(convertRange(
+                    aidl.hal.profiles.begin(), aidl.hal.profiles.end(), legacy.audio_profiles,
+                    [isInput](const AudioProfile& p) {
+                        return aidl2legacy_AudioProfile_audio_profile(p, isInput);
+                    }));
+    legacy.num_audio_profiles = aidl.hal.profiles.size();
 
-    if (aidl.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
+    if (aidl.hal.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
         return unexpected(BAD_VALUE);
     }
     RETURN_IF_ERROR(
-            convertRange(aidl.extraAudioDescriptors.begin(), aidl.extraAudioDescriptors.end(),
-                         legacy.extra_audio_descriptors,
-                         aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
-    legacy.num_extra_audio_descriptors = aidl.extraAudioDescriptors.size();
+            convertRange(
+                    aidl.hal.extraAudioDescriptors.begin(), aidl.hal.extraAudioDescriptors.end(),
+                    legacy.extra_audio_descriptors,
+                    aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
+    legacy.num_extra_audio_descriptors = aidl.hal.extraAudioDescriptors.size();
 
-    if (aidl.gains.size() > std::size(legacy.gains)) {
+    if (aidl.hal.gains.size() > std::size(legacy.gains)) {
         return unexpected(BAD_VALUE);
     }
-    RETURN_IF_ERROR(convertRange(aidl.gains.begin(), aidl.gains.end(), legacy.gains,
-                                 aidl2legacy_AudioGain_audio_gain));
-    legacy.num_gains = aidl.gains.size();
+    RETURN_IF_ERROR(convertRange(aidl.hal.gains.begin(), aidl.hal.gains.end(), legacy.gains,
+                                 [isInput](const AudioGain& g) {
+                                     return aidl2legacy_AudioGain_audio_gain(g, isInput);
+                                 }));
+    legacy.num_gains = aidl.hal.gains.size();
 
     legacy.active_config = VALUE_OR_RETURN(
-            aidl2legacy_AudioPortConfig_audio_port_config(aidl.activeConfig));
-    legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortExt(aidl.ext, aidl.type));
+            aidl2legacy_AudioPortConfig_audio_port_config(aidl.sys.activeConfig));
+    legacy.ext = VALUE_OR_RETURN(
+            aidl2legacy_AudioPortExt_audio_port_v7_ext(aidl.hal.ext, aidl.sys.type, aidl.sys.ext));
     return legacy;
 }
 
 ConversionResult<media::AudioPort>
 legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy) {
     media::AudioPort aidl;
-    aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
-    aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
-    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
-    aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+    aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+    aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+    aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+    aidl.hal.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
 
     if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
         return unexpected(BAD_VALUE);
     }
+    const bool isInput = VALUE_OR_RETURN(direction(legacy.role, legacy.type)) == Direction::INPUT;
     RETURN_IF_ERROR(
             convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
-                         std::back_inserter(aidl.profiles),
-                         legacy2aidl_audio_profile_AudioProfile));
+                         std::back_inserter(aidl.hal.profiles),
+                         [isInput](const audio_profile& p) {
+                             return legacy2aidl_audio_profile_AudioProfile(p, isInput);
+                         }));
 
     if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
         return unexpected(BAD_VALUE);
     }
+    aidl.sys.profiles.resize(legacy.num_audio_profiles);
     RETURN_IF_ERROR(
             convertRange(legacy.extra_audio_descriptors,
                     legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
-                    std::back_inserter(aidl.extraAudioDescriptors),
+                    std::back_inserter(aidl.hal.extraAudioDescriptors),
                     legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
 
     if (legacy.num_gains > std::size(legacy.gains)) {
@@ -2056,53 +3027,66 @@
     }
     RETURN_IF_ERROR(
             convertRange(legacy.gains, legacy.gains + legacy.num_gains,
-                         std::back_inserter(aidl.gains),
-                         legacy2aidl_audio_gain_AudioGain));
+                         std::back_inserter(aidl.hal.gains),
+                         [isInput](const audio_gain& g) {
+                             return legacy2aidl_audio_gain_AudioGain(g, isInput);
+                         }));
+    aidl.sys.gains.resize(legacy.num_gains);
 
-    aidl.activeConfig = VALUE_OR_RETURN(
+    aidl.sys.activeConfig = VALUE_OR_RETURN(
             legacy2aidl_audio_port_config_AudioPortConfig(legacy.active_config));
-    aidl.ext = VALUE_OR_RETURN(legacy2aidl_AudioPortExt(legacy.ext, legacy.type));
+    aidl.sys.activeConfig.hal.portId = aidl.hal.id;
+    RETURN_IF_ERROR(
+            legacy2aidl_AudioPortExt(legacy.ext, legacy.type, &aidl.hal.ext, &aidl.sys.ext));
     return aidl;
 }
 
 ConversionResult<audio_mode_t>
-aidl2legacy_AudioMode_audio_mode_t(media::AudioMode aidl) {
+aidl2legacy_AudioMode_audio_mode_t(AudioMode aidl) {
     switch (aidl) {
-        case media::AudioMode::INVALID:
+        case AudioMode::SYS_RESERVED_INVALID:
             return AUDIO_MODE_INVALID;
-        case media::AudioMode::CURRENT:
+        case AudioMode::SYS_RESERVED_CURRENT:
             return AUDIO_MODE_CURRENT;
-        case media::AudioMode::NORMAL:
+        case AudioMode::NORMAL:
             return AUDIO_MODE_NORMAL;
-        case media::AudioMode::RINGTONE:
+        case AudioMode::RINGTONE:
             return AUDIO_MODE_RINGTONE;
-        case media::AudioMode::IN_CALL:
+        case AudioMode::IN_CALL:
             return AUDIO_MODE_IN_CALL;
-        case media::AudioMode::IN_COMMUNICATION:
+        case AudioMode::IN_COMMUNICATION:
             return AUDIO_MODE_IN_COMMUNICATION;
-        case media::AudioMode::CALL_SCREEN:
+        case AudioMode::CALL_SCREEN:
             return AUDIO_MODE_CALL_SCREEN;
+        case AudioMode::SYS_RESERVED_CALL_REDIRECT:
+            return AUDIO_MODE_CALL_REDIRECT;
+        case AudioMode::SYS_RESERVED_COMMUNICATION_REDIRECT:
+            return AUDIO_MODE_COMMUNICATION_REDIRECT;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioMode>
+ConversionResult<AudioMode>
 legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy) {
     switch (legacy) {
         case AUDIO_MODE_INVALID:
-            return media::AudioMode::INVALID;
+            return AudioMode::SYS_RESERVED_INVALID;
         case AUDIO_MODE_CURRENT:
-            return media::AudioMode::CURRENT;
+            return AudioMode::SYS_RESERVED_CURRENT;
         case AUDIO_MODE_NORMAL:
-            return media::AudioMode::NORMAL;
+            return AudioMode::NORMAL;
         case AUDIO_MODE_RINGTONE:
-            return media::AudioMode::RINGTONE;
+            return AudioMode::RINGTONE;
         case AUDIO_MODE_IN_CALL:
-            return media::AudioMode::IN_CALL;
+            return AudioMode::IN_CALL;
         case AUDIO_MODE_IN_COMMUNICATION:
-            return media::AudioMode::IN_COMMUNICATION;
+            return AudioMode::IN_COMMUNICATION;
         case AUDIO_MODE_CALL_SCREEN:
-            return media::AudioMode::CALL_SCREEN;
+            return AudioMode::CALL_SCREEN;
+        case AUDIO_MODE_CALL_REDIRECT:
+            return AudioMode::SYS_RESERVED_CALL_REDIRECT;
+        case AUDIO_MODE_COMMUNICATION_REDIRECT:
+            return AudioMode::SYS_RESERVED_COMMUNICATION_REDIRECT;
         case AUDIO_MODE_CNT:
             break;
     }
@@ -2252,30 +3236,30 @@
 }
 
 ConversionResult<audio_standard_t>
-aidl2legacy_AudioStandard_audio_standard_t(media::AudioStandard aidl) {
+aidl2legacy_AudioStandard_audio_standard_t(AudioStandard aidl) {
     switch (aidl) {
-        case media::AudioStandard::NONE:
+        case AudioStandard::NONE:
             return AUDIO_STANDARD_NONE;
-        case media::AudioStandard::EDID:
+        case AudioStandard::EDID:
             return AUDIO_STANDARD_EDID;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioStandard>
+ConversionResult<AudioStandard>
 legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy) {
     switch (legacy) {
         case AUDIO_STANDARD_NONE:
-            return media::AudioStandard::NONE;
+            return AudioStandard::NONE;
         case AUDIO_STANDARD_EDID:
-            return media::AudioStandard::EDID;
+            return AudioStandard::EDID;
     }
     return unexpected(BAD_VALUE);
 }
 
 ConversionResult<audio_extra_audio_descriptor>
 aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
-        const media::ExtraAudioDescriptor& aidl) {
+        const ExtraAudioDescriptor& aidl) {
     audio_extra_audio_descriptor legacy;
     legacy.standard = VALUE_OR_RETURN(aidl2legacy_AudioStandard_audio_standard_t(aidl.standard));
     if (aidl.audioDescriptor.size() > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
@@ -2290,10 +3274,10 @@
     return legacy;
 }
 
-ConversionResult<media::ExtraAudioDescriptor>
+ConversionResult<ExtraAudioDescriptor>
 legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
         const audio_extra_audio_descriptor& legacy) {
-    media::ExtraAudioDescriptor aidl;
+    ExtraAudioDescriptor aidl;
     aidl.standard = VALUE_OR_RETURN(legacy2aidl_audio_standard_t_AudioStandard(legacy.standard));
     if (legacy.descriptor_length > EXTRA_AUDIO_DESCRIPTOR_SIZE) {
         return unexpected(BAD_VALUE);
@@ -2309,24 +3293,24 @@
 
 ConversionResult<audio_encapsulation_type_t>
 aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-        const media::AudioEncapsulationType& aidl) {
+        const AudioEncapsulationType& aidl) {
     switch (aidl) {
-        case media::AudioEncapsulationType::NONE:
+        case AudioEncapsulationType::NONE:
             return AUDIO_ENCAPSULATION_TYPE_NONE;
-        case media::AudioEncapsulationType::IEC61937:
+        case AudioEncapsulationType::IEC61937:
             return AUDIO_ENCAPSULATION_TYPE_IEC61937;
     }
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<media::AudioEncapsulationType>
+ConversionResult<AudioEncapsulationType>
 legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
         const audio_encapsulation_type_t & legacy) {
     switch (legacy) {
         case AUDIO_ENCAPSULATION_TYPE_NONE:
-            return media::AudioEncapsulationType::NONE;
+            return AudioEncapsulationType::NONE;
         case AUDIO_ENCAPSULATION_TYPE_IEC61937:
-            return media::AudioEncapsulationType::IEC61937;
+            return AudioEncapsulationType::IEC61937;
     }
     return unexpected(BAD_VALUE);
 }
@@ -2355,4 +3339,53 @@
     return trackSecondaryOutputInfo;
 }
 
+ConversionResult<audio_direct_mode_t>
+aidl2legacy_AudioDirectMode_audio_direct_mode_t(media::AudioDirectMode aidl) {
+    switch (aidl) {
+        case media::AudioDirectMode::NONE:
+            return AUDIO_DIRECT_NOT_SUPPORTED;
+        case media::AudioDirectMode::OFFLOAD:
+            return AUDIO_DIRECT_OFFLOAD_SUPPORTED;
+        case media::AudioDirectMode::OFFLOAD_GAPLESS:
+            return AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED;
+        case media::AudioDirectMode::BITSTREAM:
+            return AUDIO_DIRECT_BITSTREAM_SUPPORTED;
+    }
+    return unexpected(BAD_VALUE);
+}
+ConversionResult<media::AudioDirectMode>
+legacy2aidl_audio_direct_mode_t_AudioDirectMode(audio_direct_mode_t legacy) {
+    switch (legacy) {
+        case AUDIO_DIRECT_NOT_SUPPORTED:
+            return media::AudioDirectMode::NONE;
+        case AUDIO_DIRECT_OFFLOAD_SUPPORTED:
+            return media::AudioDirectMode::OFFLOAD;
+        case AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED:
+            return media::AudioDirectMode::OFFLOAD_GAPLESS;
+        case AUDIO_DIRECT_BITSTREAM_SUPPORTED:
+            return media::AudioDirectMode::BITSTREAM;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_direct_mode_t> aidl2legacy_int32_t_audio_direct_mode_t_mask(int32_t aidl) {
+    using LegacyMask = std::underlying_type_t<audio_direct_mode_t>;
+
+    LegacyMask converted = VALUE_OR_RETURN(
+            (convertBitmask<LegacyMask, int32_t, audio_direct_mode_t, media::AudioDirectMode>(
+                    aidl, aidl2legacy_AudioDirectMode_audio_direct_mode_t,
+                    indexToEnum_index<media::AudioDirectMode>,
+                    enumToMask_bitmask<LegacyMask, audio_direct_mode_t>)));
+    return static_cast<audio_direct_mode_t>(converted);
+}
+ConversionResult<int32_t> legacy2aidl_audio_direct_mode_t_int32_t_mask(audio_direct_mode_t legacy) {
+    using LegacyMask = std::underlying_type_t<audio_direct_mode_t>;
+
+    LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+    return convertBitmask<int32_t, LegacyMask, media::AudioDirectMode, audio_direct_mode_t>(
+            legacyMask, legacy2aidl_audio_direct_mode_t_AudioDirectMode,
+            indexToEnum_bitmask<audio_direct_mode_t>,
+            enumToMask_index<int32_t, media::AudioDirectMode>);
+}
+
 }  // namespace android
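For reference (not part of the patch): the AudioDirectMode helpers added at the end of this file convert between the legacy bit mask and the AIDL representation, which stores bits at enum indices. A minimal round-trip sketch, assuming <media/AidlConversion.h> and the VALUE_OR_RETURN_STATUS macro from the conversion utilities are in scope, as they are in the file above:

    #include <media/AidlConversion.h>

    namespace android {

    // Illustrative only: round-trips a direct-mode mask through the AIDL
    // int32_t form using the helpers introduced in this change. For valid
    // masks the two conversions are expected to be inverses of each other.
    status_t roundTripDirectModeMask(audio_direct_mode_t legacyIn) {
        const int32_t aidlMask = VALUE_OR_RETURN_STATUS(
                legacy2aidl_audio_direct_mode_t_int32_t_mask(legacyIn));
        const audio_direct_mode_t legacyOut = VALUE_OR_RETURN_STATUS(
                aidl2legacy_int32_t_audio_direct_mode_t_mask(aidlMask));
        return legacyOut == legacyIn ? OK : BAD_VALUE;
    }

    }  // namespace android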
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 3c3d340..69a9c68 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -11,6 +11,10 @@
     name: "libaudioclient_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
     host_supported: true,
 
     header_libs: [
@@ -41,7 +45,7 @@
     },
 }
 
-cc_library_shared {
+cc_library {
     name: "libaudiopolicy",
     srcs: [
         "AudioAttributes.cpp",
@@ -51,6 +55,7 @@
         "PolicyAidlConversion.cpp"
     ],
     shared_libs: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -71,6 +76,7 @@
     include_dirs: ["system/media/audio_utils/include"],
     export_include_dirs: ["include"],
     export_shared_lib_headers: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -111,6 +117,7 @@
         "TrackPlayerBase.cpp",
     ],
     shared_libs: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -134,8 +141,8 @@
         "libprocessgroup",
         "libshmemcompat",
         "libutils",
-        "libvibrator",
         "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
     export_shared_lib_headers: [
         "audioflinger-aidl-cpp",
@@ -143,6 +150,7 @@
         "spatializer-aidl-cpp",
         "framework-permission-aidl-cpp",
         "libbinder",
+        "libmediametrics",
     ],
 
     include_dirs: [
@@ -195,13 +203,15 @@
     ],
     header_libs: [
         "libbase_headers",
+        "liberror_headers",
     ],
     export_header_lib_headers: [
         "libbase_headers",
+        "liberror_headers",
     ],
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth.updatable",
+        "com.android.bluetooth",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -228,16 +238,19 @@
         "libaudioclient_aidl_conversion_util",
     ],
     shared_libs: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libbase",
         "libbinder",
         "liblog",
         "libshmemcompat",
+        "libstagefright_foundation",
         "libutils",
         "shared-file-region-aidl-cpp",
         "framework-permission-aidl-cpp",
     ],
     export_shared_lib_headers: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libbase",
         "shared-file-region-aidl-cpp",
@@ -307,57 +320,32 @@
     srcs: [
         "aidl/android/media/AudioAttributesInternal.aidl",
         "aidl/android/media/AudioClient.aidl",
-        "aidl/android/media/AudioConfig.aidl",
-        "aidl/android/media/AudioConfigBase.aidl",
-        "aidl/android/media/AudioContentType.aidl",
-        "aidl/android/media/AudioDevice.aidl",
+        "aidl/android/media/AudioDirectMode.aidl",
         "aidl/android/media/AudioDualMonoMode.aidl",
-        "aidl/android/media/AudioEncapsulationMode.aidl",
-        "aidl/android/media/AudioEncapsulationMetadataType.aidl",
-        "aidl/android/media/AudioEncapsulationType.aidl",
         "aidl/android/media/AudioFlag.aidl",
-        "aidl/android/media/AudioGain.aidl",
-        "aidl/android/media/AudioGainConfig.aidl",
-        "aidl/android/media/AudioGainMode.aidl",
-        "aidl/android/media/AudioInputFlags.aidl",
+        "aidl/android/media/AudioGainSys.aidl",
         "aidl/android/media/AudioIoConfigEvent.aidl",
         "aidl/android/media/AudioIoDescriptor.aidl",
-        "aidl/android/media/AudioIoFlags.aidl",
-        "aidl/android/media/AudioMixLatencyClass.aidl",
-        "aidl/android/media/AudioMode.aidl",
-        "aidl/android/media/AudioOffloadInfo.aidl",
-        "aidl/android/media/AudioOutputFlags.aidl",
         "aidl/android/media/AudioPatch.aidl",
         "aidl/android/media/AudioPlaybackRate.aidl",
         "aidl/android/media/AudioPort.aidl",
+        "aidl/android/media/AudioPortSys.aidl",
         "aidl/android/media/AudioPortConfig.aidl",
-        "aidl/android/media/AudioPortConfigType.aidl",
-        "aidl/android/media/AudioPortConfigDeviceExt.aidl",
-        "aidl/android/media/AudioPortConfigExt.aidl",
-        "aidl/android/media/AudioPortConfigMixExt.aidl",
-        "aidl/android/media/AudioPortConfigMixExtUseCase.aidl",
-        "aidl/android/media/AudioPortConfigSessionExt.aidl",
-        "aidl/android/media/AudioPortDeviceExt.aidl",
-        "aidl/android/media/AudioPortExt.aidl",
-        "aidl/android/media/AudioPortMixExt.aidl",
+        "aidl/android/media/AudioPortConfigSys.aidl",
+        "aidl/android/media/AudioPortDeviceExtSys.aidl",
+        "aidl/android/media/AudioPortExtSys.aidl",
+        "aidl/android/media/AudioPortMixExtSys.aidl",
         "aidl/android/media/AudioPortRole.aidl",
-        "aidl/android/media/AudioPortSessionExt.aidl",
         "aidl/android/media/AudioPortType.aidl",
-        "aidl/android/media/AudioProfile.aidl",
-        "aidl/android/media/AudioSourceType.aidl",
-        "aidl/android/media/AudioStandard.aidl",
-        "aidl/android/media/AudioStreamType.aidl",
+        "aidl/android/media/AudioProfileSys.aidl",
         "aidl/android/media/AudioTimestampInternal.aidl",
         "aidl/android/media/AudioUniqueIdUse.aidl",
-        "aidl/android/media/AudioUsage.aidl",
-        "aidl/android/media/AudioUuid.aidl",
         "aidl/android/media/AudioVibratorInfo.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
-        "aidl/android/media/ExtraAudioDescriptor.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
     ],
     imports: [
-        "audio_common-aidl",
+        "android.media.audio.common.types-V1",
         "framework-permission-aidl",
     ],
     backend: {
@@ -368,6 +356,9 @@
                 "com.android.media",
             ],
         },
+        java: {
+            sdk_version: "module_current",
+        },
     },
 }
 aidl_interface {
@@ -398,7 +389,7 @@
         "aidl/android/media/SpatializerHeadTrackingMode.aidl",
     ],
     imports: [
-        "audio_common-aidl",
+        "android.media.audio.common.types-V1",
         "audioclient-types-aidl",
     ],
     backend: {
@@ -409,6 +400,9 @@
                 "com.android.media",
             ],
         },
+        java: {
+            sdk_version: "module_current",
+        },
     },
 }
 
@@ -438,7 +432,7 @@
         "aidl/android/media/IAudioTrackCallback.aidl",
     ],
     imports: [
-        "audio_common-aidl",
+        "android.media.audio.common.types-V1",
         "audioclient-types-aidl",
         "av-types-aidl",
         "effect-aidl",
@@ -454,6 +448,9 @@
                 "com.android.media",
             ],
         },
+        java: {
+            sdk_version: "module_current",
+        },
     },
 }
 
@@ -467,13 +464,12 @@
         "aidl/android/media/GetInputForAttrResponse.aidl",
         "aidl/android/media/GetOutputForAttrResponse.aidl",
         "aidl/android/media/GetSpatializerResponse.aidl",
-        "aidl/android/media/Int.aidl",
         "aidl/android/media/RecordClientInfo.aidl",
         "aidl/android/media/IAudioPolicyService.aidl",
         "aidl/android/media/IAudioPolicyServiceClient.aidl",
     ],
     imports: [
-        "audio_common-aidl",
+        "android.media.audio.common.types-V1",
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
@@ -490,6 +486,9 @@
                 "com.android.media",
             ],
         },
+        java: {
+            sdk_version: "module_current",
+        },
     },
 }
 
@@ -517,5 +516,8 @@
                 "com.android.media",
             ],
         },
+        java: {
+            sdk_version: "module_current",
+        },
     },
 }
diff --git a/media/libaudioclient/AudioAttributes.cpp b/media/libaudioclient/AudioAttributes.cpp
index 83bf5a7..260c06c 100644
--- a/media/libaudioclient/AudioAttributes.cpp
+++ b/media/libaudioclient/AudioAttributes.cpp
@@ -24,9 +24,6 @@
 #include <media/AudioAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
-#define RETURN_STATUS_IF_ERROR(x) \
-    { auto _tmp = (x); if (_tmp != OK) return _tmp; }
-
 namespace android {
 
 status_t AudioAttributes::readFromParcel(const Parcel* parcel) {
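Note (not part of the patch): the RETURN_STATUS_IF_ERROR macro deleted above, and the identical copy deleted from AudioEffect.cpp below, are the early-return helper sketched here. The per-file definitions go away, presumably because a shared conversion-utility header now provides the macro (an assumption; the new location is not shown in these hunks). Callers keep the same shape:

    #include <utils/Errors.h>  // status_t, OK

    namespace android {

    // Early-return helper, as previously defined locally in these files.
    #define RETURN_STATUS_IF_ERROR(x)    \
        {                                \
            auto _tmp = (x);             \
            if (_tmp != OK) return _tmp; \
        }

    // Hypothetical caller, illustrative only: stops at the first failing step.
    static status_t runTwoSteps(status_t (*step1)(), status_t (*step2)()) {
        RETURN_STATUS_IF_ERROR(step1());
        RETURN_STATUS_IF_ERROR(step2());
        return OK;
    }

    }  // namespace android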
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 9091599..7b273ec 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -32,16 +32,12 @@
 #include <private/media/AudioEffectShared.h>
 #include <utils/Log.h>
 
-#define RETURN_STATUS_IF_ERROR(x)    \
-    {                                \
-        auto _tmp = (x);             \
-        if (_tmp != OK) return _tmp; \
-    }
-
 namespace android {
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
 using media::IAudioPolicyService;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioUuid;
 
 namespace {
 
@@ -65,8 +61,7 @@
 status_t AudioEffect::set(const effect_uuid_t *type,
                 const effect_uuid_t *uuid,
                 int32_t priority,
-                effect_callback_t cbf,
-                void* user,
+                const wp<IAudioEffectCallback>& callback,
                 audio_session_t sessionId,
                 audio_io_handle_t io,
                 const AudioDeviceTypeAddr& device,
@@ -77,7 +72,7 @@
     sp<IMemory> cblk;
     int enabled;
 
-    ALOGV("set %p mUserData: %p uuid: %p timeLow %08x", this, user, type, type ? type->timeLow : 0);
+    ALOGV("set %p uuid: %p timeLow %08x", this, type, type ? type->timeLow : 0);
 
     if (mIEffect != 0) {
         ALOGW("Effect already in use");
@@ -100,9 +95,8 @@
     }
     mProbe = probe;
     mPriority = priority;
-    mCbf = cbf;
-    mUserData = user;
     mSessionId = sessionId;
+    mCallback = callback;
 
     memset(&mDescriptor, 0, sizeof(effect_descriptor_t));
     mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
@@ -132,6 +126,9 @@
     mStatus = audioFlinger->createEffect(request, &response);
 
     if (mStatus == OK) {
+        if (response.alreadyExists) {
+            mStatus = ALREADY_EXISTS;
+        }
         mId = response.id;
         enabled = response.enabled;
         iEffect = response.effect;
@@ -188,11 +185,60 @@
     return mStatus;
 }
 
+namespace {
+class LegacyCallbackWrapper : public AudioEffect::IAudioEffectCallback {
+ public:
+    LegacyCallbackWrapper(AudioEffect::legacy_callback_t callback, void* user):
+            mCallback(callback), mUser(user) {}
+ private:
+    void onControlStatusChanged(bool isGranted) override {
+        mCallback(AudioEffect::EVENT_CONTROL_STATUS_CHANGED, mUser, &isGranted);
+    }
+
+    void onEnableStatusChanged(bool isEnabled) override {
+        mCallback(AudioEffect::EVENT_ENABLE_STATUS_CHANGED, mUser, &isEnabled);
+    }
+
+    void onParameterChanged(std::vector<uint8_t> param) override {
+        mCallback(AudioEffect::EVENT_PARAMETER_CHANGED, mUser, param.data());
+    }
+
+    void onError(status_t errorCode) override {
+        mCallback(AudioEffect::EVENT_ERROR, mUser, &errorCode);
+    }
+
+    void onFramesProcessed(int32_t framesProcessed) override {
+        mCallback(AudioEffect::EVENT_FRAMES_PROCESSED, mUser, &framesProcessed);
+    }
+
+    const AudioEffect::legacy_callback_t mCallback;
+    void* const mUser;
+};
+} // namespace
+
+status_t AudioEffect::set(const effect_uuid_t *type,
+                const effect_uuid_t *uuid,
+                int32_t priority,
+                legacy_callback_t cbf,
+                void* user,
+                audio_session_t sessionId,
+                audio_io_handle_t io,
+                const AudioDeviceTypeAddr& device,
+                bool probe,
+                bool notifyFramesProcessed)
+{
+    if (cbf != nullptr) {
+        mLegacyWrapper = sp<LegacyCallbackWrapper>::make(cbf, user);
+    } else if (user != nullptr) {
+        LOG_ALWAYS_FATAL("%s: User provided without callback", __func__);
+    }
+    return set(type, uuid, priority, mLegacyWrapper, sessionId, io, device, probe,
+               notifyFramesProcessed);
+}
 status_t AudioEffect::set(const char *typeStr,
                 const char *uuidStr,
                 int32_t priority,
-                effect_callback_t cbf,
-                void* user,
+                const wp<IAudioEffectCallback>& callback,
                 audio_session_t sessionId,
                 audio_io_handle_t io,
                 const AudioDeviceTypeAddr& device,
@@ -214,11 +260,29 @@
         pUuid = &uuid;
     }
 
-    return set(pType, pUuid, priority, cbf, user, sessionId, io,
+    return set(pType, pUuid, priority, callback, sessionId, io,
                device, probe, notifyFramesProcessed);
 }
 
-
+status_t AudioEffect::set(const char *typeStr,
+                const char *uuidStr,
+                int32_t priority,
+                legacy_callback_t cbf,
+                void* user,
+                audio_session_t sessionId,
+                audio_io_handle_t io,
+                const AudioDeviceTypeAddr& device,
+                bool probe,
+                bool notifyFramesProcessed)
+{
+    if (cbf != nullptr) {
+        mLegacyWrapper = sp<LegacyCallbackWrapper>::make(cbf, user);
+    } else if (user != nullptr) {
+        LOG_ALWAYS_FATAL("%s: User provided without callback", __func__);
+    }
+    return set(typeStr, uuidStr, priority, mLegacyWrapper, sessionId, io, device, probe,
+               notifyFramesProcessed);
+}
 AudioEffect::~AudioEffect()
 {
     ALOGV("Destructor %p", this);
@@ -472,9 +536,9 @@
 {
     ALOGW("IEffect died");
     mStatus = DEAD_OBJECT;
-    if (mCbf != NULL) {
-        status_t status = DEAD_OBJECT;
-        mCbf(EVENT_ERROR, mUserData, &status);
+    auto cb = mCallback.promote();
+    if (cb != nullptr) {
+        cb->onError(mStatus);
     }
     mIEffect.clear();
 }
@@ -483,8 +547,8 @@
 
 void AudioEffect::controlStatusChanged(bool controlGranted)
 {
-    ALOGV("controlStatusChanged %p control %d callback %p mUserData %p", this, controlGranted, mCbf,
-            mUserData);
+    auto cb = mCallback.promote();
+    ALOGV("controlStatusChanged %p control %d callback %p", this, controlGranted, cb.get());
     if (controlGranted) {
         if (mStatus == ALREADY_EXISTS) {
             mStatus = NO_ERROR;
@@ -494,18 +558,19 @@
             mStatus = ALREADY_EXISTS;
         }
     }
-    if (mCbf != NULL) {
-        mCbf(EVENT_CONTROL_STATUS_CHANGED, mUserData, &controlGranted);
+    if (cb != nullptr) {
+        cb->onControlStatusChanged(controlGranted);
     }
 }
 
 void AudioEffect::enableStatusChanged(bool enabled)
 {
-    ALOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf);
+    auto cb = mCallback.promote();
+    ALOGV("enableStatusChanged %p enabled %d mCallback %p", this, enabled, cb.get());
     if (mStatus == ALREADY_EXISTS) {
         mEnabled = enabled;
-        if (mCbf != NULL) {
-            mCbf(EVENT_ENABLE_STATUS_CHANGED, mUserData, &enabled);
+        if (cb != nullptr) {
+            cb->onEnableStatusChanged(enabled);
         }
     }
 }
@@ -517,19 +582,20 @@
     if (cmdData.empty() || replyData.empty()) {
         return;
     }
-
-    if (mCbf != NULL && cmdCode == EFFECT_CMD_SET_PARAM) {
+    auto cb = mCallback.promote();
+    if (cb != nullptr && cmdCode == EFFECT_CMD_SET_PARAM) {
         std::vector<uint8_t> cmdDataCopy(cmdData);
         effect_param_t* cmd = reinterpret_cast<effect_param_t *>(cmdDataCopy.data());
         cmd->status = *reinterpret_cast<const int32_t *>(replyData.data());
-        mCbf(EVENT_PARAMETER_CHANGED, mUserData, cmd);
+        cb->onParameterChanged(std::move(cmdDataCopy));
     }
 }
 
 void AudioEffect::framesProcessed(int32_t frames)
 {
-    if (mCbf != NULL) {
-        mCbf(EVENT_FRAMES_PROCESSED, mUserData, &frames);
+    auto cb = mCallback.promote();
+    if (cb != nullptr) {
+        cb->onFramesProcessed(frames);
     }
 }
 
@@ -571,7 +637,7 @@
 
     int32_t audioSessionAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_session_t_int32_t(audioSession));
-    media::Int countAidl;
+    media::audio::common::Int countAidl;
     countAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*count));
     std::vector<media::EffectDescriptor> retAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -619,12 +685,12 @@
         uuid = *EFFECT_UUID_NULL;
     }
 
-    media::AudioUuid typeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(type));
-    media::AudioUuid uuidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(uuid));
+    AudioUuid typeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(type));
+    AudioUuid uuidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(uuid));
     std::string opPackageNameAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_String16_string(opPackageName));
-    media::AudioSourceType sourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(source));
+    AudioSource sourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(source));
     int32_t retAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->addSourceDefaultEffect(typeAidl, opPackageNameAidl, uuidAidl, priority, sourceAidl,
@@ -662,11 +728,11 @@
         uuid = *EFFECT_UUID_NULL;
     }
 
-    media::AudioUuid typeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(type));
-    media::AudioUuid uuidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(uuid));
+    AudioUuid typeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(type));
+    AudioUuid uuidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(uuid));
     std::string opPackageNameAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_String16_string(opPackageName));
-    media::AudioUsage usageAidl = VALUE_OR_RETURN_STATUS(
+    media::audio::common::AudioUsage usageAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_usage_t_AudioUsage(usage));
     int32_t retAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
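
Taken together, the AudioEffect changes replace the C-style (effect_callback_t, void*) pair with a listener interface that the effect holds only weakly; the legacy entry points survive by wrapping the old callback in LegacyCallbackWrapper. A minimal client-side sketch, assuming the IAudioEffectCallback methods declared in media/AudioEffect.h have default no-op bodies (an assumption; override the remaining methods if they turn out to be pure virtual):

    // Minimal listener sketch (assumes default no-op base implementations).
    class MyEffectListener : public android::AudioEffect::IAudioEffectCallback {
        void onEnableStatusChanged(bool enabled) override {
            ALOGI("effect %s", enabled ? "enabled" : "disabled");
        }
        void onError(android::status_t err) override {
            ALOGE("effect error %d (e.g. DEAD_OBJECT when the IEffect dies)", err);
        }
    };

    // The AudioEffect keeps only a wp<> to the listener, so the caller must hold
    // the sp<> for as long as it wants callbacks:
    //     sp<MyEffectListener> listener = sp<MyEffectListener>::make();
    //     effect->set(type, uuid, 0 /*priority*/, listener, sessionId);
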
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index f98027a..ecd423a 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -21,9 +21,6 @@
 #include <media/AudioAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
-#define RETURN_STATUS_IF_ERROR(x) \
-    { auto _tmp = (x); if (_tmp != OK) return _tmp; }
-
 namespace android {
 
 status_t AudioProductStrategy::readFromParcel(const Parcel* parcel) {
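
The per-file copy of RETURN_STATUS_IF_ERROR is dropped here as well. For reference, the removed macro was the usual early-return guard shown below; the shared definition is presumably provided by a common conversion header reached through PolicyAidlConversion.h (an assumption, since that header is not part of this hunk).

    // Early-return guard previously defined locally (shown for reference only).
    #define RETURN_STATUS_IF_ERROR(x)    \
        {                                \
            auto _tmp = (x);             \
            if (_tmp != OK) return _tmp; \
        }
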
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index da21771..15203d6 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -20,8 +20,10 @@
 
 #include <inttypes.h>
 #include <android-base/macros.h>
+#include <android-base/stringprintf.h>
 #include <sys/resource.h>
 
+#include <audio_utils/format.h>
 #include <audiomanager/AudioManager.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/Binder.h>
@@ -39,6 +41,7 @@
 
 namespace android {
 
+using ::android::base::StringPrintf;
 using android::content::AttributionSourceState;
 using aidl_utils::statusTFromBinderStatus;
 
@@ -66,8 +69,9 @@
 
     // We double the size of input buffer for ping pong use of record buffer.
     // Assumes audio_is_linear_pcm(format)
-    if ((*frameCount = (size * 2) / (audio_channel_count_from_in_mask(channelMask) *
-            audio_bytes_per_sample(format))) == 0) {
+    const auto sampleSize = audio_channel_count_from_in_mask(channelMask) *
+                                      audio_bytes_per_sample(format);
+    if (sampleSize == 0 || ((*frameCount = (size * 2) / sampleSize) == 0)) {
         ALOGE("%s(): Unsupported configuration: sampleRate %u, format %#x, channelMask %#x",
                 __func__, sampleRate, format, channelMask);
         return BAD_VALUE;
@@ -142,7 +146,7 @@
         audio_channel_mask_t channelMask,
         const AttributionSourceState& client,
         size_t frameCount,
-        callback_t cbf,
+        legacy_callback_t callback,
         void* user,
         uint32_t notificationFrames,
         audio_session_t sessionId,
@@ -162,7 +166,39 @@
 {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
-    (void)set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
+    (void)set(inputSource, sampleRate, format, channelMask, frameCount, callback, user,
+            notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
+            uid, pid, pAttributes, selectedDeviceId, selectedMicDirection,
+            microphoneFieldDimension);
+}
+
+AudioRecord::AudioRecord(
+        audio_source_t inputSource,
+        uint32_t sampleRate,
+        audio_format_t format,
+        audio_channel_mask_t channelMask,
+        const AttributionSourceState& client,
+        size_t frameCount,
+        const wp<IAudioRecordCallback>& callback,
+        uint32_t notificationFrames,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        audio_input_flags_t flags,
+        const audio_attributes_t* pAttributes,
+        audio_port_handle_t selectedDeviceId,
+        audio_microphone_direction_t selectedMicDirection,
+        float microphoneFieldDimension)
+    : mActive(false),
+      mStatus(NO_INIT),
+      mClientAttributionSource(client),
+      mSessionId(AUDIO_SESSION_ALLOCATE),
+      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
+      mPreviousSchedulingGroup(SP_DEFAULT),
+      mProxy(nullptr)
+{
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
+    (void)set(inputSource, sampleRate, format, channelMask, frameCount, callback,
             notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
             uid, pid, pAttributes, selectedDeviceId, selectedMicDirection,
             microphoneFieldDimension);
@@ -205,26 +241,58 @@
     // Otherwise the callback thread will never exit.
     stop();
     if (mAudioRecordThread != 0) {
-        mProxy->interrupt();
         mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
+        mProxy->interrupt();
         mAudioRecordThread->requestExitAndWait();
         mAudioRecordThread.clear();
     }
-    // No lock here: worst case we remove a NULL callback which will be a nop
+
+    AutoMutex lock(mLock);
     if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
         // This may not stop all of these device callbacks!
         // TODO: Add some sort of protection.
         AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
+        mDeviceCallback.clear();
     }
 }
+namespace {
+class LegacyCallbackWrapper : public AudioRecord::IAudioRecordCallback {
+    const AudioRecord::legacy_callback_t mCallback;
+    void* const mData;
+
+  public:
+    LegacyCallbackWrapper(AudioRecord::legacy_callback_t callback, void* user)
+        : mCallback(callback), mData(user) {}
+
+    size_t onMoreData(const AudioRecord::Buffer& buffer) override {
+        AudioRecord::Buffer copy = buffer;
+        mCallback(AudioRecord::EVENT_MORE_DATA, mData, &copy);
+        return copy.size();
+    }
+
+    void onOverrun() override { mCallback(AudioRecord::EVENT_OVERRUN, mData, nullptr); }
+
+    void onMarker(uint32_t markerPosition) override {
+        mCallback(AudioRecord::EVENT_MARKER, mData, &markerPosition);
+    }
+
+    void onNewPos(uint32_t newPos) override {
+        mCallback(AudioRecord::EVENT_NEW_POS, mData, &newPos);
+    }
+
+    void onNewIAudioRecord() override {
+        mCallback(AudioRecord::EVENT_NEW_IAUDIORECORD, mData, nullptr);
+    }
+};
+}  // namespace
+
 status_t AudioRecord::set(
         audio_source_t inputSource,
         uint32_t sampleRate,
         audio_format_t format,
         audio_channel_mask_t channelMask,
         size_t frameCount,
-        callback_t cbf,
-        void* user,
+        const wp<IAudioRecordCallback>& callback,
         uint32_t notificationFrames,
         bool threadCanCallJava,
         audio_session_t sessionId,
@@ -239,8 +307,8 @@
         int32_t maxSharedAudioHistoryMs)
 {
     status_t status = NO_ERROR;
-    uint32_t channelCount;
-
+    LOG_ALWAYS_FATAL_IF(mInitialized, "%s: should not be called twice", __func__);
+    mInitialized = true;
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "notificationFrames %u, sessionId %d, transferType %d, flags %#x, attributionSource %s"
@@ -271,39 +339,9 @@
     mSelectedMicFieldDimension = microphoneFieldDimension;
     mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
 
-    switch (transferType) {
-    case TRANSFER_DEFAULT:
-        if (cbf == NULL || threadCanCallJava) {
-            transferType = TRANSFER_SYNC;
-        } else {
-            transferType = TRANSFER_CALLBACK;
-        }
-        break;
-    case TRANSFER_CALLBACK:
-        if (cbf == NULL) {
-            ALOGE("%s(): Transfer type TRANSFER_CALLBACK but cbf == NULL", __func__);
-            status = BAD_VALUE;
-            goto exit;
-        }
-        break;
-    case TRANSFER_OBTAIN:
-    case TRANSFER_SYNC:
-        break;
-    default:
-        ALOGE("%s(): Invalid transfer type %d", __func__, transferType);
-        status = BAD_VALUE;
-        goto exit;
-    }
-    mTransfer = transferType;
-
-    // invariant that mAudioRecord != 0 is true only after set() returns successfully
-    if (mAudioRecord != 0) {
-        ALOGE("%s(): Track already in use", __func__);
-        status = INVALID_OPERATION;
-        goto exit;
-    }
-
-    if (pAttributes == NULL) {
+    std::string errorMessage;
+    // Copy the state variables early so they are available for error reporting.
+    if (pAttributes == nullptr) {
         mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
         mAttributes.source = inputSource;
         if (inputSource == AUDIO_SOURCE_VOICE_COMMUNICATION
@@ -314,37 +352,74 @@
     } else {
         // stream type shouldn't be looked at, this track has audio attributes
         memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
-        ALOGV("%s(): Building AudioRecord with attributes: source=%d flags=0x%x tags=[%s]",
+        ALOGV("%s: Building AudioRecord with attributes: source=%d flags=0x%x tags=[%s]",
                 __func__, mAttributes.source, mAttributes.flags, mAttributes.tags);
     }
-
     mSampleRate = sampleRate;
-
-    // these below should probably come from the audioFlinger too...
     if (format == AUDIO_FORMAT_DEFAULT) {
         format = AUDIO_FORMAT_PCM_16_BIT;
     }
-
-    // validate parameters
-    // AudioFlinger capture only supports linear PCM
-    if (!audio_is_valid_format(format) || !audio_is_linear_pcm(format)) {
-        ALOGE("%s(): Format %#x is not linear pcm", __func__, format);
-        status = BAD_VALUE;
-        goto exit;
+    if (!audio_is_linear_pcm(format)) {
+        // Compressed capture requires direct
+        flags = (audio_input_flags_t) (flags | AUDIO_INPUT_FLAG_DIRECT);
+        ALOGI("%s(): Format %#x is not linear pcm. Setting DIRECT, using flags %#x", __func__,
+              format, flags);
     }
     mFormat = format;
-
-    if (!audio_is_input_channel(channelMask)) {
-        ALOGE("%s(): Invalid channel mask %#x", __func__, channelMask);
-        status = BAD_VALUE;
-        goto exit;
-    }
     mChannelMask = channelMask;
-    channelCount = audio_channel_count_from_in_mask(channelMask);
-    mChannelCount = channelCount;
+    mSessionId = sessionId;
+    ALOGV("%s: mSessionId %d", __func__, mSessionId);
+    mOrigFlags = mFlags = flags;
 
-    if (audio_is_linear_pcm(format)) {
-        mFrameSize = channelCount * audio_bytes_per_sample(format);
+    mTransfer = transferType;
+    switch (mTransfer) {
+    case TRANSFER_DEFAULT:
+        if (callback == nullptr || threadCanCallJava) {
+            mTransfer = TRANSFER_SYNC;
+        } else {
+            mTransfer = TRANSFER_CALLBACK;
+        }
+        break;
+    case TRANSFER_CALLBACK:
+        if (callback == nullptr) {
+            errorMessage = StringPrintf(
+                    "%s: Transfer type TRANSFER_CALLBACK but callback == nullptr", __func__);
+            status = BAD_VALUE;
+            goto error;
+        }
+        break;
+    case TRANSFER_OBTAIN:
+    case TRANSFER_SYNC:
+        break;
+    default:
+        errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, mTransfer);
+        status = BAD_VALUE;
+        goto error;
+    }
+
+    // invariant that mAudioRecord != 0 is true only after set() returns successfully
+    if (mAudioRecord != 0) {
+        errorMessage = StringPrintf("%s: Track already in use", __func__);
+        status = INVALID_OPERATION;
+        goto error;
+    }
+
+    if (!audio_is_valid_format(mFormat)) {
+        errorMessage = StringPrintf("%s: Format %#x is not valid", __func__, mFormat);
+        status = BAD_VALUE;
+        goto error;
+    }
+
+    if (!audio_is_input_channel(mChannelMask)) {
+        errorMessage = StringPrintf("%s: Invalid channel mask %#x", __func__, mChannelMask);
+        status = BAD_VALUE;
+        goto error;
+    }
+
+    mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
+
+    if (audio_is_linear_pcm(mFormat)) {
+        mFrameSize = mChannelCount * audio_bytes_per_sample(mFormat);
     } else {
         mFrameSize = sizeof(uint8_t);
     }
@@ -355,13 +430,8 @@
     mNotificationFramesReq = notificationFrames;
     // mNotificationFramesAct is initialized in createRecord_l
 
-    mSessionId = sessionId;
-    ALOGV("%s(): mSessionId %d", __func__, mSessionId);
-
-    mOrigFlags = mFlags = flags;
-    mCbf = cbf;
-
-    if (cbf != NULL) {
+    mCallback = callback;
+    if (mCallback != nullptr) {
         mAudioRecordThread = new AudioRecordThread(*this);
         mAudioRecordThread->run("AudioRecord", ANDROID_PRIORITY_AUDIO);
         // thread begins in paused state, and will not reference us until start()
@@ -381,10 +451,10 @@
             mAudioRecordThread->requestExitAndWait();
             mAudioRecordThread.clear();
         }
+        // bypass error message to avoid logging twice (createRecord_l logs the error).
         goto exit;
     }
 
-    mUserData = user;
     // TODO: add audio hardware input latency here
     mLatency = (1000LL * mFrameCount) / mSampleRate;
     mMarkerPosition = 0;
@@ -398,14 +468,48 @@
     mFramesRead = 0;
     mFramesReadServerOffset = 0;
 
-exit:
-    mStatus = status;
+error:
     if (status != NO_ERROR) {
         mMediaMetrics.markError(status, __FUNCTION__);
+        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
     }
+exit:
+    mStatus = status;
     return status;
 }
 
+status_t AudioRecord::set(
+        audio_source_t inputSource,
+        uint32_t sampleRate,
+        audio_format_t format,
+        audio_channel_mask_t channelMask,
+        size_t frameCount,
+        legacy_callback_t callback,
+        void* user,
+        uint32_t notificationFrames,
+        bool threadCanCallJava,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        audio_input_flags_t flags,
+        uid_t uid,
+        pid_t pid,
+        const audio_attributes_t* pAttributes,
+        audio_port_handle_t selectedDeviceId,
+        audio_microphone_direction_t selectedMicDirection,
+        float microphoneFieldDimension,
+        int32_t maxSharedAudioHistoryMs)
+{
+    if (callback != nullptr) {
+        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
+    } else if (user) {
+        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
+    }
+    return set(inputSource, sampleRate, format, channelMask, frameCount, mLegacyCallbackWrapper,
+        notificationFrames, threadCanCallJava, sessionId, transferType, flags, uid, pid,
+        pAttributes, selectedDeviceId, selectedMicDirection, microphoneFieldDimension,
+        maxSharedAudioHistoryMs);
+}
 // -------------------------------------------------------------------------
 
 status_t AudioRecord::start(AudioSystem::sync_event_t event, audio_session_t triggerSession)
@@ -535,12 +639,12 @@
 
 status_t AudioRecord::setMarkerPosition(uint32_t marker)
 {
+    AutoMutex lock(mLock);
     // The only purpose of setting marker position is to get a callback
-    if (mCbf == NULL) {
+    if (mCallback == nullptr) {
         return INVALID_OPERATION;
     }
 
-    AutoMutex lock(mLock);
     mMarkerPosition = marker;
     mMarkerReached = false;
 
@@ -565,12 +669,12 @@
 
 status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod)
 {
+    AutoMutex lock(mLock);
     // The only purpose of setting position update period is to get a callback
-    if (mCbf == NULL) {
+    if (mCallback == nullptr) {
         return INVALID_OPERATION;
     }
 
-    AutoMutex lock(mLock);
     mNewPosition = mProxy->getPosition() + updatePeriod;
     mUpdatePeriod = updatePeriod;
 
@@ -621,6 +725,11 @@
     if (status == OK) {
         timestamp->mPosition[ExtendedTimestamp::LOCATION_CLIENT] = mFramesRead;
         timestamp->mTimeNs[ExtendedTimestamp::LOCATION_CLIENT] = 0;
+        if (!audio_is_linear_pcm(mFormat)) {
+            // Don't do retrograde corrections or server offset if track is
+            // compressed
+            return OK;
+        }
         // server side frame offset in case AudioRecord has been restored.
         for (int i = ExtendedTimestamp::LOCATION_SERVER;
                 i < ExtendedTimestamp::LOCATION_MAX; ++i) {
@@ -668,6 +777,8 @@
 // ---- Explicit Routing ---------------------------------------------------
 status_t AudioRecord::setInputDevice(audio_port_handle_t deviceId) {
     AutoMutex lock(mLock);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
+            __func__, mPortId, deviceId, mSelectedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
@@ -754,15 +865,16 @@
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
     IAudioFlinger::CreateRecordInput input;
     IAudioFlinger::CreateRecordOutput output;
-    audio_session_t originalSessionId;
+    [[maybe_unused]] audio_session_t originalSessionId;
     void *iMemPointer;
     audio_track_cblk_t* cblk;
     status_t status;
     static const int32_t kMaxCreateAttempts = 3;
     int32_t remainingAttempts = kMaxCreateAttempts;
+    std::string errorMessage;
 
     if (audioFlinger == 0) {
-        ALOGE("%s(%d): Could not get audioflinger", __func__, mPortId);
+        errorMessage = StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId);
         status = NO_INIT;
         goto exit;
     }
@@ -828,8 +940,9 @@
             break;
         }
         if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
-            ALOGE("%s(%d): AudioFlinger could not create record track, status: %d",
-                  __func__, mPortId, status);
+            errorMessage = StringPrintf(
+                    "%s(%d): AudioFlinger could not create record track, status: %d",
+                    __func__, mPortId, status);
             goto exit;
         }
         // FAILED_TRANSACTION happens under very specific conditions causing a state mismatch
@@ -856,9 +969,13 @@
     mRoutedDeviceId = output.selectedDeviceId;
     mSessionId = output.sessionId;
     mSampleRate = output.sampleRate;
+    mServerConfig = output.serverConfig;
+    mServerFrameSize = audio_bytes_per_frame(
+            audio_channel_count_from_in_mask(mServerConfig.channel_mask), mServerConfig.format);
+    mServerSampleSize = audio_bytes_per_sample(mServerConfig.format);
 
     if (output.cblk == 0) {
-        ALOGE("%s(%d): Could not get control block", __func__, mPortId);
+        errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
         status = NO_INIT;
         goto exit;
     }
@@ -868,7 +985,8 @@
     //       issue (e.g. by copying).
     iMemPointer = output.cblk ->unsecurePointer();
     if (iMemPointer == NULL) {
-        ALOGE("%s(%d): Could not get control block pointer", __func__, mPortId);
+        errorMessage = StringPrintf(
+                "%s(%d): Could not get control block pointer", __func__, mPortId);
         status = NO_INIT;
         goto exit;
     }
@@ -887,7 +1005,8 @@
         //       issue (e.g. by copying).
         buffers = output.buffers->unsecurePointer();
         if (buffers == NULL) {
-            ALOGE("%s(%d): Could not get buffer pointer", __func__, mPortId);
+            errorMessage = StringPrintf(
+                    "%s(%d): Could not get buffer pointer", __func__, mPortId);
             status = NO_INIT;
             goto exit;
         }
@@ -919,6 +1038,10 @@
                 mNotificationFramesReq, output.notificationFrameCount, output.frameCount);
     }
     mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
+    if (mServerConfig.format != mFormat && mCallback != nullptr) {
+        mFormatConversionBufRaw = std::make_unique<uint8_t[]>(mNotificationFramesAct * mFrameSize);
+        mFormatConversionBuffer.raw = mFormatConversionBufRaw.get();
+    }
 
     //mInput != input includes the case where mInput == AUDIO_IO_HANDLE_NONE for first creation
     if (mDeviceCallback != 0) {
@@ -945,7 +1068,7 @@
     }
 
     // update proxy
-    mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mFrameSize);
+    mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mServerFrameSize);
     mProxy->setEpoch(epoch);
     mProxy->setMinimum(mNotificationFramesAct);
 
@@ -978,11 +1101,38 @@
         .record();
 
 exit:
+    if (status != NO_ERROR) {
+        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+    }
+
     mStatus = status;
     // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
     return status;
 }
 
+// Report error associated with the event and some configuration details.
+void AudioRecord::reportError(status_t status, const char *event, const char *message) const
+{
+    if (status == NO_ERROR) return;
+    // We report error on the native side because some callers do not come
+    // from Java.
+    // Ensure these variables are initialized in set().
+    mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_RECORD_ERROR)
+        .set(AMEDIAMETRICS_PROP_EVENT, event)
+        .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+        .set(AMEDIAMETRICS_PROP_STATUSMESSAGE, message)
+        .set(AMEDIAMETRICS_PROP_ORIGINALFLAGS, toString(mOrigFlags).c_str())
+        .set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)mSessionId)
+        .set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
+        .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
+        .set(AMEDIAMETRICS_PROP_ENCODING, toString(mFormat).c_str())
+        .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)mChannelMask)
+        .set(AMEDIAMETRICS_PROP_FRAMECOUNT, (int32_t)mFrameCount)
+        .set(AMEDIAMETRICS_PROP_SAMPLERATE, (int32_t)mSampleRate)
+        .record();
+}
+
 status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount, size_t *nonContig)
 {
     if (audioBuffer == NULL) {
@@ -993,7 +1143,7 @@
     }
     if (mTransfer != TRANSFER_OBTAIN) {
         audioBuffer->frameCount = 0;
-        audioBuffer->size = 0;
+        audioBuffer->mSize = 0;
         audioBuffer->raw = NULL;
         if (nonContig != NULL) {
             *nonContig = 0;
@@ -1046,7 +1196,13 @@
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
                 if (newSequence == oldSequence) {
-                    status = restoreRecord_l("obtainBuffer");
+                    if (!audio_is_linear_pcm(mFormat)) {
+                        // If compressed capture, don't attempt to restore the track.
+                        // Return a DEAD_OBJECT error and let the caller recreate.
+                        tryCounter = 0;
+                    } else {
+                        status = restoreRecord_l("obtainBuffer");
+                    }
                     if (status != NO_ERROR) {
                         buffer.mFrameCount = 0;
                         buffer.mRaw = NULL;
@@ -1076,7 +1232,7 @@
     } while ((status == DEAD_OBJECT) && (tryCounter-- > 0));
 
     audioBuffer->frameCount = buffer.mFrameCount;
-    audioBuffer->size = buffer.mFrameCount * mFrameSize;
+    audioBuffer->mSize = buffer.mFrameCount * mServerFrameSize;
     audioBuffer->raw = buffer.mRaw;
     audioBuffer->sequence = oldSequence;
     if (nonContig != NULL) {
@@ -1089,7 +1245,7 @@
 {
     // FIXME add error checking on mode, by adding an internal version
 
-    size_t stepCount = audioBuffer->size / mFrameSize;
+    size_t stepCount = audioBuffer->frameCount;
     if (stepCount == 0) {
         return;
     }
@@ -1151,8 +1307,9 @@
             return ssize_t(err);
         }
 
-        size_t bytesRead = audioBuffer.size;
-        memcpy(buffer, audioBuffer.i8, bytesRead);
+        size_t bytesRead = audioBuffer.frameCount * mFrameSize;
+        memcpy_by_audio_format(buffer, mFormat, audioBuffer.raw, mServerConfig.format,
+                               audioBuffer.mSize / mServerSampleSize);
         buffer = ((char *) buffer) + bytesRead;
         userSize -= bytesRead;
         read += bytesRead;
@@ -1171,6 +1328,12 @@
 nsecs_t AudioRecord::processAudioBuffer()
 {
     mLock.lock();
+    const sp<IAudioRecordCallback> callback = mCallback.promote();
+    if (!callback) {
+        mCallback = nullptr;
+        mLock.unlock();
+        return NS_NEVER;
+    }
     if (mAwaitBoost) {
         mAwaitBoost = false;
         mLock.unlock();
@@ -1246,26 +1409,26 @@
     uint32_t sequence = mSequence;
 
     // These fields don't need to be cached, because they are assigned only by set():
-    //      mTransfer, mCbf, mUserData, mSampleRate, mFrameSize
+    //      mTransfer, mCallback, mSampleRate, mFrameSize
 
     mLock.unlock();
 
     // perform callbacks while unlocked
     if (newOverrun) {
-        mCbf(EVENT_OVERRUN, mUserData, NULL);
+        callback->onOverrun();
+
     }
     if (markerReached) {
-        mCbf(EVENT_MARKER, mUserData, &markerPosition);
+        callback->onMarker(markerPosition.value());
     }
     while (newPosCount > 0) {
-        size_t temp = newPosition.value(); // FIXME size_t != uint32_t
-        mCbf(EVENT_NEW_POS, mUserData, &temp);
+        callback->onNewPos(newPosition.value());
         newPosition += updatePeriod;
         newPosCount--;
     }
     if (mObservedSequence != sequence) {
         mObservedSequence = sequence;
-        mCbf(EVENT_NEW_IAUDIORECORD, mUserData, NULL);
+        callback->onNewIAudioRecord();
     }
 
     // if inactive, then don't run me again until re-started
@@ -1349,9 +1512,19 @@
             }
         }
 
-        size_t reqSize = audioBuffer.size;
-        mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
-        size_t readSize = audioBuffer.size;
+        Buffer* buffer = &audioBuffer;
+        if (mServerConfig.format != mFormat) {
+            buffer = &mFormatConversionBuffer;
+            buffer->frameCount = audioBuffer.frameCount;
+            buffer->mSize = buffer->frameCount * mFrameSize;
+            buffer->sequence = audioBuffer.sequence;
+            memcpy_by_audio_format(buffer->raw, mFormat, audioBuffer.raw,
+                                   mServerConfig.format, audioBuffer.size() / mServerSampleSize);
+        }
+
+        const size_t reqSize = buffer->size();
+        const size_t readSize = callback->onMoreData(*buffer);
+        buffer->mSize = readSize;
 
         // Validate on returned size
         if (ssize_t(readSize) < 0 || readSize > reqSize) {
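
On the AudioRecord side, set() now takes a wp<IAudioRecordCallback>, the legacy (callback, user) overloads wrap into LegacyCallbackWrapper, and processAudioBuffer() promotes the weak reference before each delivery, returning NS_NEVER once the listener is gone. A minimal TRANSFER_CALLBACK sketch, assuming the IAudioRecordCallback methods not overridden here have default no-op bodies (an assumption):

    // Minimal TRANSFER_CALLBACK listener sketch (assumes no-op base methods).
    class MyRecordListener : public android::AudioRecord::IAudioRecordCallback {
        size_t onMoreData(const android::AudioRecord::Buffer& buffer) override {
            // Consume up to buffer.size() bytes starting at buffer.raw and return
            // how many bytes were actually taken.
            return buffer.size();
        }
        void onOverrun() override { ALOGW("record overrun"); }
    };

    // As with AudioEffect, the record holds only a wp<> to the listener; keep the
    // sp<> alive on the client side or the callback thread stops delivering events.
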
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index c7967e5..de8c298 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -40,19 +40,25 @@
        if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
        std::move(_tmp.value()); })
 
-#define RETURN_STATUS_IF_ERROR(x)    \
-    {                                \
-        auto _tmp = (x);             \
-        if (_tmp != OK) return _tmp; \
-    }
-
 // ----------------------------------------------------------------------------
 
 namespace android {
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
+using content::AttributionSourceState;
 using media::IAudioPolicyService;
-using android::content::AttributionSourceState;
+using media::audio::common::AudioConfig;
+using media::audio::common::AudioConfigBase;
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioMMapPolicyInfo;
+using media::audio::common::AudioMMapPolicyType;
+using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioUsage;
+using media::audio::common::Int;
 
 // client singleton for AudioFlinger binder interface
 Mutex AudioSystem::gLock;
@@ -64,6 +70,7 @@
 dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
 record_config_callback AudioSystem::gRecordConfigCallback = NULL;
 routing_callback AudioSystem::gRoutingCallback = NULL;
+vol_range_init_req_callback AudioSystem::gVolRangeInitReqCallback = NULL;
 
 // Required to be held while calling into gSoundTriggerCaptureStateListener.
 class CaptureStateListenerImpl;
@@ -336,7 +343,7 @@
     if (desc == 0) {
         *samplingRate = af->sampleRate(ioHandle);
     } else {
-        *samplingRate = desc->mSamplingRate;
+        *samplingRate = desc->getSamplingRate();
     }
     if (*samplingRate == 0) {
         ALOGE("AudioSystem::getSamplingRate failed for ioHandle %d", ioHandle);
@@ -371,7 +378,7 @@
     if (desc == 0) {
         *frameCount = af->frameCount(ioHandle);
     } else {
-        *frameCount = desc->mFrameCount;
+        *frameCount = desc->getFrameCount();
     }
     if (*frameCount == 0) {
         ALOGE("AudioSystem::getFrameCount failed for ioHandle %d", ioHandle);
@@ -406,7 +413,7 @@
     if (outputDesc == 0) {
         *latency = af->latency(output);
     } else {
-        *latency = outputDesc->mLatency;
+        *latency = outputDesc->getLatency();
     }
 
     ALOGV("getLatency() output %d, latency %d", output, *latency);
@@ -494,7 +501,7 @@
     if (desc == 0) {
         *frameCount = af->frameCountHAL(ioHandle);
     } else {
-        *frameCount = desc->mFrameCountHAL;
+        *frameCount = desc->getFrameCountHAL();
     }
     if (*frameCount == 0) {
         ALOGE("AudioSystem::getFrameCountHAL failed for ioHandle %d", ioHandle);
@@ -535,15 +542,15 @@
 Status AudioSystem::AudioFlingerClient::ioConfigChanged(
         media::AudioIoConfigEvent _event,
         const media::AudioIoDescriptor& _ioDesc) {
-    audio_io_config_event event = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioIoConfigEvent_audio_io_config_event(_event));
+    audio_io_config_event_t event = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(_event));
     sp<AudioIoDescriptor> ioDesc(
             VALUE_OR_RETURN_BINDER_STATUS(
                     aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(_ioDesc)));
 
     ALOGV("ioConfigChanged() event %d", event);
 
-    if (ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return Status::ok();
+    if (ioDesc->getIoHandle() == AUDIO_IO_HANDLE_NONE) return Status::ok();
 
     audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
     std::vector<sp<AudioDeviceCallback>> callbacksToCall;
@@ -556,93 +563,88 @@
             case AUDIO_OUTPUT_REGISTERED:
             case AUDIO_INPUT_OPENED:
             case AUDIO_INPUT_REGISTERED: {
-                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle());
                 if (oldDesc == 0) {
-                    mIoDescriptors.add(ioDesc->mIoHandle, ioDesc);
+                    mIoDescriptors.add(ioDesc->getIoHandle(), ioDesc);
                 } else {
                     deviceId = oldDesc->getDeviceId();
-                    mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
+                    mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
                 }
 
                 if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
                     deviceId = ioDesc->getDeviceId();
                     if (event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED) {
-                        auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+                        auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
                         if (it != mAudioDeviceCallbacks.end()) {
                             callbacks = it->second;
                         }
                     }
                 }
-                ALOGV("ioConfigChanged() new %s %s %d samplingRate %u, format %#x channel mask %#x "
-                      "frameCount %zu deviceId %d",
+                ALOGV("ioConfigChanged() new %s %s %s",
                       event == AUDIO_OUTPUT_OPENED || event == AUDIO_OUTPUT_REGISTERED ?
                       "output" : "input",
                       event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED ?
                       "opened" : "registered",
-                      ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
-                      ioDesc->mChannelMask,
-                      ioDesc->mFrameCount, ioDesc->getDeviceId());
+                      ioDesc->toDebugString().c_str());
             }
                 break;
             case AUDIO_OUTPUT_CLOSED:
             case AUDIO_INPUT_CLOSED: {
-                if (getIoDescriptor_l(ioDesc->mIoHandle) == 0) {
+                if (getIoDescriptor_l(ioDesc->getIoHandle()) == 0) {
                     ALOGW("ioConfigChanged() closing unknown %s %d",
-                          event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
+                          event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->getIoHandle());
                     break;
                 }
                 ALOGV("ioConfigChanged() %s %d closed",
-                      event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
+                      event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->getIoHandle());
 
-                mIoDescriptors.removeItem(ioDesc->mIoHandle);
-                mAudioDeviceCallbacks.erase(ioDesc->mIoHandle);
+                mIoDescriptors.removeItem(ioDesc->getIoHandle());
+                mAudioDeviceCallbacks.erase(ioDesc->getIoHandle());
             }
                 break;
 
             case AUDIO_OUTPUT_CONFIG_CHANGED:
             case AUDIO_INPUT_CONFIG_CHANGED: {
-                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle());
                 if (oldDesc == 0) {
                     ALOGW("ioConfigChanged() modifying unknown %s! %d",
                           event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
-                          ioDesc->mIoHandle);
+                          ioDesc->getIoHandle());
                     break;
                 }
 
                 deviceId = oldDesc->getDeviceId();
-                mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
+                mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
 
                 if (deviceId != ioDesc->getDeviceId()) {
                     deviceId = ioDesc->getDeviceId();
-                    auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+                    auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
                     if (it != mAudioDeviceCallbacks.end()) {
                         callbacks = it->second;
                     }
                 }
-                ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
-                      "channel mask %#x frameCount %zu frameCountHAL %zu deviceId %d",
+                ALOGV("ioConfigChanged() new config for %s %s",
                       event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
-                      ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
-                      ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->mFrameCountHAL,
-                      ioDesc->getDeviceId());
+                      ioDesc->toDebugString().c_str());
 
             }
                 break;
             case AUDIO_CLIENT_STARTED: {
-                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
+                sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle());
                 if (oldDesc == 0) {
-                    ALOGW("ioConfigChanged() start client on unknown io! %d", ioDesc->mIoHandle);
+                    ALOGW("ioConfigChanged() start client on unknown io! %d",
+                            ioDesc->getIoHandle());
                     break;
                 }
                 ALOGV("ioConfigChanged() AUDIO_CLIENT_STARTED  io %d port %d num callbacks %zu",
-                      ioDesc->mIoHandle, ioDesc->mPortId, mAudioDeviceCallbacks.size());
-                oldDesc->mPatch = ioDesc->mPatch;
-                auto it = mAudioDeviceCallbacks.find(ioDesc->mIoHandle);
+                      ioDesc->getIoHandle(), ioDesc->getPortId(), mAudioDeviceCallbacks.size());
+                oldDesc->setPatch(ioDesc->getPatch());
+                auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
                 if (it != mAudioDeviceCallbacks.end()) {
                     auto cbks = it->second;
-                    auto it2 = cbks.find(ioDesc->mPortId);
+                    auto it2 = cbks.find(ioDesc->getPortId());
                     if (it2 != cbks.end()) {
-                        callbacks.emplace(ioDesc->mPortId, it2->second);
+                        callbacks.emplace(ioDesc->getPortId(), it2->second);
                         deviceId = oldDesc->getDeviceId();
                     }
                 }
@@ -661,8 +663,8 @@
     // Callbacks must be called without mLock held. May lead to dead lock if calling for
     // example getRoutedDevice that updates the device and tries to acquire mLock.
     for (auto cb  : callbacksToCall) {
-        // If callbacksToCall is not empty, it implies ioDesc->mIoHandle and deviceId are valid
-        cb->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
+        // If callbacksToCall is not empty, it implies ioDesc->getIoHandle() and deviceId are valid
+        cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceId);
     }
 
     return Status::ok();
@@ -780,6 +782,11 @@
     gRoutingCallback = cb;
 }
 
+/*static*/ void AudioSystem::setVolInitReqCallback(vol_range_init_req_callback cb) {
+    Mutex::Autolock _l(gLock);
+    gVolRangeInitReqCallback = cb;
+}
+
 // client singleton for AudioPolicyService binder interface
 // protected by gLockAPS
 sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
@@ -825,6 +832,11 @@
     return ap;
 }
 
+void AudioSystem::clearAudioPolicyService() {
+    Mutex::Autolock _l(gLockAPS);
+    gAudioPolicyService.clear();
+}
+
 // ---------------------------------------------------------------------------
 
 void AudioSystem::onNewAudioModulesAvailable() {
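
clearAudioPolicyService() gives the binder-death path and initStreamVolume() below a single way to drop the cached IAudioPolicyService proxy, so the next get_audio_policy_service() call rebinds to a restarted audiopolicy service. A hypothetical call-site retry, illustrative only (whether get_audio_policy_service() waits for the service to come back is an assumption here):

    // Hypothetical retry at a call site once DEAD_OBJECT clears the stale proxy.
    status_t status;
    do {
        status = AudioSystem::initStreamVolume(AUDIO_STREAM_MUSIC, 0 /*min*/, 100 /*max*/);
    } while (status == DEAD_OBJECT);
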
@@ -833,35 +845,20 @@
     aps->onNewAudioModulesAvailable();
 }
 
-status_t AudioSystem::setDeviceConnectionState(audio_devices_t device,
-                                               audio_policy_dev_state_t state,
-                                               const char* device_address,
-                                               const char* device_name,
+status_t AudioSystem::setDeviceConnectionState(audio_policy_dev_state_t state,
+                                               const android::media::audio::common::AudioPort& port,
                                                audio_format_t encodedFormat) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    const char* address = "";
-    const char* name = "";
 
     if (aps == 0) return PERMISSION_DENIED;
 
-    if (device_address != NULL) {
-        address = device_address;
-    }
-    if (device_name != NULL) {
-        name = device_name;
-    }
-
-    media::AudioDevice deviceAidl;
-    deviceAidl.type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
-    deviceAidl.address = address;
-
     return statusTFromBinderStatus(
             aps->setDeviceConnectionState(
-                    deviceAidl,
                     VALUE_OR_RETURN_STATUS(
                             legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(state)),
-                    name,
-                    VALUE_OR_RETURN_STATUS(legacy2aidl_audio_format_t_AudioFormat(encodedFormat))));
+                    port,
+                    VALUE_OR_RETURN_STATUS(
+                            legacy2aidl_audio_format_t_AudioFormatDescription(encodedFormat))));
 }
 
 audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
@@ -870,9 +867,8 @@
     if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
 
     auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
-        media::AudioDevice deviceAidl;
-        deviceAidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(device));
-        deviceAidl.address = device_address;
+        AudioDevice deviceAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_device_AudioDevice(device, device_address));
 
         media::AudioPolicyDeviceState result;
         RETURN_IF_ERROR(statusTFromBinderStatus(
@@ -900,13 +896,12 @@
         name = device_name;
     }
 
-    media::AudioDevice deviceAidl;
-    deviceAidl.type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
-    deviceAidl.address = address;
+    AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(device, address));
 
     return statusTFromBinderStatus(
             aps->handleDeviceConfigChange(deviceAidl, name, VALUE_OR_RETURN_STATUS(
-                    legacy2aidl_audio_format_t_AudioFormat(encodedFormat))));
+                    legacy2aidl_audio_format_t_AudioFormatDescription(encodedFormat))));
 }
 
 status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
@@ -955,7 +950,7 @@
     if (aps == 0) return AUDIO_IO_HANDLE_NONE;
 
     auto result = [&]() -> ConversionResult<audio_io_handle_t> {
-        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+        AudioStreamType streamAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
         int32_t outputAidl;
         RETURN_IF_ERROR(
@@ -975,7 +970,8 @@
                                        audio_output_flags_t flags,
                                        audio_port_handle_t* selectedDeviceId,
                                        audio_port_handle_t* portId,
-                                       std::vector<audio_io_handle_t>* secondaryOutputs) {
+                                       std::vector<audio_io_handle_t>* secondaryOutputs,
+                                       bool *isSpatialized) {
     if (attr == nullptr) {
         ALOGE("%s NULL audio attributes", __func__);
         return BAD_VALUE;
@@ -1003,8 +999,8 @@
     media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
-    media::AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_config_t_AudioConfig(*config));
+    AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
     int32_t flagsAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
     int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
@@ -1028,6 +1024,7 @@
     *portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(responseAidl.portId));
     *secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
             responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
+    *isSpatialized = responseAidl.isSpatialized;
 
     return OK;
 }
@@ -1097,8 +1094,8 @@
     int32_t inputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
     int32_t riidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_unique_id_t_int32_t(riid));
     int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
-    media::AudioConfigBase configAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_config_base_t_AudioConfigBase(*config));
+    AudioConfigBase configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(*config, true /*isInput*/));
     int32_t flagsAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
     int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_handle_t_int32_t(*selectedDeviceId));
@@ -1154,12 +1151,19 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     int32_t indexMinAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(indexMin));
     int32_t indexMaxAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(indexMax));
-    return statusTFromBinderStatus(
+    status_t status = statusTFromBinderStatus(
             aps->initStreamVolume(streamAidl, indexMinAidl, indexMaxAidl));
+    if (status == DEAD_OBJECT) {
+        // This is a critical operation since w/o proper stream volumes no audio
+        // will be heard. Make sure we recover from a failure in any case.
+        ALOGE("Received DEAD_OBJECT from APS, clearing the client");
+        clearAudioPolicyService();
+    }
+    return status;
 }
 
 status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
@@ -1168,10 +1172,11 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
-    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     return statusTFromBinderStatus(
             aps->setStreamVolumeIndex(streamAidl, deviceAidl, indexAidl));
 }
@@ -1182,9 +1187,10 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     int32_t indexAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getStreamVolumeIndex(streamAidl, deviceAidl, &indexAidl)));
@@ -1203,7 +1209,8 @@
     media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
     int32_t indexAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(index));
-    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     return statusTFromBinderStatus(
             aps->setVolumeIndexForAttributes(attrAidl, deviceAidl, indexAidl));
 }
@@ -1216,7 +1223,8 @@
 
     media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
-    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+    AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     int32_t indexAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getVolumeIndexForAttributes(attrAidl, deviceAidl, &indexAidl)));
@@ -1255,7 +1263,7 @@
     if (aps == 0) return PRODUCT_STRATEGY_NONE;
 
     auto result = [&]() -> ConversionResult<product_strategy_t> {
-        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+        AudioStreamType streamAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
         int32_t resultAidl;
         RETURN_IF_ERROR(statusTFromBinderStatus(
@@ -1265,23 +1273,9 @@
     return result.value_or(PRODUCT_STRATEGY_NONE);
 }
 
-audio_devices_t AudioSystem::getDevicesForStream(audio_stream_type_t stream) {
-    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
-    if (aps == 0) return AUDIO_DEVICE_NONE;
-
-    auto result = [&]() -> ConversionResult<audio_devices_t> {
-        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        int32_t resultAidl;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                aps->getDevicesForStream(streamAidl, &resultAidl)));
-        return aidl2legacy_int32_t_audio_devices_t(resultAidl);
-    }();
-    return result.value_or(AUDIO_DEVICE_NONE);
-}
-
 status_t AudioSystem::getDevicesForAttributes(const AudioAttributes& aa,
-                                              AudioDeviceTypeAddrVector* devices) {
+                                              AudioDeviceTypeAddrVector* devices,
+                                              bool forVolume) {
     if (devices == nullptr) {
         return BAD_VALUE;
     }
@@ -1290,9 +1284,9 @@
 
     media::AudioAttributesEx aaAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_AudioAttributes_AudioAttributesEx(aa));
-    std::vector<media::AudioDevice> retAidl;
+    std::vector<AudioDevice> retAidl;
     RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, &retAidl)));
+            statusTFromBinderStatus(aps->getDevicesForAttributes(aaAidl, forVolume, &retAidl)));
     *devices = VALUE_OR_RETURN_STATUS(
             convertContainer<AudioDeviceTypeAddrVector>(
                     retAidl,
@@ -1368,7 +1362,7 @@
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     int32_t inPastMsAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(inPastMs));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1382,7 +1376,7 @@
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     int32_t inPastMsAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(inPastMs));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1395,8 +1389,8 @@
     if (aps == 0) return PERMISSION_DENIED;
     if (state == NULL) return BAD_VALUE;
 
-    media::AudioSourceType streamAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(stream));
+    AudioSource streamAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(stream));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->isSourceActive(streamAidl, state)));
     return OK;
@@ -1430,19 +1424,16 @@
         }
         gAudioFlinger.clear();
     }
-    {
-        Mutex::Autolock _l(gLockAPS);
-        gAudioPolicyService.clear();
-    }
+    clearAudioPolicyService();
 }
 
 status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == nullptr) return PERMISSION_DENIED;
 
-    std::vector<media::AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioUsage>>(systemUsages,
-                                                             legacy2aidl_audio_usage_t_AudioUsage));
+    std::vector<AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioUsage>>(systemUsages,
+                                                      legacy2aidl_audio_usage_t_AudioUsage));
     return statusTFromBinderStatus(aps->setSupportedSystemUsages(systemUsagesAidl));
 }
 
@@ -1462,7 +1453,7 @@
     if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
 
     auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
-        media::AudioOffloadInfo infoAidl = VALUE_OR_RETURN(
+        AudioOffloadInfo infoAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_offload_info_t_AudioOffloadInfo(info));
         media::AudioOffloadMode retAidl;
         RETURN_IF_ERROR(
@@ -1490,7 +1481,7 @@
             legacy2aidl_audio_port_role_t_AudioPortRole(role));
     media::AudioPortType typeAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_port_type_t_AudioPortType(type));
-    media::Int numPortsAidl;
+    Int numPortsAidl;
     numPortsAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_ports));
     std::vector<media::AudioPort> portsAidl;
     int32_t generationAidl;
@@ -1508,13 +1499,12 @@
     if (port == nullptr) {
         return BAD_VALUE;
     }
-
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
+    media::AudioPort portAidl;
     RETURN_STATUS_IF_ERROR(
-            statusTFromBinderStatus(aps->getAudioPort(portAidl, &portAidl)));
+            statusTFromBinderStatus(aps->getAudioPort(port->id, &portAidl)));
     *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(portAidl));
     return OK;
 }
@@ -1557,7 +1547,7 @@
     if (aps == 0) return PERMISSION_DENIED;
 
 
-    media::Int numPatchesAidl;
+    Int numPatchesAidl;
     numPatchesAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
     std::vector<media::AudioPatch> patchesAidl;
     int32_t generationAidl;
@@ -1696,7 +1686,8 @@
             statusTFromBinderStatus(aps->acquireSoundTriggerSession(&retAidl)));
     *session = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_session_t(retAidl.session));
     *ioHandle = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(retAidl.ioHandle));
-    *device = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_devices_t(retAidl.device));
+    *device = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(retAidl.device));
     return OK;
 }
 
@@ -1713,7 +1704,7 @@
     if (aps == 0) return AUDIO_MODE_INVALID;
 
     auto result = [&]() -> ConversionResult<audio_mode_t> {
-        media::AudioMode retAidl;
+        media::audio::common::AudioMode retAidl;
         RETURN_IF_ERROR(statusTFromBinderStatus(aps->getPhoneState(&retAidl)));
         return aidl2legacy_AudioMode_audio_mode_t(retAidl);
     }();
@@ -1738,8 +1729,8 @@
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
                                                               legacy2aidl_AudioDeviceTypeAddress));
     return statusTFromBinderStatus(aps->setUidDeviceAffinities(uidAidl, devicesAidl));
 }
@@ -1758,9 +1749,9 @@
     if (aps == 0) return PERMISSION_DENIED;
 
     int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return statusTFromBinderStatus(
             aps->setUserIdDeviceAffinities(userIdAidl, devicesAidl));
 }
@@ -1833,10 +1824,11 @@
     if (aps == 0) return NAN;
 
     auto result = [&]() -> ConversionResult<float> {
-        media::AudioStreamType streamAidl = VALUE_OR_RETURN(
+        AudioStreamType streamAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
         int32_t indexAidl = VALUE_OR_RETURN(convertIntegral<int32_t>(index));
-        int32_t deviceAidl = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(device));
+        AudioDeviceDescription deviceAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
         float retAidl;
         RETURN_IF_ERROR(statusTFromBinderStatus(
                 aps->getStreamVolumeDB(streamAidl, indexAidl, deviceAidl, &retAidl)));
@@ -1868,10 +1860,10 @@
 
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    media::Int numSurroundFormatsAidl;
+    Int numSurroundFormatsAidl;
     numSurroundFormatsAidl.value =
             VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
-    std::vector<media::audio::common::AudioFormat> surroundFormatsAidl;
+    std::vector<AudioFormatDescription> surroundFormatsAidl;
     std::vector<bool> surroundFormatsEnabledAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getSurroundFormats(&numSurroundFormatsAidl, &surroundFormatsAidl,
@@ -1881,7 +1873,7 @@
             convertIntegral<unsigned int>(numSurroundFormatsAidl.value));
     RETURN_STATUS_IF_ERROR(
             convertRange(surroundFormatsAidl.begin(), surroundFormatsAidl.end(), surroundFormats,
-                         aidl2legacy_AudioFormat_audio_format_t));
+                         aidl2legacy_AudioFormatDescription_audio_format_t));
     std::copy(surroundFormatsEnabledAidl.begin(), surroundFormatsEnabledAidl.end(),
             surroundFormatsEnabled);
     return OK;
@@ -1895,10 +1887,10 @@
 
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
-    media::Int numSurroundFormatsAidl;
+    Int numSurroundFormatsAidl;
     numSurroundFormatsAidl.value =
             VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
-    std::vector<media::audio::common::AudioFormat> surroundFormatsAidl;
+    std::vector<AudioFormatDescription> surroundFormatsAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getReportedSurroundFormats(&numSurroundFormatsAidl, &surroundFormatsAidl)));
 
@@ -1906,7 +1898,7 @@
             convertIntegral<unsigned int>(numSurroundFormatsAidl.value));
     RETURN_STATUS_IF_ERROR(
             convertRange(surroundFormatsAidl.begin(), surroundFormatsAidl.end(), surroundFormats,
-                         aidl2legacy_AudioFormat_audio_format_t));
+                         aidl2legacy_AudioFormatDescription_audio_format_t));
     return OK;
 }
 
@@ -1914,26 +1906,28 @@
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    media::audio::common::AudioFormat audioFormatAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_format_t_AudioFormat(audioFormat));
+    AudioFormatDescription audioFormatAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_format_t_AudioFormatDescription(audioFormat));
     return statusTFromBinderStatus(
             aps->setSurroundFormatEnabled(audioFormatAidl, enabled));
 }
 
-status_t AudioSystem::setAssistantUid(uid_t uid) {
+status_t AudioSystem::setAssistantServicesUids(const std::vector<uid_t>& uids) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
-    return statusTFromBinderStatus(aps->setAssistantUid(uidAidl));
+    std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
+                convertContainer<std::vector<int32_t>>(uids, legacy2aidl_uid_t_int32_t));
+    return statusTFromBinderStatus(aps->setAssistantServicesUids(uidsAidl));
 }
 
-status_t AudioSystem::setHotwordDetectionServiceUid(uid_t uid) {
+status_t AudioSystem::setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids) {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
-    return statusTFromBinderStatus(aps->setHotwordDetectionServiceUid(uidAidl));
+    std::vector<int32_t> activeUidsAidl = VALUE_OR_RETURN_STATUS(
+                convertContainer<std::vector<int32_t>>(activeUids, legacy2aidl_uid_t_int32_t));
+    return statusTFromBinderStatus(aps->setActiveAssistantServicesUids(activeUidsAidl));
 }
 
 status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
@@ -1966,8 +1960,21 @@
     return result.value_or(false);
 }
 
-status_t AudioSystem::getHwOffloadEncodingFormatsSupportedForA2DP(
-        std::vector<audio_format_t>* formats) {
+bool AudioSystem::isUltrasoundSupported() {
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return false;
+
+    auto result = [&]() -> ConversionResult<bool> {
+        bool retVal;
+        RETURN_IF_ERROR(
+                statusTFromBinderStatus(aps->isUltrasoundSupported(&retVal)));
+        return retVal;
+    }();
+    return result.value_or(false);
+}
+
+status_t AudioSystem::getHwOffloadFormatsSupportedForBluetoothMedia(
+        audio_devices_t device, std::vector<audio_format_t>* formats) {
     if (formats == nullptr) {
         return BAD_VALUE;
     }
@@ -1976,12 +1983,15 @@
             & aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    std::vector<media::audio::common::AudioFormat> formatsAidl;
+    std::vector<AudioFormatDescription> formatsAidl;
+    AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            aps->getHwOffloadEncodingFormatsSupportedForA2DP(&formatsAidl)));
+            aps->getHwOffloadFormatsSupportedForBluetoothMedia(deviceAidl, &formatsAidl)));
     *formats = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<audio_format_t>>(formatsAidl,
-                                                          aidl2legacy_AudioFormat_audio_format_t));
+            convertContainer<std::vector<audio_format_t>>(
+                    formatsAidl,
+                    aidl2legacy_AudioFormatDescription_audio_format_t));
     return OK;
 }
 
@@ -2120,9 +2130,9 @@
 
     int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return statusTFromBinderStatus(
             aps->setDevicesRoleForStrategy(strategyAidl, roleAidl, devicesAidl));
 }
@@ -2148,7 +2158,7 @@
     }
     int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
-    std::vector<media::AudioDevice> devicesAidl;
+    std::vector<AudioDevice> devicesAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getDevicesForRoleAndStrategy(strategyAidl, roleAidl, &devicesAidl)));
     devices = VALUE_OR_RETURN_STATUS(
@@ -2165,12 +2175,12 @@
         return PERMISSION_DENIED;
     }
 
-    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(audioSource));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return statusTFromBinderStatus(
             aps->setDevicesRoleForCapturePreset(audioSourceAidl, roleAidl, devicesAidl));
 }
@@ -2182,12 +2192,12 @@
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(audioSource));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return statusTFromBinderStatus(
             aps->addDevicesRoleForCapturePreset(audioSourceAidl, roleAidl, devicesAidl));
 }
@@ -2198,12 +2208,12 @@
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(audioSource));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return statusTFromBinderStatus(
             aps->removeDevicesRoleForCapturePreset(audioSourceAidl, roleAidl, devicesAidl));
 }
@@ -2214,8 +2224,8 @@
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(audioSource));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
     return statusTFromBinderStatus(
             aps->clearDevicesRoleForCapturePreset(audioSourceAidl, roleAidl));
@@ -2228,10 +2238,10 @@
     if (aps == 0) {
         return PERMISSION_DENIED;
     }
-    media::AudioSourceType audioSourceAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_source_t_AudioSourceType(audioSource));
+    AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_source_t_AudioSource(audioSource));
     media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
-    std::vector<media::AudioDevice> devicesAidl;
+    std::vector<AudioDevice> devicesAidl;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->getDevicesForRoleAndCapturePreset(audioSourceAidl, roleAidl, &devicesAidl)));
     devices = VALUE_OR_RETURN_STATUS(
@@ -2270,16 +2280,63 @@
 
     std::optional<media::AudioAttributesInternal> attrAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_attributes_t_AudioAttributesInternal(attributes));
-    std::optional<media::AudioConfig> configAidl = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_config_t_AudioConfig(configuration));
-    std::vector<media::AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                   legacy2aidl_AudioDeviceTypeAddress));
+    std::optional<AudioConfig> configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(configuration, false /*isInput*/));
+    std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
             aps->canBeSpatialized(attrAidl, configAidl, devicesAidl, canBeSpatialized)));
     return OK;
 }
 
+status_t AudioSystem::getDirectPlaybackSupport(const audio_attributes_t *attr,
+                                               const audio_config_t *config,
+                                               audio_direct_mode_t* directMode) {
+    if (attr == nullptr || config == nullptr || directMode == nullptr) {
+        return BAD_VALUE;
+    }
+
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+
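+    // Convert the legacy attributes and config to AIDL, then query AudioPolicyService; the
+    // result is translated back into an audio_direct_mode_t bit mask.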
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+    AudioConfig configAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
+
+    media::AudioDirectMode retAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getDirectPlaybackSupport(attrAidl, configAidl, &retAidl)));
+    *directMode = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_direct_mode_t_mask(
+            static_cast<int32_t>(retAidl)));
+    return NO_ERROR;
+}
+
+status_t AudioSystem::getDirectProfilesForAttributes(const audio_attributes_t* attr,
+                                                std::vector<audio_profile>* audioProfiles) {
+    if (attr == nullptr) {
+        return BAD_VALUE;
+    }
+
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) {
+        return PERMISSION_DENIED;
+    }
+
+    media::AudioAttributesInternal attrAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attr));
+
+    std::vector<media::audio::common::AudioProfile> audioProfilesAidl;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+            aps->getDirectProfilesForAttributes(attrAidl, &audioProfilesAidl)));
+    *audioProfiles = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_profile>>(
+                    audioProfilesAidl, aidl2legacy_AudioProfile_audio_profile, false /*isInput*/));
+
+    return NO_ERROR;
+}
 
 class CaptureStateListenerImpl : public media::BnCaptureStateListener,
                                  public IBinder::DeathRecipient {
@@ -2345,6 +2402,31 @@
     return af->setVibratorInfos(vibratorInfos);
 }
 
+status_t AudioSystem::getMmapPolicyInfo(
+        AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->getMmapPolicyInfos(policyType, policyInfos);
+}
+
+int32_t AudioSystem::getAAudioMixerBurstCount() {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->getAAudioMixerBurstCount();
+}
+
+int32_t AudioSystem::getAAudioHardwareBurstMinUsec() {
+    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->getAAudioHardwareBurstMinUsec();
+}
+
 // ---------------------------------------------------------------------------
 
 int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
@@ -2456,12 +2538,12 @@
 Status AudioSystem::AudioPolicyServiceClient::onRecordingConfigurationUpdate(
         int32_t event,
         const media::RecordClientInfo& clientInfo,
-        const media::AudioConfigBase& clientConfig,
+        const AudioConfigBase& clientConfig,
         const std::vector<media::EffectDescriptor>& clientEffects,
-        const media::AudioConfigBase& deviceConfig,
+        const AudioConfigBase& deviceConfig,
         const std::vector<media::EffectDescriptor>& effects,
         int32_t patchHandle,
-        media::AudioSourceType source) {
+        AudioSource source) {
     record_config_callback cb = NULL;
     {
         Mutex::Autolock _l(AudioSystem::gLock);
@@ -2473,13 +2555,13 @@
         record_client_info_t clientInfoLegacy = VALUE_OR_RETURN_BINDER_STATUS(
                 aidl2legacy_RecordClientInfo_record_client_info_t(clientInfo));
         audio_config_base_t clientConfigLegacy = VALUE_OR_RETURN_BINDER_STATUS(
-                aidl2legacy_AudioConfigBase_audio_config_base_t(clientConfig));
+                aidl2legacy_AudioConfigBase_audio_config_base_t(clientConfig, true /*isInput*/));
         std::vector<effect_descriptor_t> clientEffectsLegacy = VALUE_OR_RETURN_BINDER_STATUS(
                 convertContainer<std::vector<effect_descriptor_t>>(
                         clientEffects,
                         aidl2legacy_EffectDescriptor_effect_descriptor_t));
         audio_config_base_t deviceConfigLegacy = VALUE_OR_RETURN_BINDER_STATUS(
-                aidl2legacy_AudioConfigBase_audio_config_base_t(deviceConfig));
+                aidl2legacy_AudioConfigBase_audio_config_base_t(deviceConfig, true /*isInput*/));
         std::vector<effect_descriptor_t> effectsLegacy = VALUE_OR_RETURN_BINDER_STATUS(
                 convertContainer<std::vector<effect_descriptor_t>>(
                         effects,
@@ -2487,7 +2569,7 @@
         audio_patch_handle_t patchHandleLegacy = VALUE_OR_RETURN_BINDER_STATUS(
                 aidl2legacy_int32_t_audio_patch_handle_t(patchHandle));
         audio_source_t sourceLegacy = VALUE_OR_RETURN_BINDER_STATUS(
-                aidl2legacy_AudioSourceType_audio_source_t(source));
+                aidl2legacy_AudioSource_audio_source_t(source));
         cb(eventLegacy, &clientInfoLegacy, &clientConfigLegacy, clientEffectsLegacy,
            &deviceConfigLegacy, effectsLegacy, patchHandleLegacy, sourceLegacy);
     }
@@ -2507,6 +2589,19 @@
     return Status::ok();
 }
 
+Status AudioSystem::AudioPolicyServiceClient::onVolumeRangeInitRequest() {
+    vol_range_init_req_callback cb = NULL;
+    {
+        Mutex::Autolock _l(AudioSystem::gLock);
+        cb = gVolRangeInitReqCallback;
+    }
+
+    if (cb != NULL) {
+        cb();
+    }
+    return Status::ok();
+}
+
 void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
     {
         Mutex::Autolock _l(mLock);
@@ -2517,10 +2612,7 @@
             mAudioVolumeGroupCallback[i]->onServiceDied();
         }
     }
-    {
-        Mutex::Autolock _l(gLockAPS);
-        AudioSystem::gAudioPolicyService.clear();
-    }
+    AudioSystem::clearAudioPolicyService();
 
     ALOGW("AudioPolicyService server died!");
 }
@@ -2531,7 +2623,7 @@
     legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
     legacy.uid = VALUE_OR_RETURN(aidl2legacy_int32_t_uid_t(aidl.uid));
     legacy.session = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.session));
-    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source));
+    legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
     legacy.port_id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
     legacy.silenced = aidl.silenced;
     return legacy;
@@ -2543,7 +2635,7 @@
     aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(legacy.riid));
     aidl.uid = VALUE_OR_RETURN(legacy2aidl_uid_t_int32_t(legacy.uid));
     aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(legacy.session));
-    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source));
+    aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source));
     aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.port_id));
     aidl.silenced = legacy.silenced;
     return aidl;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 54d186e..6ab8339 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -173,8 +173,8 @@
     if (aps == 0) return false;
 
     auto result = [&]() -> ConversionResult<bool> {
-        media::AudioConfigBase configAidl = VALUE_OR_RETURN(
-                legacy2aidl_audio_config_base_t_AudioConfigBase(config));
+        media::audio::common::AudioConfigBase configAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_config_base_t_AudioConfigBase(config, false /*isInput*/));
         media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_attributes_t_AudioAttributesInternal(attributes));
         bool retAidl;
@@ -258,8 +258,7 @@
         audio_channel_mask_t channelMask,
         size_t frameCount,
         audio_output_flags_t flags,
-        callback_t cbf,
-        void* user,
+        const wp<IAudioTrackCallback> & callback,
         int32_t notificationFrames,
         audio_session_t sessionId,
         transfer_type transferType,
@@ -278,10 +277,91 @@
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
-    (void)set(streamType, sampleRate, format, channelMask,
-            frameCount, flags, cbf, user, notificationFrames,
-            0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+    // make_unique does not support aggregate initialization until C++20
+    mSetParams = std::unique_ptr<SetParams>{
+            new SetParams{streamType, sampleRate, format, channelMask, frameCount, flags, callback,
+                          notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/,
+                          sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+                          doNotReconnect, maxRequiredSpeed, selectedDeviceId}};
+}
+
+namespace {
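+    // Adapts a legacy_callback_t (event code + opaque user pointer) to the IAudioTrackCallback
+    // interface by forwarding each callback event to the wrapped function.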
+    class LegacyCallbackWrapper : public AudioTrack::IAudioTrackCallback {
+      const AudioTrack::legacy_callback_t mCallback;
+      void * const mData;
+      public:
+        LegacyCallbackWrapper(AudioTrack::legacy_callback_t callback, void* user)
+            : mCallback(callback), mData(user) {}
+        size_t onMoreData(const AudioTrack::Buffer& buffer) override {
+            AudioTrack::Buffer copy = buffer;
+            mCallback(AudioTrack::EVENT_MORE_DATA, mData, static_cast<void*>(&copy));
+            return copy.size();
+        }
+        void onUnderrun() override {
+            mCallback(AudioTrack::EVENT_UNDERRUN, mData, nullptr);
+        }
+        void onLoopEnd(int32_t loopsRemaining) override {
+            mCallback(AudioTrack::EVENT_LOOP_END, mData, &loopsRemaining);
+        }
+        void onMarker(uint32_t markerPosition) override {
+            mCallback(AudioTrack::EVENT_MARKER, mData, &markerPosition);
+        }
+        void onNewPos(uint32_t newPos) override {
+            mCallback(AudioTrack::EVENT_NEW_POS, mData, &newPos);
+        }
+        void onBufferEnd() override {
+            mCallback(AudioTrack::EVENT_BUFFER_END, mData, nullptr);
+        }
+        void onNewIAudioTrack() override {
+            mCallback(AudioTrack::EVENT_NEW_IAUDIOTRACK, mData, nullptr);
+        }
+        void onStreamEnd() override {
+            mCallback(AudioTrack::EVENT_STREAM_END, mData, nullptr);
+        }
+        size_t onCanWriteMoreData(const AudioTrack::Buffer& buffer) override {
+            AudioTrack::Buffer copy = buffer;
+            mCallback(AudioTrack::EVENT_CAN_WRITE_MORE_DATA, mData, static_cast<void*>(&copy));
+            return copy.size();
+        }
+    };
+}
+
+AudioTrack::AudioTrack(
+        audio_stream_type_t streamType,
+        uint32_t sampleRate,
+        audio_format_t format,
+        audio_channel_mask_t channelMask,
+        size_t frameCount,
+        audio_output_flags_t flags,
+        legacy_callback_t callback,
+        void* user,
+        int32_t notificationFrames,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        const audio_offload_info_t *offloadInfo,
+        const AttributionSourceState& attributionSource,
+        const audio_attributes_t* pAttributes,
+        bool doNotReconnect,
+        float maxRequiredSpeed,
+        audio_port_handle_t selectedDeviceId)
+    : mStatus(NO_INIT),
+      mState(STATE_STOPPED),
+      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
+      mPreviousSchedulingGroup(SP_DEFAULT),
+      mPausedPosition(0),
+      mAudioTrackCallback(new AudioTrackCallback())
+{
+    mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    if (callback != nullptr) {
+        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
+    } else if (user) {
+        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
+    }
+    mSetParams = std::unique_ptr<SetParams>{new SetParams{
+            streamType, sampleRate, format, channelMask, frameCount, flags, mLegacyCallbackWrapper,
+            notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId,
+            transferType, offloadInfo, attributionSource, pAttributes, doNotReconnect,
+            maxRequiredSpeed, selectedDeviceId}};
 }
 
 AudioTrack::AudioTrack(
@@ -291,8 +371,7 @@
         audio_channel_mask_t channelMask,
         const sp<IMemory>& sharedBuffer,
         audio_output_flags_t flags,
-        callback_t cbf,
-        void* user,
+        const wp<IAudioTrackCallback>& callback,
         int32_t notificationFrames,
         audio_session_t sessionId,
         transfer_type transferType,
@@ -311,10 +390,56 @@
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
-    (void)set(streamType, sampleRate, format, channelMask,
-            0 /*frameCount*/, flags, cbf, user, notificationFrames,
-            sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
-            attributionSource, pAttributes, doNotReconnect, maxRequiredSpeed);
+    mSetParams = std::unique_ptr<SetParams>{
+            new SetParams{streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags,
+                          callback, notificationFrames, sharedBuffer, false /*threadCanCallJava*/,
+                          sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+                          doNotReconnect, maxRequiredSpeed, AUDIO_PORT_HANDLE_NONE}};
+}
+
+AudioTrack::AudioTrack(
+        audio_stream_type_t streamType,
+        uint32_t sampleRate,
+        audio_format_t format,
+        audio_channel_mask_t channelMask,
+        const sp<IMemory>& sharedBuffer,
+        audio_output_flags_t flags,
+        legacy_callback_t callback,
+        void* user,
+        int32_t notificationFrames,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        const audio_offload_info_t *offloadInfo,
+        const AttributionSourceState& attributionSource,
+        const audio_attributes_t* pAttributes,
+        bool doNotReconnect,
+        float maxRequiredSpeed)
+    : mStatus(NO_INIT),
+      mState(STATE_STOPPED),
+      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
+      mPreviousSchedulingGroup(SP_DEFAULT),
+      mPausedPosition(0),
+      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
+      mAudioTrackCallback(new AudioTrackCallback())
+{
+    mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
+    if (callback) {
+        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
+    } else if (user) {
+        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
+    }
+    mSetParams = std::unique_ptr<SetParams>{new SetParams{
+            streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags,
+            mLegacyCallbackWrapper, notificationFrames, sharedBuffer, false /*threadCanCallJava*/,
+            sessionId, transferType, offloadInfo, attributionSource, pAttributes, doNotReconnect,
+            maxRequiredSpeed, AUDIO_PORT_HANDLE_NONE}};
+}
+
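+// The constructors above only capture their arguments in mSetParams; the actual set() call is
+// deferred to onFirstRef(), once the track is managed by its first strong reference.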
+void AudioTrack::onFirstRef() {
+    if (mSetParams) {
+        set(*mSetParams);
+        mSetParams.reset();
+    }
 }
 
 AudioTrack::~AudioTrack()
@@ -357,12 +482,13 @@
     // Otherwise the callback thread will never exit.
     stop();
     if (mAudioTrackThread != 0) { // not thread safe
-        mProxy->interrupt();
         mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
+        mProxy->interrupt();
         mAudioTrackThread->requestExitAndWait();
         mAudioTrackThread.clear();
     }
-    // No lock here: worst case we remove a NULL callback which will be a nop
+
+    AutoMutex lock(mLock);
     if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
         // This may not stop all of these device callbacks!
         // TODO: Add some sort of protection.
@@ -378,8 +504,8 @@
         audio_channel_mask_t channelMask,
         size_t frameCount,
         audio_output_flags_t flags,
-        callback_t cbf,
-        void* user,
+        legacy_callback_t callback,
+        void* user,
         int32_t notificationFrames,
         const sp<IMemory>& sharedBuffer,
         bool threadCanCallJava,
@@ -392,6 +518,38 @@
         float maxRequiredSpeed,
         audio_port_handle_t selectedDeviceId)
 {
+    if (callback) {
+        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
+    } else if (user) {
+        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
+    }
+    return set(streamType, sampleRate, format, channelMask, frameCount, flags,
+               mLegacyCallbackWrapper, notificationFrames, sharedBuffer, threadCanCallJava,
+               sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+               doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+}
+
+status_t AudioTrack::set(
+        audio_stream_type_t streamType,
+        uint32_t sampleRate,
+        audio_format_t format,
+        audio_channel_mask_t channelMask,
+        size_t frameCount,
+        audio_output_flags_t flags,
+        const wp<IAudioTrackCallback>& callback,
+        int32_t notificationFrames,
+        const sp<IMemory>& sharedBuffer,
+        bool threadCanCallJava,
+        audio_session_t sessionId,
+        transfer_type transferType,
+        const audio_offload_info_t *offloadInfo,
+        const AttributionSourceState& attributionSource,
+        const audio_attributes_t* pAttributes,
+        bool doNotReconnect,
+        float maxRequiredSpeed,
+        audio_port_handle_t selectedDeviceId)
+{
+    LOG_ALWAYS_FATAL_IF(mInitialized, "%s: should not be called twice", __func__);
+    mInitialized = true;
     status_t status;
     uint32_t channelCount;
     pid_t callingPid;
@@ -399,7 +557,6 @@
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     std::string errorMessage;
-
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
@@ -461,7 +618,7 @@
     case TRANSFER_DEFAULT:
         if (sharedBuffer != 0) {
             transferType = TRANSFER_SHARED;
-        } else if (cbf == NULL || threadCanCallJava) {
+        } else if (callback == nullptr || threadCanCallJava) {
             transferType = TRANSFER_SYNC;
         } else {
             transferType = TRANSFER_CALLBACK;
@@ -469,9 +626,9 @@
         break;
     case TRANSFER_CALLBACK:
     case TRANSFER_SYNC_NOTIF_CALLBACK:
-        if (cbf == NULL || sharedBuffer != 0) {
+        if (callback == nullptr || sharedBuffer != 0) {
             errorMessage = StringPrintf(
-                    "%s: Transfer type %s but cbf == NULL || sharedBuffer != 0",
+                    "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
-                    convertTransferToText(transferType), __func__);
+                    __func__, convertTransferToText(transferType));
             status = BAD_VALUE;
             goto error;
@@ -571,11 +728,13 @@
     //  (b) we can support re-creation of offloaded tracks
     if (offloadInfo != NULL) {
         mOffloadInfoCopy = *offloadInfo;
-        mOffloadInfo = &mOffloadInfoCopy;
     } else {
-        mOffloadInfo = NULL;
         memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
         mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
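+        // No offload info was supplied: populate the copy from the track's own parameters.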
+        mOffloadInfoCopy.format = format;
+        mOffloadInfoCopy.sample_rate = sampleRate;
+        mOffloadInfoCopy.channel_mask = channelMask;
+        mOffloadInfoCopy.stream_type = streamType;
     }
 
     mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
@@ -622,10 +781,10 @@
         mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(callingPid));
     }
     mAuxEffectId = 0;
-    mCbf = cbf;
+    mCallback = callback;
 
-    if (cbf != NULL) {
-        mAudioTrackThread = new AudioTrackThread(*this);
+    if (callback != nullptr) {
+        mAudioTrackThread = sp<AudioTrackThread>::make(*this);
         mAudioTrackThread->run("AudioTrack", ANDROID_PRIORITY_AUDIO, 0 /*stack*/);
         // thread begins in paused state, and will not reference us until start()
     }
@@ -645,7 +804,6 @@
         goto exit;
     }
 
-    mUserData = user;
     mLoopCount = 0;
     mLoopStart = 0;
     mLoopEnd = 0;
@@ -695,7 +853,7 @@
         uint32_t channelMask,
         size_t frameCount,
         audio_output_flags_t flags,
-        callback_t cbf,
+        legacy_callback_t callback,
         void* user,
         int32_t notificationFrames,
         const sp<IMemory>& sharedBuffer,
@@ -714,11 +872,15 @@
     attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
     attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
     attributionSource.token = sp<BBinder>::make();
-    return set(streamType, sampleRate, format,
-            static_cast<audio_channel_mask_t>(channelMask),
-            frameCount, flags, cbf, user, notificationFrames, sharedBuffer,
-            threadCanCallJava, sessionId, transferType, offloadInfo, attributionSource,
-            pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
+    if (callback) {
+        mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
+    } else if (user) {
+        LOG_ALWAYS_FATAL("Callback data provided without callback pointer!");
+    }
+    return set(streamType, sampleRate, format, static_cast<audio_channel_mask_t>(channelMask),
+               frameCount, flags, mLegacyCallbackWrapper, notificationFrames, sharedBuffer,
+               threadCanCallJava, sessionId, transferType, offloadInfo, attributionSource,
+               pAttributes, doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
 // -------------------------------------------------------------------------
@@ -1351,10 +1513,6 @@
     if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
         return NO_INIT;
     }
-    // Reject if timed track or compressed audio.
-    if (!audio_is_linear_pcm(mFormat)) {
-        return INVALID_OPERATION;
-    }
 
     ssize_t originalBufferSize = mProxy->getBufferSizeInFrames();
     ssize_t finalBufferSize  = mProxy->setBufferSizeInFrames((uint32_t) bufferSizeInFrames);
@@ -1447,12 +1605,12 @@
 
 status_t AudioTrack::setMarkerPosition(uint32_t marker)
 {
+    AutoMutex lock(mLock);
     // The only purpose of setting marker position is to get a callback
-    if (mCbf == NULL || isOffloadedOrDirect()) {
+    if (!mCallback.promote() || isOffloadedOrDirect_l()) {
         return INVALID_OPERATION;
     }
 
-    AutoMutex lock(mLock);
     mMarkerPosition = marker;
     mMarkerReached = false;
 
@@ -1480,12 +1638,12 @@
 
 status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod)
 {
+    AutoMutex lock(mLock);
     // The only purpose of setting position update period is to get a callback
-    if (mCbf == NULL || isOffloadedOrDirect()) {
+    if (!mCallback.promote() || isOffloadedOrDirect_l()) {
         return INVALID_OPERATION;
     }
 
-    AutoMutex lock(mLock);
     mNewPosition = updateAndGetPosition_l() + updatePeriod;
     mUpdatePeriod = updatePeriod;
 
@@ -1631,6 +1789,8 @@
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
     AutoMutex lock(mLock);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
+            __func__, mPortId, deviceId, mSelectedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
@@ -1878,7 +2038,7 @@
                 mAwaitBoost = true;
             }
         } else {
-            ALOGD("%s(%d): AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu",
+            ALOGV("%s(%d): AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu -> %zu",
                   __func__, mPortId, mReqFrameCount, mFrameCount);
         }
     }
@@ -2065,7 +2225,7 @@
     }
     if (mTransfer != TRANSFER_OBTAIN) {
         audioBuffer->frameCount = 0;
-        audioBuffer->size = 0;
+        audioBuffer->mSize = 0;
         audioBuffer->raw = NULL;
         if (nonContig != NULL) {
             *nonContig = 0;
@@ -2157,7 +2317,7 @@
     } while (((status == DEAD_OBJECT) || (status == NOT_ENOUGH_DATA)) && (tryCounter-- > 0));
 
     audioBuffer->frameCount = buffer.mFrameCount;
-    audioBuffer->size = buffer.mFrameCount * mFrameSize;
+    audioBuffer->mSize = buffer.mFrameCount * mFrameSize;
     audioBuffer->raw = buffer.mRaw;
     audioBuffer->sequence = oldSequence;
     if (nonContig != NULL) {
@@ -2173,7 +2333,7 @@
         return;
     }
 
-    size_t stepCount = audioBuffer->size / mFrameSize;
+    size_t stepCount = audioBuffer->mSize / mFrameSize;
     if (stepCount == 0) {
         return;
     }
@@ -2253,8 +2413,8 @@
             return ssize_t(err);
         }
 
-        size_t toWrite = audioBuffer.size;
-        memcpy(audioBuffer.i8, buffer, toWrite);
+        size_t toWrite = audioBuffer.size();
+        memcpy(audioBuffer.raw, buffer, toWrite);
         buffer = ((const char *) buffer) + toWrite;
         userSize -= toWrite;
         written += toWrite;
@@ -2284,10 +2444,15 @@
 {
     // Currently the AudioTrack thread is not created if there are no callbacks.
     // Would it ever make sense to run the thread, even without callbacks?
-    // If so, then replace this by checks at each use for mCbf != NULL.
+    // If so, then replace this by checks at each use for mCallback != NULL.
     LOG_ALWAYS_FATAL_IF(mCblk == NULL);
-
     mLock.lock();
+    sp<IAudioTrackCallback> callback = mCallback.promote();
+    if (!callback) {
+        mCallback = nullptr;
+        mLock.unlock();
+        return NS_NEVER;
+    }
     if (mAwaitBoost) {
         mAwaitBoost = false;
         mLock.unlock();
@@ -2385,7 +2550,7 @@
     sp<AudioTrackClientProxy> proxy = mProxy;
 
     // Determine the number of new loop callback(s) that will be needed, while locked.
-    int loopCountNotifications = 0;
+    uint32_t loopCountNotifications = 0;
     uint32_t loopPeriod = 0; // time in frames for next EVENT_LOOP_END or EVENT_BUFFER_END
 
     if (mLoopCount > 0) {
@@ -2407,7 +2572,7 @@
     }
 
     // These fields don't need to be cached, because they are assigned only by set():
-    //     mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFlags
+    // mTransfer, mCallback, mFormat, mFrameSize, mFlags
     // mFlags is also assigned by createTrack_l(), but not the bit we care about.
 
     mLock.unlock();
@@ -2432,7 +2597,7 @@
             if (status != DEAD_OBJECT) {
                 // for DEAD_OBJECT, we do not send a EVENT_STREAM_END after stop();
                 // instead, the application should handle the EVENT_NEW_IAUDIOTRACK.
-                mCbf(EVENT_STREAM_END, mUserData, NULL);
+                callback->onStreamEnd();
             }
             {
                 AutoMutex lock(mLock);
@@ -2455,28 +2620,27 @@
 
     // perform callbacks while unlocked
     if (newUnderrun) {
-        mCbf(EVENT_UNDERRUN, mUserData, NULL);
+        callback->onUnderrun();
     }
     while (loopCountNotifications > 0) {
-        mCbf(EVENT_LOOP_END, mUserData, NULL);
         --loopCountNotifications;
+        callback->onLoopEnd(mLoopCount > 0 ? loopCountNotifications + mLoopCountNotified : -1);
     }
     if (flags & CBLK_BUFFER_END) {
-        mCbf(EVENT_BUFFER_END, mUserData, NULL);
+        callback->onBufferEnd();
     }
     if (markerReached) {
-        mCbf(EVENT_MARKER, mUserData, &markerPosition);
+        callback->onMarker(markerPosition.value());
     }
     while (newPosCount > 0) {
-        size_t temp = newPosition.value(); // FIXME size_t != uint32_t
-        mCbf(EVENT_NEW_POS, mUserData, &temp);
+        callback->onNewPos(newPosition.value());
         newPosition += updatePeriod;
         newPosCount--;
     }
 
     if (mObservedSequence != sequence) {
         mObservedSequence = sequence;
-        mCbf(EVENT_NEW_IAUDIOTRACK, mUserData, NULL);
+        callback->onNewIAudioTrack();
         // for offloaded tracks, just wait for the upper layers to recreate the track
         if (isOffloadedOrDirect()) {
             return NS_INACTIVE;
@@ -2608,16 +2772,15 @@
             }
         }
 
-        size_t reqSize = audioBuffer.size;
+        size_t reqSize = audioBuffer.size();
         if (mTransfer == TRANSFER_SYNC_NOTIF_CALLBACK) {
             // when notifying client it can write more data, pass the total size that can be
             // written in the next write() call, since it's not passed through the callback
-            audioBuffer.size += nonContig;
+            audioBuffer.mSize += nonContig;
         }
-        mCbf(mTransfer == TRANSFER_CALLBACK ? EVENT_MORE_DATA : EVENT_CAN_WRITE_MORE_DATA,
-                mUserData, &audioBuffer);
-        size_t writtenSize = audioBuffer.size;
-
+        const size_t writtenSize = (mTransfer == TRANSFER_CALLBACK)
+                                      ? callback->onMoreData(audioBuffer)
+                                      : callback->onCanWriteMoreData(audioBuffer);
         // Validate on returned size
         if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
             ALOGE("%s(%d): EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
@@ -2677,6 +2840,9 @@
             return ns;
         }
 
+        // releaseBuffer reads from audioBuffer.size
+        audioBuffer.mSize = writtenSize;
+
         size_t releasedFrames = writtenSize / mFrameSize;
         audioBuffer.frameCount = releasedFrames;
         mRemainingFrames -= releasedFrames;
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index 35719be..e3b79b2 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -409,7 +409,7 @@
         android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         // it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process
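+        // Wake every waiter on the futex word (INT_MAX) rather than just one.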
         (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
-                1);
+                INT_MAX);
     }
 }
 
@@ -419,7 +419,7 @@
     if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) {
         android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
-                1);
+                INT_MAX);
     }
 }
 
@@ -490,6 +490,8 @@
 status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *requested)
 {
     struct timespec total;          // total elapsed time spent waiting
+    struct timespec before;        // CLOCK_MONOTONIC time captured before the last futex wait
+    bool beforeIsValid = false;    // whether 'before' has been captured yet
     total.tv_sec = 0;
     total.tv_nsec = 0;
     audio_track_cblk_t* cblk = mCblk;
@@ -570,17 +572,38 @@
         }
         int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
         if (!(old & CBLK_FUTEX_WAKE)) {
+            if (!beforeIsValid) {
+                clock_gettime(CLOCK_MONOTONIC, &before);
+                beforeIsValid = true;
+            }
             errno = 0;
             (void) syscall(__NR_futex, &cblk->mFutex,
                     mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts);
-            switch (errno) {
+            status_t error = errno; // clock_gettime can affect errno
+            {
+                struct timespec after;
+                clock_gettime(CLOCK_MONOTONIC, &after);
+                total.tv_sec += after.tv_sec - before.tv_sec;
+                // Use auto instead of long to avoid the google-runtime-int warning.
+                auto deltaNs = after.tv_nsec - before.tv_nsec;
+                if (deltaNs < 0) {
+                    deltaNs += 1000000000;
+                    total.tv_sec--;
+                }
+                if ((total.tv_nsec += deltaNs) >= 1000000000) {
+                    total.tv_nsec -= 1000000000;
+                    total.tv_sec++;
+                }
+                before = after;
+            }
+            switch (error) {
             case 0:            // normal wakeup by server, or by binderDied()
             case EWOULDBLOCK:  // benign race condition with server
             case EINTR:        // wait was interrupted by signal or other spurious wakeup
             case ETIMEDOUT:    // time-out expired
                 break;
             default:
-                status = errno;
+                status = error;
                 ALOGE("%s unexpected error %s", __func__, strerror(status));
                 goto end;
             }
@@ -747,7 +770,7 @@
             int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
             if (!(old & CBLK_FUTEX_WAKE)) {
                 (void) syscall(__NR_futex, &cblk->mFutex,
-                        mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
+                        mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, INT_MAX);
             }
         }
         mFlushed += (newFront - front) & mask;
@@ -917,7 +940,7 @@
         int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         if (!(old & CBLK_FUTEX_WAKE)) {
             (void) syscall(__NR_futex, &cblk->mFutex,
-                    mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
+                    mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, INT_MAX);
         }
     }
 
diff --git a/media/libaudioclient/AudioVolumeGroup.cpp b/media/libaudioclient/AudioVolumeGroup.cpp
index 361f7b8..ab95246 100644
--- a/media/libaudioclient/AudioVolumeGroup.cpp
+++ b/media/libaudioclient/AudioVolumeGroup.cpp
@@ -26,11 +26,10 @@
 #include <media/AudioAttributes.h>
 #include <media/PolicyAidlConversion.h>
 
-#define RETURN_STATUS_IF_ERROR(x) \
-    { auto _tmp = (x); if (_tmp != OK) return _tmp; }
-
 namespace android {
 
+using media::audio::common::AudioStreamType;
+
 status_t AudioVolumeGroup::readFromParcel(const Parcel *parcel)
 {
     media::AudioVolumeGroup aidl;
@@ -55,7 +54,7 @@
                     legacy.getAudioAttributes(),
                     legacy2aidl_audio_attributes_t_AudioAttributesInternal));
     aidl.streams = VALUE_OR_RETURN(
-            convertContainer<std::vector<media::AudioStreamType>>(legacy.getStreamTypes(),
+            convertContainer<std::vector<AudioStreamType>>(legacy.getStreamTypes(),
             legacy2aidl_audio_stream_type_t_AudioStreamType));
     return aidl;
 }
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 2af1c50..292d92f 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -17,6 +17,7 @@
 
 #define LOG_TAG "IAudioFlinger"
 //#define LOG_NDEBUG 0
+
 #include <utils/Log.h>
 
 #include <stdint.h>
@@ -30,6 +31,13 @@
 
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
+using media::audio::common::AudioChannelLayout;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioMMapPolicyInfo;
+using media::audio::common::AudioMMapPolicyType;
+using media::audio::common::AudioMode;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioUuid;
 
 #define MAX_ITEMS_PER_LIST 1024
 
@@ -40,12 +48,6 @@
        std::move(_tmp.value()); \
      })
 
-#define RETURN_STATUS_IF_ERROR(x)    \
-    {                                \
-       auto _tmp = (x);              \
-       if (_tmp != OK) return _tmp;  \
-    }
-
 #define RETURN_BINDER_IF_ERROR(x)                         \
     {                                                     \
        auto _tmp = (x);                                   \
@@ -55,7 +57,9 @@
 ConversionResult<media::CreateTrackRequest> IAudioFlinger::CreateTrackInput::toAidl() const {
     media::CreateTrackRequest aidl;
     aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
-    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(config));
+    // Do not be misled by 'Input'--this is an input to 'createTrack', which creates output tracks.
+    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(
+                    config, false /*isInput*/));
     aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
     aidl.sharedBuffer = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(sharedBuffer));
     aidl.notificationsPerBuffer = VALUE_OR_RETURN(convertIntegral<int32_t>(notificationsPerBuffer));
@@ -74,7 +78,9 @@
 IAudioFlinger::CreateTrackInput::fromAidl(const media::CreateTrackRequest& aidl) {
     IAudioFlinger::CreateTrackInput legacy;
     legacy.attr = VALUE_OR_RETURN(aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
-    legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(aidl.config));
+    // Do not be misled by 'Input'--this is an input to 'createTrack', which creates output tracks.
+    legacy.config = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfig_audio_config_t(aidl.config, false /*isInput*/));
     legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
     legacy.sharedBuffer = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.sharedBuffer));
     legacy.notificationsPerBuffer = VALUE_OR_RETURN(
@@ -139,7 +145,8 @@
 IAudioFlinger::CreateRecordInput::toAidl() const {
     media::CreateRecordRequest aidl;
     aidl.attr = VALUE_OR_RETURN(legacy2aidl_audio_attributes_t_AudioAttributesInternal(attr));
-    aidl.config = VALUE_OR_RETURN(legacy2aidl_audio_config_base_t_AudioConfigBase(config));
+    aidl.config = VALUE_OR_RETURN(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
     aidl.clientInfo = VALUE_OR_RETURN(legacy2aidl_AudioClient_AudioClient(clientInfo));
     aidl.riid = VALUE_OR_RETURN(legacy2aidl_audio_unique_id_t_int32_t(riid));
     aidl.maxSharedAudioHistoryMs = VALUE_OR_RETURN(
@@ -159,7 +166,8 @@
     IAudioFlinger::CreateRecordInput legacy;
     legacy.attr = VALUE_OR_RETURN(
             aidl2legacy_AudioAttributesInternal_audio_attributes_t(aidl.attr));
-    legacy.config = VALUE_OR_RETURN(aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config));
+    legacy.config = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.config, true /*isInput*/));
     legacy.clientInfo = VALUE_OR_RETURN(aidl2legacy_AudioClient_AudioClient(aidl.clientInfo));
     legacy.riid = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_unique_id_t(aidl.riid));
     legacy.maxSharedAudioHistoryMs = VALUE_OR_RETURN(
@@ -189,6 +197,8 @@
     aidl.buffers = VALUE_OR_RETURN(legacy2aidl_NullableIMemory_SharedFileRegion(buffers));
     aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
     aidl.audioRecord = audioRecord;
+    aidl.serverConfig = VALUE_OR_RETURN(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(serverConfig, true /*isInput*/));
     return aidl;
 }
 
@@ -209,6 +219,8 @@
     legacy.buffers = VALUE_OR_RETURN(aidl2legacy_NullableSharedFileRegion_IMemory(aidl.buffers));
     legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
     legacy.audioRecord = aidl.audioRecord;
+    legacy.serverConfig = VALUE_OR_RETURN(
+            aidl2legacy_AudioConfigBase_audio_config_base_t(aidl.serverConfig, true /*isInput*/));
     return legacy;
 }
 
@@ -242,9 +254,9 @@
 audio_format_t AudioFlingerClientAdapter::format(audio_io_handle_t output) const {
     auto result = [&]() -> ConversionResult<audio_format_t> {
         int32_t outputAidl = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(output));
-        media::audio::common::AudioFormat aidlRet;
+        AudioFormatDescription aidlRet;
         RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->format(outputAidl, &aidlRet)));
-        return aidl2legacy_AudioFormat_audio_format_t(aidlRet);
+        return aidl2legacy_AudioFormatDescription_audio_format_t(aidlRet);
     }();
     return result.value_or(AUDIO_FORMAT_INVALID);
 }
@@ -309,14 +321,14 @@
 
 status_t AudioFlingerClientAdapter::setStreamVolume(audio_stream_type_t stream, float value,
                                                     audio_io_handle_t output) {
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
     return statusTFromBinderStatus(mDelegate->setStreamVolume(streamAidl, value, outputAidl));
 }
 
 status_t AudioFlingerClientAdapter::setStreamMute(audio_stream_type_t stream, bool muted) {
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
 }
@@ -324,7 +336,7 @@
 float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
                                               audio_io_handle_t output) const {
     auto result = [&]() -> ConversionResult<float> {
-        media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
                 legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
         int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
         float aidlRet;
@@ -338,7 +350,7 @@
 
 bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
     auto result = [&]() -> ConversionResult<bool> {
-        media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
                 legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
         bool aidlRet;
         RETURN_IF_ERROR(statusTFromBinderStatus(
@@ -350,7 +362,7 @@
 }
 
 status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
-    media::AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
+    AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
     return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
 }
 
@@ -410,10 +422,10 @@
                                                      audio_channel_mask_t channelMask) const {
     auto result = [&]() -> ConversionResult<size_t> {
         int32_t sampleRateAidl = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
-        media::audio::common::AudioFormat formatAidl = VALUE_OR_RETURN(
-                legacy2aidl_audio_format_t_AudioFormat(format));
-        int32_t channelMaskAidl = VALUE_OR_RETURN(
-                legacy2aidl_audio_channel_mask_t_int32_t(channelMask));
+        AudioFormatDescription formatAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_format_t_AudioFormatDescription(format));
+        AudioChannelLayout channelMaskAidl = VALUE_OR_RETURN(
+                legacy2aidl_audio_channel_mask_t_AudioChannelLayout(channelMask, true /*isInput*/));
         int64_t aidlRet;
         RETURN_IF_ERROR(statusTFromBinderStatus(
                 mDelegate->getInputBufferSize(sampleRateAidl, formatAidl, channelMaskAidl,
@@ -469,7 +481,7 @@
 }
 
 status_t AudioFlingerClientAdapter::invalidateStream(audio_stream_type_t stream) {
-    media::AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
+    AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     return statusTFromBinderStatus(mDelegate->invalidateStream(streamAidl));
 }
@@ -568,9 +580,9 @@
                                                         const effect_uuid_t* pTypeUUID,
                                                         uint32_t preferredTypeFlag,
                                                         effect_descriptor_t* pDescriptor) const {
-    media::AudioUuid effectUuidAidl = VALUE_OR_RETURN_STATUS(
+    AudioUuid effectUuidAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_uuid_t_AudioUuid(*pEffectUUID));
-    media::AudioUuid typeUuidAidl = VALUE_OR_RETURN_STATUS(
+    AudioUuid typeUuidAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_uuid_t_AudioUuid(*pTypeUUID));
     int32_t preferredTypeFlagAidl = VALUE_OR_RETURN_STATUS(
             convertReinterpret<int32_t>(preferredTypeFlag));
@@ -765,6 +777,38 @@
     return statusTFromBinderStatus(mDelegate->updateSecondaryOutputs(trackSecondaryOutputInfos));
 }
 
+status_t AudioFlingerClientAdapter::getMmapPolicyInfos(
+        AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
+    return statusTFromBinderStatus(mDelegate->getMmapPolicyInfos(policyType, policyInfos));
+}
+
+int32_t AudioFlingerClientAdapter::getAAudioMixerBurstCount() {
+    auto result = [&]() -> ConversionResult<int32_t> {
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(mDelegate->getAAudioMixerBurstCount(&aidlRet)));
+        return convertIntegral<int32_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+int32_t AudioFlingerClientAdapter::getAAudioHardwareBurstMinUsec() {
+    auto result = [&]() -> ConversionResult<int32_t> {
+        int32_t aidlRet;
+        RETURN_IF_ERROR(statusTFromBinderStatus(
+                mDelegate->getAAudioHardwareBurstMinUsec(&aidlRet)));
+        return convertIntegral<int32_t>(aidlRet);
+    }();
+    // Failure is ignored.
+    return result.value_or(0);
+}
+
+status_t AudioFlingerClientAdapter::setDeviceConnectedState(
+        const struct audio_port_v7 *port, bool connected) {
+    media::AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPort(*port));
+    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
+}
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
@@ -810,11 +854,11 @@
 }
 
 Status AudioFlingerServerAdapter::format(int32_t output,
-                                         media::audio::common::AudioFormat* _aidl_return) {
+                                         AudioFormatDescription* _aidl_return) {
     audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_int32_t_audio_io_handle_t(output));
     *_aidl_return = VALUE_OR_RETURN_BINDER(
-            legacy2aidl_audio_format_t_AudioFormat(mDelegate->format(outputLegacy)));
+            legacy2aidl_audio_format_t_AudioFormatDescription(mDelegate->format(outputLegacy)));
     return Status::ok();
 }
 
@@ -860,7 +904,7 @@
     return Status::fromStatusT(mDelegate->getMasterBalance(_aidl_return));
 }
 
-Status AudioFlingerServerAdapter::setStreamVolume(media::AudioStreamType stream, float value,
+Status AudioFlingerServerAdapter::setStreamVolume(AudioStreamType stream, float value,
                                                   int32_t output) {
     audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
@@ -869,13 +913,13 @@
     return Status::fromStatusT(mDelegate->setStreamVolume(streamLegacy, value, outputLegacy));
 }
 
-Status AudioFlingerServerAdapter::setStreamMute(media::AudioStreamType stream, bool muted) {
+Status AudioFlingerServerAdapter::setStreamMute(AudioStreamType stream, bool muted) {
     audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
     return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
 }
 
-Status AudioFlingerServerAdapter::streamVolume(media::AudioStreamType stream, int32_t output,
+Status AudioFlingerServerAdapter::streamVolume(AudioStreamType stream, int32_t output,
                                                float* _aidl_return) {
     audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
@@ -885,14 +929,14 @@
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::streamMute(media::AudioStreamType stream, bool* _aidl_return) {
+Status AudioFlingerServerAdapter::streamMute(AudioStreamType stream, bool* _aidl_return) {
     audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
     *_aidl_return = mDelegate->streamMute(streamLegacy);
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::setMode(media::AudioMode mode) {
+Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
     audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
     return Status::fromStatusT(mDelegate->setMode(modeLegacy));
 }
@@ -938,13 +982,14 @@
 }
 
 Status AudioFlingerServerAdapter::getInputBufferSize(int32_t sampleRate,
-                                                     media::audio::common::AudioFormat format,
-                                                     int32_t channelMask, int64_t* _aidl_return) {
+                                                     const AudioFormatDescription& format,
+                                                     const AudioChannelLayout& channelMask,
+                                                     int64_t* _aidl_return) {
     uint32_t sampleRateLegacy = VALUE_OR_RETURN_BINDER(convertIntegral<uint32_t>(sampleRate));
     audio_format_t formatLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioFormat_audio_format_t(format));
+            aidl2legacy_AudioFormatDescription_audio_format_t(format));
     audio_channel_mask_t channelMaskLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_int32_t_audio_channel_mask_t(channelMask));
+            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(channelMask, true /*isInput*/));
     size_t size = mDelegate->getInputBufferSize(sampleRateLegacy, formatLegacy, channelMaskLegacy);
     *_aidl_return = VALUE_OR_RETURN_BINDER(convertIntegral<int64_t>(size));
     return Status::ok();
@@ -995,7 +1040,7 @@
     return Status::fromStatusT(mDelegate->closeInput(inputLegacy));
 }
 
-Status AudioFlingerServerAdapter::invalidateStream(media::AudioStreamType stream) {
+Status AudioFlingerServerAdapter::invalidateStream(AudioStreamType stream) {
     audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
     return Status::fromStatusT(mDelegate->invalidateStream(streamLegacy));
@@ -1070,8 +1115,8 @@
     return Status::ok();
 }
 
-Status AudioFlingerServerAdapter::getEffectDescriptor(const media::AudioUuid& effectUUID,
-                                                      const media::AudioUuid& typeUUID,
+Status AudioFlingerServerAdapter::getEffectDescriptor(const AudioUuid& effectUUID,
+                                                      const AudioUuid& typeUUID,
                                                       int32_t preferredTypeFlag,
                                                       media::EffectDescriptor* _aidl_return) {
     effect_uuid_t effectUuidLegacy = VALUE_OR_RETURN_BINDER(
@@ -1236,4 +1281,27 @@
     return Status::fromStatusT(mDelegate->updateSecondaryOutputs(trackSecondaryOutputs));
 }
 
+Status AudioFlingerServerAdapter::getMmapPolicyInfos(
+        AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *_aidl_return) {
+    return Status::fromStatusT(mDelegate->getMmapPolicyInfos(policyType, _aidl_return));
+}
+
+Status AudioFlingerServerAdapter::getAAudioMixerBurstCount(int32_t* _aidl_return) {
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int32_t>(mDelegate->getAAudioMixerBurstCount()));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) {
+    *_aidl_return = VALUE_OR_RETURN_BINDER(
+            convertIntegral<int32_t>(mDelegate->getAAudioHardwareBurstMinUsec()));
+    return Status::ok();
+}
+
+Status AudioFlingerServerAdapter::setDeviceConnectedState(
+        const media::AudioPort& port, bool connected) {
+    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
+}
+
 } // namespace android
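
A note on the recurring isInput arguments in the IAudioFlinger hunks above: AudioConfig, AudioConfigBase and AudioChannelLayout now distinguish input (capture) from output (playback) variants, so every conversion must state which side it describes; the createTrack path passes false and the createRecord path passes true. A minimal sketch of the calling convention, reusing only helpers that appear in this diff (the wrapper function and the header path are assumptions, not part of the change):

#include <media/AidlConversion.h>  // assumed location of the conversion helpers

namespace android {

// Convert a legacy audio_config_t in either direction: capture (record) paths
// use input channel masks, playback (track) paths use output masks.
ConversionResult<media::audio::common::AudioConfig> toAidlConfig(
        const audio_config_t& config, bool forRecord) {
    return legacy2aidl_audio_config_t_AudioConfig(config, forRecord /*isInput*/);
}

}  // namespace android
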
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 25fdb49..520f09c 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -25,16 +25,7 @@
 namespace android {
 
 using base::unexpected;
-
-ConversionResult<volume_group_t>
-aidl2legacy_int32_t_volume_group_t(int32_t aidl) {
-    return convertReinterpret<volume_group_t>(aidl);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_volume_group_t_int32_t(volume_group_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
+using media::audio::common::AudioDeviceAddress;
 
 ConversionResult<uint32_t>
 aidl2legacy_AudioMixType_uint32_t(media::AudioMixType aidl) {
@@ -152,7 +143,7 @@
 
         case media::AudioMixMatchCriterionValue::source:
             legacy.mSource = VALUE_OR_RETURN(
-                    aidl2legacy_AudioSourceType_audio_source_t(UNION_GET(aidl, source).value()));
+                    aidl2legacy_AudioSource_audio_source_t(UNION_GET(aidl, source).value()));
             *rule |= RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET;
             return legacy;
 
@@ -184,7 +175,7 @@
 
         case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
             UNION_SET(aidl, source,
-                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.mSource)));
+                      VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.mSource)));
             break;
 
         case RULE_MATCH_UID:
@@ -232,11 +223,14 @@
                                  std::back_inserter(legacy.mCriteria),
                                  aidl2legacy_AudioMixMatchCriterion));
     legacy.mMixType = VALUE_OR_RETURN(aidl2legacy_AudioMixType_uint32_t(aidl.mixType));
-    legacy.mFormat = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(aidl.format));
+    // See 'convertAudioMixToNative' in 'android_media_AudioSystem.cpp' -- only
+    // an output mask is expected here.
+    legacy.mFormat = VALUE_OR_RETURN(aidl2legacy_AudioConfig_audio_config_t(
+                    aidl.format, false /*isInput*/));
     legacy.mRouteFlags = VALUE_OR_RETURN(
             aidl2legacy_AudioMixRouteFlag_uint32_t_mask(aidl.routeFlags));
-    legacy.mDeviceType = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.device.type));
-    legacy.mDeviceAddress = VALUE_OR_RETURN(aidl2legacy_string_view_String8(aidl.device.address));
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+                    aidl.device, &legacy.mDeviceType, &legacy.mDeviceAddress));
     legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
     legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
     legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
@@ -251,11 +245,15 @@
                     legacy.mCriteria,
                     legacy2aidl_AudioMixMatchCriterion));
     aidl.mixType = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixType(legacy.mMixType));
-    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(legacy.mFormat));
+    // See 'convertAudioMixToNative' in 'android_media_AudioSystem.cpp' -- only
+    // an output mask is expected here.
+    aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_config_t_AudioConfig(
+                    legacy.mFormat, false /*isInput*/));
     aidl.routeFlags = VALUE_OR_RETURN(
             legacy2aidl_uint32_t_AudioMixRouteFlag_mask(legacy.mRouteFlags));
-    aidl.device.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.mDeviceType));
-    aidl.device.address = VALUE_OR_RETURN(legacy2aidl_String8_string(legacy.mDeviceAddress));
+    aidl.device = VALUE_OR_RETURN(
+            legacy2aidl_audio_device_AudioDevice(
+                    legacy.mDeviceType, legacy.mDeviceAddress));
     aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
     aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
     aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
new file mode 100644
index 0000000..3751f80
--- /dev/null
+++ b/media/libaudioclient/TEST_MAPPING
@@ -0,0 +1,15 @@
+{
+  "presubmit": [
+    {
+      "name": "audio_aidl_conversion_tests"
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index e5e8496..f968a4b 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ToneGenerator"
 
+#include <inttypes.h>
 #include <utility>
 
 #include <math.h>
@@ -866,6 +867,11 @@
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = ToneGenerator::TONEGEN_INF,
           .repeatSegment = 0 },                               // TONE_TW_RINGTONE
+        { .segments = { { .duration = 200, .waveFreq = { 400, 0 }, 0, 0 },
+                        { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = 3,
+          .repeatSegment = 0 },                              // TONE_NZ_CALL_WAITING
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -960,6 +966,16 @@
             TONE_SUP_ERROR,              // TONE_SUP_ERROR
             TONE_SUP_CALL_WAITING,       // TONE_SUP_CALL_WAITING
             TONE_TW_RINGTONE             // TONE_SUP_RINGTONE
+        },
+        {   // NEW ZEALAND
+            TONE_JAPAN_DIAL,              // TONE_SUP_DIAL
+            TONE_JAPAN_BUSY,              // TONE_SUP_BUSY
+            TONE_SUP_CONGESTION,          // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,           // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,      // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,               // TONE_SUP_ERROR
+            TONE_NZ_CALL_WAITING,         // TONE_SUP_CALL_WAITING
+            TONE_GB_RINGTONE              // TONE_SUP_RINGTONE
         }
 };
 
@@ -977,7 +993,7 @@
 //    Method:        ToneGenerator::ToneGenerator()
 //
 //    Description:    Constructor. Initializes the tone sequencer, instantiates required sine wave
-//        generators, instantiates output audio track.
+//        generators, does not initialize output audio track.
 //
 //    Input:
 //        streamType:        Type of stream used for tone playback
@@ -1037,10 +1053,29 @@
         mRegion = INDIA;
     } else if (strstr(value, "tw") != NULL) {
         mRegion = TAIWAN;
+    } else if (strstr(value, "nz") != NULL) {
+        mRegion = NZ;
     } else {
         mRegion = CEPT;
     }
 
+}
+
+////////////////////////////////////////////////////////////////////////////////
+//
+//    Method:        ToneGenerator::onFirstRef()
+//
+//    Description:  Called upon first RefBase reference. Initializes audio track
+//                  with weak pointer to self as the registered callback.
+//    Input:
+//        none
+//
+//    Output:
+//        none
+//
+////////////////////////////////////////////////////////////////////////////////
+
+void ToneGenerator::onFirstRef() {
     if (initAudioTrack()) {
         ALOGV("ToneGenerator INIT OK, time: %d", (unsigned int)(systemTime()/1000000));
     } else {
@@ -1048,9 +1083,6 @@
     }
 }
 
-
-
-
 ////////////////////////////////////////////////////////////////////////////////
 //
 //    Method:        ToneGenerator::~ToneGenerator()
@@ -1215,7 +1247,8 @@
                     sec = sec * 1000 + nsec / 1000000; // duration in milliseconds
                     mMaxSmp = (unsigned int)(((int64_t)sec * mSamplingRate) / 1000);
                 }
-                ALOGV("stopTone() forcing mMaxSmp to %d, total for far %d", mMaxSmp,  mTotalSmp);
+                ALOGV("stopTone() forcing mMaxSmp to %d, total so far %" PRIu64, mMaxSmp,
+                      mTotalSmp);
             } else {
                 mState = TONE_STOPPING;
             }
@@ -1282,8 +1315,7 @@
             AUDIO_CHANNEL_OUT_MONO,
             frameCount,
             AUDIO_OUTPUT_FLAG_FAST,
-            audioCallback,
-            this, // user
+            wp<AudioTrack::IAudioTrackCallback>::fromExisting(this),
             0,    // notificationFrames
             0,    // sharedBuffer
             mThreadCanCallJava,
@@ -1308,50 +1340,47 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 //
-//    Method:        ToneGenerator::audioCallback()
+//    Method:        ToneGenerator::onMoreData()
 //
 //    Description:    AudioTrack callback implementation. Generates a block of
 //        PCM samples
 //        and manages tone generator sequencer: tone pulses, tone duration...
 //
 //    Input:
-//        user    reference (pointer to our ToneGenerator)
-//        info    audio buffer descriptor
+//        buffer  A buffer object containing a pointer which we will fill with
+//                buffer.size() bytes.
 //
 //    Output:
-//        returned value: always true.
+//        The number of bytes we successfully wrote.
 //
 ////////////////////////////////////////////////////////////////////////////////
-void ToneGenerator::audioCallback(int event, void* user, void *info) {
+size_t ToneGenerator::onMoreData(const AudioTrack::Buffer& buffer) {
 
-    if (event != AudioTrack::EVENT_MORE_DATA) return;
-
-    AudioTrack::Buffer *buffer = static_cast<AudioTrack::Buffer *>(info);
-    ToneGenerator *lpToneGen = static_cast<ToneGenerator *>(user);
-    int16_t *lpOut = buffer->i16;
-    unsigned int lNumSmp = buffer->size/sizeof(int16_t);
-    const ToneDescriptor *lpToneDesc = lpToneGen->mpToneDesc;
-
-    if (buffer->size == 0) return;
-
+    int16_t *lpOut = reinterpret_cast<int16_t*>(buffer.data());
+    uint32_t lNumSmp = (buffer.size() / sizeof(int16_t) < UINT32_MAX) ?
+            buffer.size() / sizeof(int16_t) : UINT32_MAX;
+    if (buffer.size() == 0) return 0;
+    // We will write to the entire buffer unless we are stopped, in which case
+    // we return 0 at the end of the loop.
+    size_t bytesWritten = lNumSmp * sizeof(int16_t);
 
     // Clear output buffer: WaveGenerator accumulates into lpOut buffer
-    memset(lpOut, 0, buffer->size);
+    memset(lpOut, 0, buffer.size());
 
     while (lNumSmp) {
-        unsigned int lReqSmp = lNumSmp < lpToneGen->mProcessSize*2 ? lNumSmp : lpToneGen->mProcessSize;
+        unsigned int lReqSmp = lNumSmp < mProcessSize*2 ? lNumSmp : mProcessSize;
         unsigned int lGenSmp;
         unsigned int lWaveCmd = WaveGenerator::WAVEGEN_CONT;
         bool lSignal = false;
 
-        lpToneGen->mLock.lock();
+        mLock.lock();
 
 
         // Update pcm frame count and end time (current time at the end of this process)
-        lpToneGen->mTotalSmp += lReqSmp;
+        mTotalSmp += lReqSmp;
 
         // Update tone gen state machine and select wave gen command
-        switch (lpToneGen->mState) {
+        switch (mState) {
         case TONE_PLAYING:
             lWaveCmd = WaveGenerator::WAVEGEN_CONT;
             break;
@@ -1365,7 +1394,7 @@
             ALOGV("Stop/restart Cbk");
 
             lWaveCmd = WaveGenerator::WAVEGEN_STOP;
-            lpToneGen->mNextSegSmp = TONEGEN_INF; // forced to skip state machine management below
+            mNextSegSmp = TONEGEN_INF; // forced to skip state machine management below
             break;
         case TONE_STOPPED:
             ALOGV("Stopped Cbk");
@@ -1376,20 +1405,20 @@
         }
 
         // Exit if tone sequence is over
-        if (lpToneDesc->segments[lpToneGen->mCurSegment].duration == 0 ||
-            lpToneGen->mTotalSmp > lpToneGen->mMaxSmp) {
-            if (lpToneGen->mState == TONE_PLAYING) {
-                lpToneGen->mState = TONE_STOPPING;
+        if (mpToneDesc->segments[mCurSegment].duration == 0 ||
+            mTotalSmp > mMaxSmp) {
+            if (mState == TONE_PLAYING) {
+                mState = TONE_STOPPING;
             }
-            if (lpToneDesc->segments[lpToneGen->mCurSegment].duration == 0) {
+            if (mpToneDesc->segments[mCurSegment].duration == 0) {
                 goto audioCallback_EndLoop;
             }
             // fade out before stopping if maximum duration reached
             lWaveCmd = WaveGenerator::WAVEGEN_STOP;
-            lpToneGen->mNextSegSmp = TONEGEN_INF; // forced to skip state machine management below
+            mNextSegSmp = TONEGEN_INF; // forced to skip state machine management below
         }
 
-        if (lpToneGen->mTotalSmp > lpToneGen->mNextSegSmp) {
+        if (mTotalSmp > mNextSegSmp && mNextSegSmp != TONEGEN_INF) {
             // Time to go to next sequence segment
 
             ALOGV("End Segment, time: %d", (unsigned int)(systemTime()/1000000));
@@ -1397,61 +1426,61 @@
             lGenSmp = lReqSmp;
 
             // If segment,  ON -> OFF transition : ramp volume down
-            if (lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[0] != 0) {
+            if (mpToneDesc->segments[mCurSegment].waveFreq[0] != 0) {
                 lWaveCmd = WaveGenerator::WAVEGEN_STOP;
                 unsigned int lFreqIdx = 0;
-                uint16_t lFrequency = lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[lFreqIdx];
+                uint16_t lFrequency = mpToneDesc->segments[mCurSegment].waveFreq[lFreqIdx];
 
                 while (lFrequency != 0) {
-                    WaveGenerator *lpWaveGen = lpToneGen->mWaveGens.valueFor(lFrequency);
+                    WaveGenerator *lpWaveGen = mWaveGens.valueFor(lFrequency);
                     lpWaveGen->getSamples(lpOut, lGenSmp, lWaveCmd);
-                    lFrequency = lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[++lFreqIdx];
+                    lFrequency = mpToneDesc->segments[mCurSegment].waveFreq[++lFreqIdx];
                 }
                 ALOGV("ON->OFF, lGenSmp: %d, lReqSmp: %d", lGenSmp, lReqSmp);
             }
 
             // check if we need to loop and loop for the reqd times
-            if (lpToneDesc->segments[lpToneGen->mCurSegment].loopCnt) {
-                if (lpToneGen->mLoopCounter < lpToneDesc->segments[lpToneGen->mCurSegment].loopCnt) {
+            if (mpToneDesc->segments[mCurSegment].loopCnt) {
+                if (mLoopCounter < mpToneDesc->segments[mCurSegment].loopCnt) {
                     ALOGV ("in if loop loopCnt(%d) loopctr(%d), CurSeg(%d)",
-                          lpToneDesc->segments[lpToneGen->mCurSegment].loopCnt,
-                          lpToneGen->mLoopCounter,
-                          lpToneGen->mCurSegment);
-                    lpToneGen->mCurSegment = lpToneDesc->segments[lpToneGen->mCurSegment].loopIndx;
-                    ++lpToneGen->mLoopCounter;
+                          mpToneDesc->segments[mCurSegment].loopCnt,
+                          mLoopCounter,
+                          mCurSegment);
+                    mCurSegment = mpToneDesc->segments[mCurSegment].loopIndx;
+                    ++mLoopCounter;
                 } else {
                     // completed loop. go to next segment
-                    lpToneGen->mLoopCounter = 0;
-                    lpToneGen->mCurSegment++;
+                    mLoopCounter = 0;
+                    mCurSegment++;
                     ALOGV ("in else loop loopCnt(%d) loopctr(%d), CurSeg(%d)",
-                          lpToneDesc->segments[lpToneGen->mCurSegment].loopCnt,
-                          lpToneGen->mLoopCounter,
-                          lpToneGen->mCurSegment);
+                          mpToneDesc->segments[mCurSegment].loopCnt,
+                          mLoopCounter,
+                          mCurSegment);
                 }
             } else {
-                lpToneGen->mCurSegment++;
+                mCurSegment++;
                 ALOGV ("Goto next seg loopCnt(%d) loopctr(%d), CurSeg(%d)",
-                      lpToneDesc->segments[lpToneGen->mCurSegment].loopCnt,
-                      lpToneGen->mLoopCounter,
-                      lpToneGen->mCurSegment);
+                      mpToneDesc->segments[mCurSegment].loopCnt,
+                      mLoopCounter,
+                      mCurSegment);
 
             }
 
             // Handle loop if last segment reached
-            if (lpToneDesc->segments[lpToneGen->mCurSegment].duration == 0) {
-                ALOGV("Last Seg: %d", lpToneGen->mCurSegment);
+            if (mpToneDesc->segments[mCurSegment].duration == 0) {
+                ALOGV("Last Seg: %d", mCurSegment);
 
                 // Pre increment loop count and restart if total count not reached. Stop sequence otherwise
-                if (++lpToneGen->mCurCount <= lpToneDesc->repeatCnt) {
-                    ALOGV("Repeating Count: %d", lpToneGen->mCurCount);
+                if (++mCurCount <= mpToneDesc->repeatCnt) {
+                    ALOGV("Repeating Count: %d", mCurCount);
 
-                    lpToneGen->mCurSegment = lpToneDesc->repeatSegment;
-                    if (lpToneDesc->segments[lpToneDesc->repeatSegment].waveFreq[0] != 0) {
+                    mCurSegment = mpToneDesc->repeatSegment;
+                    if (mpToneDesc->segments[mpToneDesc->repeatSegment].waveFreq[0] != 0) {
                         lWaveCmd = WaveGenerator::WAVEGEN_START;
                     }
 
-                    ALOGV("New segment %d, Next Time: %lld", lpToneGen->mCurSegment,
-                            ((long long)(lpToneGen->mNextSegSmp)*1000)/lpToneGen->mSamplingRate);
+                    ALOGV("New segment %d, Next Time: %lld", mCurSegment,
+                            ((long long)(mNextSegSmp)*1000)/mSamplingRate);
 
 
                 } else {
@@ -1459,10 +1488,10 @@
                     ALOGV("End repeat, time: %d", (unsigned int)(systemTime()/1000000));
                 }
             } else {
-                ALOGV("New segment %d, Next Time: %lld", lpToneGen->mCurSegment,
-                        ((long long)(lpToneGen->mNextSegSmp)*1000)/lpToneGen->mSamplingRate);
+                ALOGV("New segment %d, Next Time: %lld", mCurSegment,
+                        ((long long)(mNextSegSmp)*1000)/mSamplingRate);
 
-                if (lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[0] != 0) {
+                if (mpToneDesc->segments[mCurSegment].waveFreq[0] != 0) {
                     // If next segment is not silent,  OFF -> ON transition : reset wave generator
                     lWaveCmd = WaveGenerator::WAVEGEN_START;
 
@@ -1472,13 +1501,16 @@
                 }
             }
 
-            // Update next segment transition position. No harm to do it also for last segment as lpToneGen->mNextSegSmp won't be used any more
-            lpToneGen->mNextSegSmp
-                    += (lpToneDesc->segments[lpToneGen->mCurSegment].duration * lpToneGen->mSamplingRate) / 1000;
+            // Update next segment transition position. No harm to do it also for last segment as
+            // mNextSegSmp won't be used any more.
+            // Handle 32 bit wraparound gracefully.
+            const uint64_t res = static_cast<uint64_t>(mNextSegSmp) +
+                    (mpToneDesc->segments[mCurSegment].duration * mSamplingRate) / 1000;
+            mNextSegSmp = static_cast<uint32_t>(std::min<uint64_t>(TONEGEN_INF, res));
 
         } else {
             // Inside a segment keep tone ON or OFF
-            if (lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[0] == 0) {
+            if (mpToneDesc->segments[mCurSegment].waveFreq[0] == 0) {
                 lGenSmp = 0;  // If odd segment, tone is currently OFF
             } else {
                 lGenSmp = lReqSmp;  // If even segment, tone is currently ON
@@ -1488,12 +1520,12 @@
         if (lGenSmp) {
             // If samples must be generated, call all active wave generators and accumulate waves in lpOut
             unsigned int lFreqIdx = 0;
-            uint16_t lFrequency = lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[lFreqIdx];
+            uint16_t lFrequency = mpToneDesc->segments[mCurSegment].waveFreq[lFreqIdx];
 
             while (lFrequency != 0) {
-                WaveGenerator *lpWaveGen = lpToneGen->mWaveGens.valueFor(lFrequency);
+                WaveGenerator *lpWaveGen = mWaveGens.valueFor(lFrequency);
                 lpWaveGen->getSamples(lpOut, lGenSmp, lWaveCmd);
-                lFrequency = lpToneDesc->segments[lpToneGen->mCurSegment].waveFreq[++lFreqIdx];
+                lFrequency = mpToneDesc->segments[mCurSegment].waveFreq[++lFreqIdx];
             }
         }
 
@@ -1501,21 +1533,19 @@
         lpOut += lReqSmp;
 
 audioCallback_EndLoop:
-
-        switch (lpToneGen->mState) {
+        switch (mState) {
         case TONE_RESTARTING:
             ALOGV("Cbk restarting track");
-            if (lpToneGen->prepareWave()) {
-                lpToneGen->mState = TONE_STARTING;
-                if (clock_gettime(CLOCK_MONOTONIC, &lpToneGen->mStartTime) != 0) {
-                    lpToneGen->mStartTime.tv_sec = 0;
+            if (prepareWave()) {
+                mState = TONE_STARTING;
+                if (clock_gettime(CLOCK_MONOTONIC, &mStartTime) != 0) {
+                    mStartTime.tv_sec = 0;
                 }
-                // must reload lpToneDesc as prepareWave() may change mpToneDesc
-                lpToneDesc = lpToneGen->mpToneDesc;
+                // must reload mpToneDesc as prepareWave() may change mpToneDesc
             } else {
                 ALOGW("Cbk restarting prepareWave() failed");
-                lpToneGen->mState = TONE_IDLE;
-                lpToneGen->mpAudioTrack->stop();
+                mState = TONE_IDLE;
+                mpAudioTrack->stop();
                 // Force loop exit
                 lNumSmp = 0;
             }
@@ -1523,22 +1553,22 @@
             break;
         case TONE_STOPPING:
             ALOGV("Cbk Stopping");
-            lpToneGen->mState = TONE_STOPPED;
+            mState = TONE_STOPPED;
             // Force loop exit
             lNumSmp = 0;
             break;
         case TONE_STOPPED:
-            lpToneGen->mState = TONE_INIT;
+            mState = TONE_INIT;
             ALOGV("Cbk Stopped track");
-            lpToneGen->mpAudioTrack->stop();
+            mpAudioTrack->stop();
             // Force loop exit
             lNumSmp = 0;
-            buffer->size = 0;
+            bytesWritten = 0;
             lSignal = true;
             break;
         case TONE_STARTING:
             ALOGV("Cbk starting track");
-            lpToneGen->mState = TONE_PLAYING;
+            mState = TONE_PLAYING;
             lSignal = true;
             break;
         case TONE_PLAYING:
@@ -1546,14 +1576,15 @@
         default:
             // Force loop exit
             lNumSmp = 0;
-            buffer->size = 0;
+            bytesWritten = 0;
             break;
         }
 
         if (lSignal)
-            lpToneGen->mWaitCbkCond.broadcast();
-        lpToneGen->mLock.unlock();
+            mWaitCbkCond.broadcast();
+        mLock.unlock();
     }
+    return bytesWritten;
 }
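
The ToneGenerator rewrite above replaces the static audioCallback(event, user, info) trampoline with the AudioTrack::IAudioTrackCallback interface: the track is handed a weak pointer to the callback (hence the new onFirstRef(), since wp<>::fromExisting(this) needs an object that is already reference counted), and onMoreData() receives a Buffer and returns the number of bytes actually written. A minimal sketch of a callback in that shape, assuming the virtual hook has exactly the signature ToneGenerator overrides above (the class itself is illustrative):

#include <cstring>             // memset
#include <media/AudioTrack.h>  // AudioTrack::IAudioTrackCallback, AudioTrack::Buffer

namespace android {

// Illustrative callback: fills every buffer with silence and reports the whole
// buffer as written; returning fewer bytes tells the track less was produced.
class SilenceCallback : public AudioTrack::IAudioTrackCallback {
  public:
    size_t onMoreData(const AudioTrack::Buffer& buffer) override {
        memset(buffer.data(), 0, buffer.size());
        return buffer.size();
    }
};

}  // namespace android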
 
 
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 188f321..4fc1c44 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -33,11 +33,14 @@
     doDestroy();
 }
 
-void TrackPlayerBase::init(AudioTrack* pat, player_type_t playerType, audio_usage_t usage,
-        audio_session_t sessionId) {
+void TrackPlayerBase::init(const sp<AudioTrack>& pat,
+                           const sp<AudioTrack::IAudioTrackCallback>& callback,
+                           player_type_t playerType, audio_usage_t usage,
+                           audio_session_t sessionId) {
     PlayerBase::init(playerType, usage, sessionId);
     mAudioTrack = pat;
     if (mAudioTrack != 0) {
+        mCallbackHandle = callback;
         mSelfAudioDeviceCallback = new SelfAudioDeviceCallback(*this);
         mAudioTrack->addAudioDeviceCallback(mSelfAudioDeviceCallback);
         mAudioTrack->setPlayerIId(mPIId); // set in PlayerBase::init().
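
The TrackPlayerBase::init() change above takes the track as a strong pointer and stores the callback in mCallbackHandle: since the track itself appears to hold its callback only weakly (see the ToneGenerator hunk), some strong reference has to keep the callback alive while playback continues. A hypothetical holder showing the same ownership pattern (all names below are illustrative):

#include <media/AudioTrack.h>
#include <utils/StrongPointer.h>

namespace android {

// Keep strong references to both the track and its callback so the callback
// outlives every weak reference the track may hold to it.
class PlayingTrackHolder {
  public:
    void attach(const sp<AudioTrack>& track,
                const sp<AudioTrack::IAudioTrackCallback>& callback) {
        mTrack = track;
        mCallback = callback;  // mirrors mCallbackHandle in TrackPlayerBase::init()
    }

  private:
    sp<AudioTrack> mTrack;
    sp<AudioTrack::IAudioTrackCallback> mCallback;
};

}  // namespace android
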
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
index 04a02c7..335866f 100644
--- a/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesEx.aidl
@@ -17,7 +17,7 @@
 package android.media;
 
 import android.media.AudioAttributesInternal;
-import android.media.AudioStreamType;
+import android.media.audio.common.AudioStreamType;
 
 /**
  * This is the equivalent of the android::AudioAttributes C++ type.
diff --git a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
index 699df0a..2e74206 100644
--- a/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioAttributesInternal.aidl
@@ -16,9 +16,9 @@
 
 package android.media;
 
-import android.media.AudioContentType;
-import android.media.AudioSourceType;
-import android.media.AudioUsage;
+import android.media.audio.common.AudioContentType;
+import android.media.audio.common.AudioSource;
+import android.media.audio.common.AudioUsage;
 
 /**
  * The "Internal" suffix of this type name is to disambiguate it from the
@@ -28,7 +28,7 @@
 parcelable AudioAttributesInternal {
     AudioContentType contentType;
     AudioUsage usage;
-    AudioSourceType source;
+    AudioSource source;
     // Bitmask, indexed by AudioFlag.
     int flags;
     @utf8InCpp String tags; /* UTF8 */
diff --git a/media/libaudioclient/aidl/android/media/AudioConfig.aidl b/media/libaudioclient/aidl/android/media/AudioConfig.aidl
deleted file mode 100644
index 8dc97d3..0000000
--- a/media/libaudioclient/aidl/android/media/AudioConfig.aidl
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioOffloadInfo;
-import android.media.audio.common.AudioFormat;
-
-/**
- * {@hide}
- */
-parcelable AudioConfig {
-    int sampleRate;
-    /**
-     * Interpreted as audio_channel_mask_t.
-     * TODO(ytai): Create a designated type.
-     */
-    int channelMask;
-    AudioFormat format;
-    AudioOffloadInfo offloadInfo;
-    long frameCount;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl b/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
deleted file mode 100644
index 8353c0d..0000000
--- a/media/libaudioclient/aidl/android/media/AudioConfigBase.aidl
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.audio.common.AudioFormat;
-
-/**
- * {@hide}
- */
-parcelable AudioConfigBase {
-    int sampleRate;
-    /** Interpreted as audio_channel_mask_t. */
-    int channelMask;
-    AudioFormat format;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioContentType.aidl b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
deleted file mode 100644
index f734fba..0000000
--- a/media/libaudioclient/aidl/android/media/AudioContentType.aidl
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-@Backing(type="int")
-enum AudioContentType {
-    UNKNOWN = 0,
-    SPEECH = 1,
-    MUSIC = 2,
-    MOVIE = 3,
-    SONIFICATION = 4,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioDevice.aidl b/media/libaudioclient/aidl/android/media/AudioDevice.aidl
deleted file mode 100644
index b200697..0000000
--- a/media/libaudioclient/aidl/android/media/AudioDevice.aidl
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable AudioDevice {
-    /** Interpreted as audio_devices_t. */
-    int type;
-    @utf8InCpp String address;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioStandard.aidl b/media/libaudioclient/aidl/android/media/AudioDirectMode.aidl
similarity index 83%
rename from media/libaudioclient/aidl/android/media/AudioStandard.aidl
rename to media/libaudioclient/aidl/android/media/AudioDirectMode.aidl
index e131d0d..0da4721 100644
--- a/media/libaudioclient/aidl/android/media/AudioStandard.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioDirectMode.aidl
@@ -15,13 +15,10 @@
  */
 package android.media;
 
-/**
- * The audio standard that describe audio playback/capture capabilites.
- *
- * {@hide}
- */
 @Backing(type="int")
-enum AudioStandard {
+enum AudioDirectMode {
     NONE = 0,
-    EDID = 1,
+    OFFLOAD = 1,
+    OFFLOAD_GAPLESS = 2,
+    BITSTREAM = 4,
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl b/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl
deleted file mode 100644
index b03adfe..0000000
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationMetadataType.aidl
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioEncapsulationMetadataType {
-    NONE = 0,
-    FRAMEWORK_TUNER = 1,
-    DVB_AD_DESCRIPTOR = 2,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioFlag.aidl b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
index 91361fb..acf4e6d 100644
--- a/media/libaudioclient/aidl/android/media/AudioFlag.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioFlag.aidl
@@ -36,4 +36,5 @@
     CAPTURE_PRIVATE = 13,
     CONTENT_SPATIALIZED = 14,
     NEVER_SPATIALIZE = 15,
+    CALL_REDIRECTION = 16,
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioGain.aidl b/media/libaudioclient/aidl/android/media/AudioGain.aidl
deleted file mode 100644
index 048b295..0000000
--- a/media/libaudioclient/aidl/android/media/AudioGain.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable AudioGain {
-    int index;
-    boolean useInChannelMask;
-    boolean useForVolume;
-    /** Bitmask, indexed by AudioGainMode. */
-    int mode;
-    /** Interpreted as audio_channel_mask_t. */
-    int channelMask;
-    int minValue;
-    int maxValue;
-    int defaultValue;
-    int stepValue;
-    int minRampMs;
-    int maxRampMs;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
deleted file mode 100644
index b93c2dc..0000000
--- a/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable AudioGainConfig {
-    /** Index of the corresponding audio_gain in the audio_port gains[] table. */
-    int index;
-
-    /** Mode requested for this command. Bitfield indexed by AudioGainMode. */
-    int mode;
-
-    /**
-     * Channels which gain value follows. N/A in joint mode.
-     * Interpreted as audio_channel_mask_t.
-     */
-    int channelMask;
-
-    /**
-     * Gain values in millibels.
-     * For each channel ordered from LSb to MSb in channel mask. The number of values is 1 in joint
-     * mode, otherwise equals the number of bits implied by channelMask.
-     */
-    int[]  values;
-
-    /** Ramp duration in ms. */
-    int rampDurationMs;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainMode.aidl b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
deleted file mode 100644
index e1b9f0b..0000000
--- a/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioGainMode {
-    JOINT    = 0,
-    CHANNELS = 1,
-    RAMP     = 2,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/libaudioclient/aidl/android/media/AudioGainSys.aidl
similarity index 81%
rename from media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
rename to media/libaudioclient/aidl/android/media/AudioGainSys.aidl
index 9e04e82..426f4ed 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioGainSys.aidl
@@ -13,14 +13,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.media;
 
 /**
+ * Provides additional runtime information for AudioGain, used by the framework.
+ *
  * {@hide}
  */
-@Backing(type="int")
-enum AudioEncapsulationMode {
-     NONE = 0,
-     ELEMENTARY_STREAM = 1,
-     HANDLE = 2,
+parcelable AudioGainSys {
+    int index;
+    boolean isInput;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
deleted file mode 100644
index bfc0eb0..0000000
--- a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioInputFlags {
-    FAST       = 0,
-    HW_HOTWORD = 1,
-    RAW        = 2,
-    SYNC       = 3,
-    MMAP_NOIRQ = 4,
-    VOIP_TX    = 5,
-    HW_AV_SYNC = 6,
-    DIRECT     = 7,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
index 876ef9b..b01f902 100644
--- a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -17,7 +17,8 @@
 package android.media;
 
 import android.media.AudioPatch;
-import android.media.audio.common.AudioFormat;
+import android.media.audio.common.AudioChannelLayout;
+import android.media.audio.common.AudioFormatDescription;
 
 /**
  * {@hide}
@@ -26,10 +27,10 @@
     /** Interpreted as audio_io_handle_t. */
     int ioHandle;
     AudioPatch patch;
+    boolean isInput;
     int samplingRate;
-    AudioFormat format;
-    /** Interpreted as audio_channel_mask_t. */
-    int channelMask;
+    AudioFormatDescription format;
+    AudioChannelLayout channelMask;
     long frameCount;
     long frameCountHAL;
     /** Only valid for output. */
diff --git a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
deleted file mode 100644
index f9b25bf..0000000
--- a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-union AudioIoFlags {
-    /** Bitmask indexed by AudioInputFlags. */
-    int input;
-    /** Bitmask indexed by AudioOutputFlags. */
-    int output;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index 7473372..88b0450 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -16,12 +16,12 @@
 
 package android.media;
 
-import android.media.AudioConfig;
-import android.media.AudioDevice;
 import android.media.AudioMixCallbackFlag;
 import android.media.AudioMixMatchCriterion;
 import android.media.AudioMixRouteFlag;
 import android.media.AudioMixType;
+import android.media.audio.common.AudioConfig;
+import android.media.audio.common.AudioDevice;
 
 /**
  * {@hide}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl b/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl
deleted file mode 100644
index d70b364..0000000
--- a/media/libaudioclient/aidl/android/media/AudioMixLatencyClass.aidl
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioMixLatencyClass {
-    LOW = 0,
-    NORMAL = 1,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
index e26a9e1..921a93a 100644
--- a/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMixMatchCriterionValue.aidl
@@ -16,15 +16,15 @@
 
 package android.media;
 
-import android.media.AudioSourceType;
-import android.media.AudioUsage;
+import android.media.audio.common.AudioSource;
+import android.media.audio.common.AudioUsage;
 
 /**
  * {@hide}
  */
 union AudioMixMatchCriterionValue {
     AudioUsage usage = AudioUsage.UNKNOWN;
-    AudioSourceType source;
+    AudioSource source;
     /** Interpreted as uid_t. */
     int uid;
     int userId;
diff --git a/media/libaudioclient/aidl/android/media/AudioMode.aidl b/media/libaudioclient/aidl/android/media/AudioMode.aidl
deleted file mode 100644
index 7067dd3..0000000
--- a/media/libaudioclient/aidl/android/media/AudioMode.aidl
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioMode {
-    INVALID = -2,
-    CURRENT = -1,
-    NORMAL = 0,
-    RINGTONE = 1,
-    IN_CALL = 2,
-    IN_COMMUNICATION = 3,
-    CALL_SCREEN = 4,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl b/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
deleted file mode 100644
index c86b3f0..0000000
--- a/media/libaudioclient/aidl/android/media/AudioOffloadInfo.aidl
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioConfigBase;
-import android.media.AudioEncapsulationMode;
-import android.media.AudioStreamType;
-import android.media.AudioUsage;
-import android.media.audio.common.AudioFormat;
-
-/**
- * {@hide}
- */
-parcelable AudioOffloadInfo {
-    /** Version of the info structure. Interpreted as a uint16_t version constant. */
-    int version;
-    /** Audio configuration. */
-    AudioConfigBase config;
-    /** Stream type. */
-    AudioStreamType streamType;
-    /** Bit rate in bits per second. */
-    int bitRate;
-    /** Duration in microseconds, -1 if unknown. */
-    long durationUs;
-    /** true if stream is tied to a video stream. */
-    boolean hasVideo;
-    /** true if streaming, false if local playback. */
-    boolean isStreaming;
-    int bitWidth;
-    /** Offload fragment size. */
-    int offloadBufferSize;
-    AudioUsage usage;
-    AudioEncapsulationMode encapsulationMode;
-    /** Content id from tuner HAL (0 if none). */
-    int contentId;
-    /** Sync id from tuner HAL (0 if none). */
-    int syncId;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
deleted file mode 100644
index cebd8f0..0000000
--- a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioOutputFlags {
-    DIRECT           = 0,
-    PRIMARY          = 1,
-    FAST             = 2,
-    DEEP_BUFFER      = 3,
-    COMPRESS_OFFLOAD = 4,
-    NON_BLOCKING     = 5,
-    HW_AV_SYNC       = 6,
-    TTS              = 7,
-    RAW              = 8,
-    SYNC             = 9,
-    IEC958_NONAUDIO  = 10,
-    DIRECT_PCM       = 11,
-    MMAP_NOIRQ       = 12,
-    VOIP_RX          = 13,
-    INCALL_MUSIC     = 14,
-    GAPLESS_OFFLOAD  = 15,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPort.aidl
index bf0e5b7..ff177c0 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPort.aidl
@@ -16,35 +16,13 @@
 
 package android.media;
 
-import android.media.AudioGain;
-import android.media.AudioPortConfig;
-import android.media.AudioPortExt;
-import android.media.AudioPortRole;
-import android.media.AudioPortType;
-import android.media.AudioProfile;
-import android.media.ExtraAudioDescriptor;
+import android.media.AudioPortSys;
+import android.media.audio.common.AudioPort;
 
 /**
  * {@hide}
  */
 parcelable AudioPort {
-    /** Port unique ID. Interpreted as audio_port_handle_t. */
-    int id;
-    /** Sink or source. */
-    AudioPortRole role;
-    /** Device, mix ... */
-    AudioPortType type;
-    @utf8InCpp String name;
-    /** AudioProfiles supported by this port (format, Rates, Channels). */
-    AudioProfile[] profiles;
-    /**
-     * ExtraAudioDescriptors supported by this port. The format is not recognized by the
-     * platform. The audio capability is described by a hardware descriptor.
-     */
-    ExtraAudioDescriptor[] extraAudioDescriptors;
-    /** Gain controllers. */
-    AudioGain[] gains;
-    /** Current audio port configuration. */
-    AudioPortConfig activeConfig;
-    AudioPortExt ext;
+    AudioPort hal;
+    AudioPortSys sys;
 }
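
Note: after this refactoring the framework-side AudioPort is a thin pairing of the stable HAL type (android.media.audio.common.AudioPort, the 'hal' field) with framework-only state (AudioPortSys, the 'sys' field). A minimal C++ sketch of reading the two halves follows; it assumes the standard AIDL C++ backend (one generated class per parcelable, namespaces following the package), and the function and variable names are illustrative, not part of this change.

    #include <android/media/AudioPort.h>   // framework wrapper: { hal, sys }

    // Inspect a framework AudioPort: HAL-visible attributes sit in 'hal',
    // framework-only attributes (role, type, active config, ...) sit in 'sys'.
    void describePort(const android::media::AudioPort& port) {
        const int32_t portId = port.hal.id;                 // audio_port_handle_t
        const bool isDevicePort =
                (port.sys.type == android::media::AudioPortType::DEVICE);
        const bool isSink =
                (port.sys.role == android::media::AudioPortRole::SINK);
        (void)portId; (void)isDevicePort; (void)isSink;
    }
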
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
index 2dd30a4..3a4ca31 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
@@ -16,44 +16,13 @@
 
 package android.media;
 
-import android.media.AudioGainConfig;
-import android.media.AudioIoFlags;
-import android.media.AudioPortConfigExt;
-import android.media.AudioPortConfigType;
-import android.media.AudioPortRole;
-import android.media.AudioPortType;
-import android.media.audio.common.AudioFormat;
+import android.media.AudioPortConfigSys;
+import android.media.audio.common.AudioPortConfig;
 
 /**
  * {@hide}
  */
 parcelable AudioPortConfig {
-    /**
-     * Port unique ID.
-     * Interpreted as audio_port_handle_t.
-     */
-    int id;
-    /** Sink or source. */
-    AudioPortRole role;
-    /** Device, mix ... */
-    AudioPortType type;
-    /** Bitmask, indexed by AudioPortConfigType. */
-    int configMask;
-    /** Sampling rate in Hz. */
-    int sampleRate;
-    /**
-     * Channel mask, if applicable.
-     * Interpreted as audio_channel_mask_t.
-     * TODO: bitmask?
-     */
-    int channelMask;
-    /**
-     * Format, if applicable.
-     */
-    AudioFormat format;
-    /** Gain to apply, if applicable. */
-    AudioGainConfig gain;
-    /** Framework only: HW_AV_SYNC, DIRECT, ... */
-    AudioIoFlags flags;
-    AudioPortConfigExt ext;
+    AudioPortConfig hal;
+    AudioPortConfigSys sys;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
deleted file mode 100644
index a99aa9b..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable AudioPortConfigDeviceExt {
-    /**
-     * Module the device is attached to.
-     * Interpreted as audio_module_handle_t.
-     */
-    int hwModule;
-    /**
-     * Device type (e.g AUDIO_DEVICE_OUT_SPEAKER).
-     * Interpreted as audio_devices_t.
-     * TODO: Convert to a standalone AIDL representation.
-     */
-    int type;
-    /** Device address. "" if N/A. */
-    @utf8InCpp String address;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
deleted file mode 100644
index 5d635b6..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioPortConfigDeviceExt;
-import android.media.AudioPortConfigMixExt;
-import android.media.AudioPortConfigSessionExt;
-
-/**
- * {@hide}
- */
-union AudioPortConfigExt {
-    /**
-     * This represents an empty union. Value is ignored.
-     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
-     *             established.
-     */
-    boolean unspecified;
-    /** Device specific info. */
-    AudioPortConfigDeviceExt device;
-    /** Mix specific info. */
-    AudioPortConfigMixExt mix;
-    /** Session specific info. */
-    AudioPortConfigSessionExt session;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
deleted file mode 100644
index d3226f2..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioPortConfigMixExtUseCase;
-
-/**
- * {@hide}
- */
-parcelable AudioPortConfigMixExt {
-    /**
-     * Module the stream is attached to.
-     * Interpreted as audio_module_handle_t.
-     */
-    int hwModule;
-    /**
-     * I/O handle of the input/output stream.
-     * Interpreted as audio_io_handle_t.
-     */
-    int handle;
-    AudioPortConfigMixExtUseCase usecase;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
deleted file mode 100644
index c61f044..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioSourceType;
-import android.media.AudioStreamType;
-
-/**
- * {@hide}
- */
-union AudioPortConfigMixExtUseCase {
-    /**
-     * This to be set if the containing config has the AudioPortRole::NONE role.
-     * This represents an empty value (value is ignored).
-     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
-     *             established.
-     */
-    boolean unspecified;
-    /** This to be set if the containing config has the AudioPortRole::SOURCE role. */
-    AudioStreamType stream;
-    /** This to be set if the containing config has the AudioPortRole::SINK role. */
-    AudioSourceType source;
-}
diff --git a/media/libaudioclient/aidl/android/media/Int.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl
similarity index 71%
rename from media/libaudioclient/aidl/android/media/Int.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl
index 24f4d62..8692848 100644
--- a/media/libaudioclient/aidl/android/media/Int.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl
@@ -16,12 +16,17 @@
 
 package android.media;
 
+import android.media.AudioPortExtSys;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+
 /**
- * This is a simple wrapper around an 'int', putting it in a parcelable, so it can be used as an
- * inout parameter, be made @nullable, etc.
- *
  * {@hide}
  */
-parcelable Int {
-    int value;
+parcelable AudioPortConfigSys {
+    /** Sink or source. */
+    AudioPortRole role;
+    /** Device, mix ... */
+    AudioPortType type;
+    AudioPortExtSys ext;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
deleted file mode 100644
index 6e22b8d..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioPortConfigType {
-    SAMPLE_RATE  = 0,
-    CHANNEL_MASK = 1,
-    FORMAT       = 2,
-    GAIN         = 3,
-    FLAGS        = 4,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
similarity index 85%
rename from media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
index b758f23..0f5a9b6 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortDeviceExt.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortDeviceExtSys.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,15 +16,12 @@
 
 package android.media;
 
-import android.media.AudioDevice;
-
 /**
  * {@hide}
  */
-parcelable AudioPortDeviceExt {
+parcelable AudioPortDeviceExtSys {
     /** Module the device is attached to. Interpreted as audio_module_handle_t. */
     int hwModule;
-    AudioDevice device;
     /** Bitmask, indexed by AudioEncapsulationMode. */
     int encapsulationModes;
     /** Bitmask, indexed by AudioEncapsulationMetadataType. */
diff --git a/media/libaudioclient/aidl/android/media/AudioPortExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortExt.aidl
deleted file mode 100644
index 453784b..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortExt.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioPortDeviceExt;
-import android.media.AudioPortMixExt;
-import android.media.AudioPortSessionExt;
-
-/**
- * {@hide}
- */
-union AudioPortExt {
-    /**
-     * This represents an empty union. Value is ignored.
-     * TODO(ytai): replace with the canonical representation for an empty union, as soon as it is
-     *             established.
-     */
-    boolean unspecified;
-    /** Device specific info. */
-    AudioPortDeviceExt device;
-    /** Mix specific info. */
-    AudioPortMixExt mix;
-    /** Session specific info. */
-    AudioPortSessionExt session;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl b/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
similarity index 62%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
copy to media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
index b08a604..2cdf4f6 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationType.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortExtSys.aidl
@@ -16,14 +16,19 @@
 
 package android.media;
 
+import android.media.AudioPortDeviceExtSys;
+import android.media.AudioPortMixExtSys;
+
 /**
- * Audio encapsulation type is used to describe if the audio data should be sent with a particular
- * encapsulation type or not.
- *
  * {@hide}
  */
-@Backing(type="int")
-enum AudioEncapsulationType {
-    NONE     = 0,
-    IEC61937 = 1,
-}
\ No newline at end of file
+union AudioPortExtSys {
+    /**
+     * This represents an empty union. Value is ignored.
+     */
+    boolean unspecified;
+    /** System-only parameters when the port is an audio device. */
+    AudioPortDeviceExtSys device;
+    /** System-only parameters when the port is an audio mix. */
+    AudioPortMixExtSys mix;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl
deleted file mode 100644
index 62cdb8e..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortMixExt.aidl
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioMixLatencyClass;
-
-/**
- * {@hide}
- */
-parcelable AudioPortMixExt {
-    /** Module the stream is attached to. Interpreted as audio_module_handle_t. */
-    int hwModule;
-    /** I/O handle of the input/output stream. Interpreted as audio_io_handle_t. */
-    int handle;
-    /** Latency class */
-    AudioMixLatencyClass latencyClass;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioStandard.aidl b/media/libaudioclient/aidl/android/media/AudioPortMixExtSys.aidl
similarity index 81%
copy from media/libaudioclient/aidl/android/media/AudioStandard.aidl
copy to media/libaudioclient/aidl/android/media/AudioPortMixExtSys.aidl
index e131d0d..5999885 100644
--- a/media/libaudioclient/aidl/android/media/AudioStandard.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortMixExtSys.aidl
@@ -13,15 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.media;
 
 /**
- * The audio standard that describes audio playback/capture capabilities.
- *
  * {@hide}
  */
-@Backing(type="int")
-enum AudioStandard {
-    NONE = 0,
-    EDID = 1,
+parcelable AudioPortMixExtSys {
+    /** Module the stream is attached to. Interpreted as audio_module_handle_t. */
+    int hwModule;
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl
deleted file mode 100644
index dbca168..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPortSessionExt.aidl
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable AudioPortSessionExt {
-    /** Audio session. Interpreted as audio_session_t. */
-    int session;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
new file mode 100644
index 0000000..f3b5c19
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGainSys;
+import android.media.AudioPortConfig;
+import android.media.AudioPortExtSys;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.AudioProfileSys;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortSys {
+    /** Sink or source. */
+    AudioPortRole role;
+    /** Device, mix ... */
+    AudioPortType type;
+    /** System-only parameters for each AudioProfile from 'port.profiles'. */
+    AudioProfileSys[] profiles;
+    /** System-only parameters for each AudioGain from 'port.gains'. */
+    AudioGainSys[] gains;
+    /** Current audio port configuration. */
+    AudioPortConfig activeConfig;
+    /** System-only extra parameters for 'port.ext'. */
+    AudioPortExtSys ext;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioProfile.aidl b/media/libaudioclient/aidl/android/media/AudioProfile.aidl
deleted file mode 100644
index afb288f..0000000
--- a/media/libaudioclient/aidl/android/media/AudioProfile.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioEncapsulationType;
-import android.media.audio.common.AudioFormat;
-
-/**
- * {@hide}
- */
-parcelable AudioProfile {
-    @utf8InCpp String name;
-    /** The format for an audio profile should only be set when initialized. */
-    AudioFormat format;
-    /** Interpreted as audio_channel_mask_t. */
-    int[] channelMasks;
-    int[] samplingRates;
-    boolean isDynamicFormat;
-    boolean isDynamicChannels;
-    boolean isDynamicRate;
-    AudioEncapsulationType encapsulationType;
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/libaudioclient/aidl/android/media/AudioProfileSys.aidl
similarity index 68%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
copy to media/libaudioclient/aidl/android/media/AudioProfileSys.aidl
index 9e04e82..329c9d5 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioProfileSys.aidl
@@ -13,14 +13,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.media;
 
 /**
+ * Indicates whether the parameters of the AudioProfiles in the
+ * AudioPort are dynamic. Each instance of AudioProfileSys corresponds
+ * to an instance of AudioProfile.
+ *
  * {@hide}
  */
-@Backing(type="int")
-enum AudioEncapsulationMode {
-     NONE = 0,
-     ELEMENTARY_STREAM = 1,
-     HANDLE = 2,
+parcelable AudioProfileSys {
+    boolean isDynamicFormat;
+    boolean isDynamicChannels;
+    boolean isDynamicRate;
 }
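
Note: per the field comments in AudioPortSys.aidl above, the 'sys' arrays (profiles, gains) are kept index-aligned with the HAL-side arrays in AudioPort.hal, so AudioProfileSys/AudioGainSys entries are looked up positionally. A hedged C++ sketch of that pairing (variable names are illustrative, not from this change):

    // Walk the HAL profiles together with their system-only companions.
    for (size_t i = 0; i < port.sys.profiles.size(); ++i) {
        const auto& profHal = port.hal.profiles[i];  // format, channel masks, rates
        const auto& profSys = port.sys.profiles[i];  // isDynamicFormat/Channels/Rate
        if (profSys.isDynamicRate) {
            // The sample rates in profHal were discovered at runtime rather than
            // declared statically in the audio policy configuration.
        }
        (void)profHal;
    }
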
diff --git a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
deleted file mode 100644
index 8673b92..0000000
--- a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioSourceType {
-    INVALID = -1,
-    DEFAULT = 0,
-    MIC = 1,
-    VOICE_UPLINK = 2,
-    VOICE_DOWNLINK = 3,
-    VOICE_CALL = 4,
-    CAMCORDER = 5,
-    VOICE_RECOGNITION = 6,
-    VOICE_COMMUNICATION = 7,
-    REMOTE_SUBMIX = 8,
-    UNPROCESSED = 9,
-    VOICE_PERFORMANCE = 10,
-    ECHO_REFERENCE = 1997,
-    FM_TUNER = 1998,
-    /**
-     * A low-priority, preemptible audio source for background software
-     * hotword detection. Same tuning as VOICE_RECOGNITION.
-     * Used only internally by the framework.
-     */
-    HOTWORD = 1999,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioStreamType.aidl b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
deleted file mode 100644
index d7778828..0000000
--- a/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioStreamType {
-    DEFAULT = -1,
-    VOICE_CALL = 0,
-    SYSTEM = 1,
-    RING = 2,
-    MUSIC = 3,
-    ALARM = 4,
-    NOTIFICATION = 5,
-    BLUETOOTH_SCO = 6,
-    ENFORCED_AUDIBLE = 7,
-    DTMF = 8,
-    TTS = 9,
-    ACCESSIBILITY = 10,
-    ASSISTANT = 11,
-    /** For dynamic policy output mixes. Only used by the audio policy */
-    REROUTING = 12,
-    /** For audio flinger tracks volume. Only used by the audioflinger */
-    PATCH = 13,
-    /** Stream corresponding to AUDIO_USAGE_CALL_ASSISTANT */
-    CALL_ASSISTANT = 14,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioUsage.aidl b/media/libaudioclient/aidl/android/media/AudioUsage.aidl
deleted file mode 100644
index 66c5c30..0000000
--- a/media/libaudioclient/aidl/android/media/AudioUsage.aidl
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioUsage {
-    UNKNOWN = 0,
-    MEDIA = 1,
-    VOICE_COMMUNICATION = 2,
-    VOICE_COMMUNICATION_SIGNALLING = 3,
-    ALARM = 4,
-    NOTIFICATION = 5,
-    NOTIFICATION_TELEPHONY_RINGTONE = 6,
-    NOTIFICATION_COMMUNICATION_REQUEST = 7,
-    NOTIFICATION_COMMUNICATION_INSTANT = 8,
-    NOTIFICATION_COMMUNICATION_DELAYED = 9,
-    NOTIFICATION_EVENT = 10,
-    ASSISTANCE_ACCESSIBILITY = 11,
-    ASSISTANCE_NAVIGATION_GUIDANCE = 12,
-    ASSISTANCE_SONIFICATION = 13,
-    GAME = 14,
-    VIRTUAL_SOURCE = 15,
-    ASSISTANT = 16,
-    CALL_ASSISTANT = 17,
-    EMERGENCY = 1000,
-    SAFETY = 1001,
-    VEHICLE_STATUS = 1002,
-    ANNOUNCEMENT = 1003,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioUuid.aidl b/media/libaudioclient/aidl/android/media/AudioUuid.aidl
deleted file mode 100644
index bba9039..0000000
--- a/media/libaudioclient/aidl/android/media/AudioUuid.aidl
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-parcelable AudioUuid {
-    int timeLow;
-    int timeMid;
-    int timeHiAndVersion;
-    int clockSeq;
-    byte[] node;  // Length = 6
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl b/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
index 3a29a08..b95a1d3 100644
--- a/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioVolumeGroup.aidl
@@ -17,7 +17,7 @@
 package android.media;
 
 import android.media.AudioAttributesInternal;
-import android.media.AudioStreamType;
+import android.media.audio.common.AudioStreamType;
 
 /**
  * {@hide}
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
index 35a56eb..bcca04a 100644
--- a/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateEffectRequest.aidl
@@ -16,10 +16,10 @@
 
 package android.media;
 
-import android.media.AudioDevice;
+import android.content.AttributionSourceState;
 import android.media.EffectDescriptor;
 import android.media.IEffectClient;
-import android.content.AttributionSourceState;
+import android.media.audio.common.AudioDevice;
 
 /**
  * Input arguments of the createEffect() method.
diff --git a/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl b/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
index 0aa640a..e2755dd 100644
--- a/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateEffectResponse.aidl
@@ -29,4 +29,5 @@
     boolean enabled;
     @nullable IEffect effect;
     EffectDescriptor desc;
+    boolean alreadyExists;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
index 7e3c240..b938a3e 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordRequest.aidl
@@ -18,7 +18,7 @@
 
 import android.media.AudioAttributesInternal;
 import android.media.AudioClient;
-import android.media.AudioConfigBase;
+import android.media.audio.common.AudioConfigBase;
 
 /**
  * CreateRecordRequest contains all input arguments sent by AudioRecord to AudioFlinger
diff --git a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
index d78b3fc..7d159d0 100644
--- a/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateRecordResponse.aidl
@@ -18,6 +18,7 @@
 
 import android.media.IAudioRecord;
 import android.media.SharedFileRegion;
+import android.media.audio.common.AudioConfigBase;
 
 /**
  * CreateRecordResponse contains all output arguments returned by AudioFlinger to AudioRecord
@@ -43,4 +44,5 @@
     int portId;
     /** The newly created record. */
     @nullable IAudioRecord audioRecord;
+    AudioConfigBase serverConfig;
 }
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
index 014b3ca..212221e 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackRequest.aidl
@@ -18,9 +18,9 @@
 
 import android.media.AudioAttributesInternal;
 import android.media.AudioClient;
-import android.media.AudioConfig;
 import android.media.IAudioTrackCallback;
 import android.media.SharedFileRegion;
+import android.media.audio.common.AudioConfig;
 
 /**
  * CreateTrackInput contains all input arguments sent by AudioTrack to AudioFlinger
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index 40473fa..da6f454 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioStreamType;
+import android.media.audio.common.AudioStreamType;
 import android.media.IAudioTrack;
 
 /**
diff --git a/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl b/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
index 35a3d74..e5b5158 100644
--- a/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
+++ b/media/libaudioclient/aidl/android/media/EffectDescriptor.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioUuid;
+import android.media.audio.common.AudioUuid;
 
 /**
  * {@hide}
diff --git a/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl b/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl
deleted file mode 100644
index ec5b67a..0000000
--- a/media/libaudioclient/aidl/android/media/ExtraAudioDescriptor.aidl
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.AudioEncapsulationType;
-import android.media.AudioStandard;
-
-/**
- * The audio descriptor that describes playback/capture capabilities according to
- * a particular standard.
- *
- * {@hide}
- */
-parcelable ExtraAudioDescriptor {
-    AudioStandard standard;
-    byte[] audioDescriptor;
-    AudioEncapsulationType encapsulationType;
-}
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index 164fb9d..f1848b6 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioStreamType;
+import android.media.audio.common.AudioStreamType;
 
 /**
  * {@hide}
@@ -31,4 +31,6 @@
     int portId;
     /** Interpreted as audio_io_handle_t[]. */
     int[] secondaryOutputs;
+    /** True if the track is connected to a spatializer mixer and actually spatialized */
+    boolean isSpatialized;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 7ffcc33..10da028 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -16,13 +16,10 @@
 
 package android.media;
 
-import android.media.AudioMode;
 import android.media.AudioPatch;
 import android.media.AudioPort;
 import android.media.AudioPortConfig;
-import android.media.AudioStreamType;
 import android.media.AudioUniqueIdUse;
-import android.media.AudioUuid;
 import android.media.AudioVibratorInfo;
 import android.media.CreateEffectRequest;
 import android.media.CreateEffectResponse;
@@ -41,7 +38,13 @@
 import android.media.MicrophoneInfoData;
 import android.media.RenderPosition;
 import android.media.TrackSecondaryOutputInfo;
-import android.media.audio.common.AudioFormat;
+import android.media.audio.common.AudioChannelLayout;
+import android.media.audio.common.AudioFormatDescription;
+import android.media.audio.common.AudioMMapPolicyInfo;
+import android.media.audio.common.AudioMMapPolicyType;
+import android.media.audio.common.AudioMode;
+import android.media.audio.common.AudioStreamType;
+import android.media.audio.common.AudioUuid;
 
 /**
  * {@hide}
@@ -62,7 +65,7 @@
      */
     int sampleRate(int /* audio_io_handle_t */ ioHandle);
 
-    AudioFormat format(int /* audio_io_handle_t */ output);
+    AudioFormatDescription format(int /* audio_io_handle_t */ output);
 
     long frameCount(int /* audio_io_handle_t */ ioHandle);
 
@@ -115,8 +118,8 @@
     // Retrieve the audio recording buffer size in bytes.
     // FIXME This API assumes a route, and so should be deprecated.
     long getInputBufferSize(int sampleRate,
-                            AudioFormat format,
-                            int /* audio_channel_mask_t */ channelMask);
+                            in AudioFormatDescription format,
+                            in AudioChannelLayout channelMask);
 
     OpenOutputResponse openOutput(in OpenOutputRequest request);
     int /* audio_io_handle_t */ openDuplicateOutput(int /* audio_io_handle_t */ output1,
@@ -216,4 +219,17 @@
     // This usually happens when there is a dynamic policy registered.
     void updateSecondaryOutputs(
             in TrackSecondaryOutputInfo[] trackSecondaryOutputInfos);
+
+    AudioMMapPolicyInfo[] getMmapPolicyInfos(AudioMMapPolicyType policyType);
+
+    int getAAudioMixerBurstCount();
+
+    int getAAudioHardwareBurstMinUsec();
+
+    void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
+
+    // When adding a new method, please review and update
+    // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
+    // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
+    // AudioFlinger.cpp IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
 }
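
Note: the newly added IAudioFlingerService methods follow the usual generated-C++ calling convention (binder::Status return, result through an out pointer). A hedged sketch of querying the MMAP policy, where 'audioFlinger' stands for an already-obtained interface proxy (obtaining it is not shown in this change):

    using android::media::audio::common::AudioMMapPolicyInfo;
    using android::media::audio::common::AudioMMapPolicyType;

    std::vector<AudioMMapPolicyInfo> infos;
    android::binder::Status status =
            audioFlinger->getMmapPolicyInfos(AudioMMapPolicyType::DEFAULT, &infos);
    if (!status.isOk()) {
        // No MMAP policy available; fall back to the legacy (non-MMAP) path.
    }
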
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 5f0a1de..8ac89a8 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -18,16 +18,10 @@
 
 import android.content.AttributionSourceState;
 
-import android.media.audio.common.AudioFormat;
-
 import android.media.AudioAttributesEx;
 import android.media.AudioAttributesInternal;
-import android.media.AudioConfig;
-import android.media.AudioConfigBase;
-import android.media.AudioDevice;
+import android.media.AudioDirectMode;
 import android.media.AudioMix;
-import android.media.AudioMode;
-import android.media.AudioOffloadInfo;
 import android.media.AudioOffloadMode;
 import android.media.AudioPatch;
 import android.media.AudioPolicyDeviceState;
@@ -38,10 +32,6 @@
 import android.media.AudioPortRole;
 import android.media.AudioPortType;
 import android.media.AudioProductStrategy;
-import android.media.AudioSourceType;
-import android.media.AudioStreamType;
-import android.media.AudioUsage;
-import android.media.AudioUuid;
 import android.media.AudioVolumeGroup;
 import android.media.DeviceRole;
 import android.media.EffectDescriptor;
@@ -51,8 +41,21 @@
 import android.media.IAudioPolicyServiceClient;
 import android.media.ICaptureStateListener;
 import android.media.INativeSpatializerCallback;
-import android.media.Int;
 import android.media.SoundTriggerSession;
+import android.media.audio.common.AudioConfig;
+import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioDevice;
+import android.media.audio.common.AudioDeviceDescription;
+import android.media.audio.common.AudioFormatDescription;
+import android.media.audio.common.AudioMode;
+import android.media.audio.common.AudioProfile;
+import android.media.audio.common.AudioOffloadInfo;
+import android.media.audio.common.AudioPort;
+import android.media.audio.common.AudioSource;
+import android.media.audio.common.AudioStreamType;
+import android.media.audio.common.AudioUsage;
+import android.media.audio.common.AudioUuid;
+import android.media.audio.common.Int;
 
 /**
  * IAudioPolicyService interface (see AudioPolicyInterface for method descriptions).
@@ -62,16 +65,15 @@
 interface IAudioPolicyService {
     oneway void onNewAudioModulesAvailable();
 
-    void setDeviceConnectionState(in AudioDevice device,
-                                  in AudioPolicyDeviceState state,
-                                  @utf8InCpp String deviceName,
-                                  in AudioFormat encodedFormat);
+    void setDeviceConnectionState(in AudioPolicyDeviceState state,
+                                  in android.media.audio.common.AudioPort port,
+                                  in AudioFormatDescription encodedFormat);
 
     AudioPolicyDeviceState getDeviceConnectionState(in AudioDevice device);
 
     void handleDeviceConfigChange(in AudioDevice device,
                                   @utf8InCpp String deviceName,
-                                  in AudioFormat encodedFormat);
+                                  in AudioFormatDescription encodedFormat);
 
     void setPhoneState(AudioMode state, int /* uid_t */ uid);
 
@@ -116,18 +118,18 @@
                           int indexMax);
 
     void setStreamVolumeIndex(AudioStreamType stream,
-                              int /* audio_devices_t */ device,
+                              in AudioDeviceDescription device,
                               int index);
 
     int getStreamVolumeIndex(AudioStreamType stream,
-                             int /* audio_devices_t */ device);
+                             in AudioDeviceDescription device);
 
     void setVolumeIndexForAttributes(in AudioAttributesInternal attr,
-                                     int /* audio_devices_t */ device,
+                                     in AudioDeviceDescription device,
                                      int index);
 
     int getVolumeIndexForAttributes(in AudioAttributesInternal attr,
-                                    int /* audio_devices_t */ device);
+                                    in AudioDeviceDescription device);
 
     int getMaxVolumeIndexForAttributes(in AudioAttributesInternal attr);
 
@@ -135,9 +137,7 @@
 
     int /* product_strategy_t */ getStrategyForStream(AudioStreamType stream);
 
-    int /* bitmask of audio_devices_t */ getDevicesForStream(AudioStreamType stream);
-
-    AudioDevice[] getDevicesForAttributes(in AudioAttributesEx attr);
+    AudioDevice[] getDevicesForAttributes(in AudioAttributesEx attr, boolean forVolume);
 
     int /* audio_io_handle_t */ getOutputForEffect(in EffectDescriptor desc);
 
@@ -157,7 +157,7 @@
 
     boolean isStreamActiveRemotely(AudioStreamType stream, int inPastMs);
 
-    boolean isSourceActive(AudioSourceType source);
+    boolean isSourceActive(AudioSource source);
 
     /**
      * On input, count represents the maximum length of the returned array.
@@ -172,7 +172,7 @@
                                                        @utf8InCpp String opPackageName,
                                                        in AudioUuid uuid,
                                                        int priority,
-                                                       AudioSourceType source);
+                                                       AudioSource source);
 
     int /* audio_unique_id_t */ addStreamDefaultEffect(in AudioUuid type,
                                                        @utf8InCpp String opPackageName,
@@ -214,8 +214,8 @@
                        inout Int count,
                        out AudioPort[] ports);
 
-    /** Get attributes for a given audio port. */
-    AudioPort getAudioPort(in AudioPort port);
+    /** Get attributes for the audio port with the given id (AudioPort.hal.id field). */
+    AudioPort getAudioPort(int /* audio_port_handle_t */ portId);
 
     /**
      * Create an audio patch between several source and sink ports.
@@ -270,7 +270,7 @@
 
     boolean getMasterMono();
 
-    float getStreamVolumeDB(AudioStreamType stream, int index, int /* audio_devices_t */ device);
+    float getStreamVolumeDB(AudioStreamType stream, int index, in AudioDeviceDescription device);
 
     /**
      * Populates supported surround formats and their enabled state in formats and formatsEnabled.
@@ -281,7 +281,7 @@
      * number of elements without actually retrieving them.
      */
     void getSurroundFormats(inout Int count,
-                            out AudioFormat[] formats,
+                            out AudioFormatDescription[] formats,
                             out boolean[] formatsEnabled);
 
     /**
@@ -293,15 +293,16 @@
      * number of elements without actually retrieving them.
      */
     void getReportedSurroundFormats(inout Int count,
-                                    out AudioFormat[] formats);
+                                    out AudioFormatDescription[] formats);
 
-    AudioFormat[] getHwOffloadEncodingFormatsSupportedForA2DP();
+    AudioFormatDescription[] getHwOffloadFormatsSupportedForBluetoothMedia(
+                                    in AudioDeviceDescription device);
 
-    void setSurroundFormatEnabled(AudioFormat audioFormat, boolean enabled);
+    void setSurroundFormatEnabled(in AudioFormatDescription audioFormat, boolean enabled);
 
-    void setAssistantUid(int /* uid_t */ uid);
+    void setAssistantServicesUids(in int[] /* uid_t[] */ uids);
 
-    void setHotwordDetectionServiceUid(int /* uid_t */ uid);
+    void setActiveAssistantServicesUids(in int[] /* uid_t[] */ activeUids);
 
     void setA11yServicesUids(in int[] /* uid_t[] */ uids);
 
@@ -309,6 +310,8 @@
 
     boolean isHapticPlaybackSupported();
 
+    boolean isUltrasoundSupported();
+
     AudioProductStrategy[] listAudioProductStrategies();
     int /* product_strategy_t */ getProductStrategyFromAudioAttributes(in AudioAttributesEx aa,
                                                                        boolean fallbackOnDefault);
@@ -331,22 +334,22 @@
     AudioDevice[] getDevicesForRoleAndStrategy(int /* product_strategy_t */ strategy,
                                                DeviceRole role);
 
-    void setDevicesRoleForCapturePreset(AudioSourceType audioSource,
+    void setDevicesRoleForCapturePreset(AudioSource audioSource,
                                         DeviceRole role,
                                         in AudioDevice[] devices);
 
-    void addDevicesRoleForCapturePreset(AudioSourceType audioSource,
+    void addDevicesRoleForCapturePreset(AudioSource audioSource,
                                         DeviceRole role,
                                         in AudioDevice[] devices);
 
-    void removeDevicesRoleForCapturePreset(AudioSourceType audioSource,
+    void removeDevicesRoleForCapturePreset(AudioSource audioSource,
                                            DeviceRole role,
                                            in AudioDevice[] devices);
 
-    void clearDevicesRoleForCapturePreset(AudioSourceType audioSource,
+    void clearDevicesRoleForCapturePreset(AudioSource audioSource,
                                           DeviceRole role);
 
-    AudioDevice[] getDevicesForRoleAndCapturePreset(AudioSourceType audioSource,
+    AudioDevice[] getDevicesForRoleAndCapturePreset(AudioSource audioSource,
                                                     DeviceRole role);
 
     boolean registerSoundTriggerCaptureStateListener(ICaptureStateListener listener);
@@ -375,4 +378,20 @@
     boolean canBeSpatialized(in @nullable AudioAttributesInternal attr,
                              in @nullable AudioConfig config,
                              in AudioDevice[] devices);
+
+    /**
+     * Query how the direct playback is currently supported on the device.
+     */
+    AudioDirectMode getDirectPlaybackSupport(in AudioAttributesInternal attr,
+                                              in AudioConfig config);
+
+    /**
+     * Query audio profiles available for direct playback on the current output device(s)
+     * for the specified audio attributes.
+     */
+    AudioProfile[] getDirectProfilesForAttributes(in AudioAttributesInternal attr);
+
+    // When adding a new method, please review and update
+    // AudioPolicyService.cpp AudioPolicyService::onTransact()
+    // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
 }
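
Note: getAudioPort() now takes the port id instead of a partially filled AudioPort and returns the complete framework port (hal + sys). A hedged C++ sketch, with 'audioPolicy' and 'portId' as illustrative names for an interface proxy and an audio_port_handle_t:

    android::media::AudioPort port;
    android::binder::Status status = audioPolicy->getAudioPort(portId, &port);
    if (status.isOk()) {
        // port.hal carries the HAL-visible description (id, name, profiles, ...),
        // port.sys carries the framework-only extras (role, type, activeConfig, ...).
    }
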
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
index a7782b8..c0cdd96 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyServiceClient.aidl
@@ -16,10 +16,10 @@
 
 package android.media;
 
-import android.media.AudioConfigBase;
-import android.media.AudioSourceType;
 import android.media.EffectDescriptor;
 import android.media.RecordClientInfo;
+import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioSource;
 
 /**
  * {@hide}
@@ -43,7 +43,10 @@
                                         in AudioConfigBase deviceConfig,
                                         in EffectDescriptor[] effects,
                                         int /* audio_patch_handle_t */ patchHandle,
-                                        AudioSourceType source);
+                                        AudioSource source);
      /** Notifies a change of audio routing */
      void onRoutingUpdated();
+     /** Notifies a request to re-initialize the volume index ranges after they were
+      * observed to be invalid.
+      */
+     void onVolumeRangeInitRequest();
 }
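A hypothetical client-side handler for the new callback, shown only to illustrate the intended contract; the class and helper names below are illustrative and not part of this change:

    // Sketch: re-fetch and publish the volume index ranges when the server
    // reports that the cached ones are invalid.
    binder::Status MyPolicyClient::onVolumeRangeInitRequest() {
        reinitializeVolumeRanges();  // hypothetical helper that pushes min/max indices again
        return binder::Status::ok();
    }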
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
index 813cd5c..6ec0405 100644
--- a/media/libaudioclient/aidl/android/media/IEffect.aidl
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -62,4 +62,8 @@
      * TODO(ytai): Explain how this should be used exactly.
      */
     SharedFileRegion getCblk();
+
+    // When adding a new method, please review and update
+    // Effects.cpp AudioFlinger::EffectHandle::onTransact()
+    // Effects.cpp IEFFECT_BINDER_METHOD_MACRO_LIST
 }
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index b871238..a61ad58 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -57,8 +57,10 @@
     boolean isHeadTrackingSupported();
 
     /** Reports the list of supported head tracking modes (see SpatializerHeadTrackingMode.aidl).
-     * The list can be empty if the spatializer implementation does not support head tracking or if
-     * no head tracking sensor is registered (see setHeadSensor() and setScreenSensor()).
+     * The list always contains SpatializerHeadTrackingMode.DISABLED and can include other modes
+     * if the spatializer effect implementation supports head tracking.
+     * The result does not depend on currently connected sensors but reflects the capabilities
+     * when sensors are available.
      */
     SpatializerHeadTrackingMode[] getSupportedHeadTrackingModes();
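With the revised contract, DISABLED is always reported, so a caller can detect head tracking support by looking for any additional mode. A minimal sketch, assuming `spatializer` is a bound sp<media::ISpatializer>:

    std::vector<media::SpatializerHeadTrackingMode> modes;
    binder::Status status = spatializer->getSupportedHeadTrackingModes(&modes);
    // More than just DISABLED means the effect implementation supports head tracking.
    const bool headTrackingSupported = status.isOk() && modes.size() > 1;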
 
diff --git a/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
index 2e55526..75ff8e9 100644
--- a/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenInputRequest.aidl
@@ -16,9 +16,9 @@
 
 package android.media;
 
-import android.media.AudioConfig;
-import android.media.AudioDevice;
-import android.media.AudioSourceType;
+import android.media.audio.common.AudioConfig;
+import android.media.audio.common.AudioDevice;
+import android.media.audio.common.AudioSource;
 
 /**
  * {@hide}
@@ -30,7 +30,7 @@
     int input;
     AudioConfig config;
     AudioDevice device;
-    AudioSourceType source;
+    AudioSource source;
     /** Bitmask, indexed by AudioInputFlag. */
     int flags;
 }
diff --git a/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl b/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
index b613ba5..41bc38a 100644
--- a/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenInputResponse.aidl
@@ -16,8 +16,8 @@
 
 package android.media;
 
-import android.media.AudioConfig;
-import android.media.AudioDevice;
+import android.media.audio.common.AudioConfig;
+import android.media.audio.common.AudioDevice;
 
 /**
  * {@hide}
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index 1541948..90e7ea6 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -16,9 +16,9 @@
 
 package android.media;
 
-import android.media.AudioConfig;
-import android.media.AudioConfigBase;
 import android.media.AudioPort;
+import android.media.audio.common.AudioConfig;
+import android.media.audio.common.AudioConfigBase;
 
 /**
  * {@hide}
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl b/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
index a051969..451a0bf 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputResponse.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioConfig;
+import android.media.audio.common.AudioConfig;
 
 /**
  * {@hide}
diff --git a/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl b/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
index 3280460..7dad58d 100644
--- a/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
+++ b/media/libaudioclient/aidl/android/media/RecordClientInfo.aidl
@@ -16,7 +16,7 @@
 
 package android.media;
 
-import android.media.AudioSourceType;
+import android.media.audio.common.AudioSource;
 
 /**
  * {@hide}
@@ -28,7 +28,7 @@
     int uid;
     /** Interpreted as audio_session_t. */
     int session;
-    AudioSourceType source;
+    AudioSource source;
     /** Interpreted as audio_port_handle_t. */
     int portId;
     boolean silenced;
diff --git a/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl b/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl
index a829e59..4b540a9 100644
--- a/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl
+++ b/media/libaudioclient/aidl/android/media/SoundTriggerSession.aidl
@@ -16,6 +16,8 @@
 
 package android.media;
 
+import android.media.audio.common.AudioDeviceDescription;
+
 /**
  * {@hide}
  */
@@ -24,6 +26,6 @@
     int session;
     /** Interpreted as audio_io_handle_t. */
     int ioHandle;
-    /** Interpreted as audio_devices_t. */
-    int device;
+    /** Device type. */
+    AudioDeviceDescription device;
 }
diff --git a/media/libaudioclient/aidl/android/media/SpatializationMode.aidl b/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
index 5d8fd93..eaaff37 100644
--- a/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
+++ b/media/libaudioclient/aidl/android/media/SpatializationMode.aidl
@@ -24,7 +24,7 @@
 @Backing(type="byte")
 enum SpatializationMode {
     /** The spatializer supports binaural mode (over headphones type devices). */
-    SPATIALIZATER_BINAURAL = 0,
+    SPATIALIZER_BINAURAL = 0,
     /** The spatializer supports transaural mode (over speaker type devices). */
-    SPATIALIZATER_TRANSAURAL = 1,
+    SPATIALIZER_TRANSAURAL = 1,
 }
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index b290aa8..969e3e6 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -46,6 +46,7 @@
     ],
     shared_libs: [
         "android.hardware.audio.common-util",
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index bd9e158..036e72e 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -58,7 +58,8 @@
 
 constexpr audio_mode_t kModes[] = {
     AUDIO_MODE_INVALID, AUDIO_MODE_CURRENT,          AUDIO_MODE_NORMAL,     AUDIO_MODE_RINGTONE,
-    AUDIO_MODE_IN_CALL, AUDIO_MODE_IN_COMMUNICATION, AUDIO_MODE_CALL_SCREEN};
+    AUDIO_MODE_IN_CALL, AUDIO_MODE_IN_COMMUNICATION, AUDIO_MODE_CALL_SCREEN,
+    AUDIO_MODE_CALL_REDIRECT, AUDIO_MODE_COMMUNICATION_REDIRECT};
 
 constexpr audio_session_t kSessionId[] = {AUDIO_SESSION_NONE, AUDIO_SESSION_OUTPUT_STAGE,
                                           AUDIO_SESSION_DEVICE};
@@ -231,7 +232,7 @@
     attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
     attributionSource.token = sp<BBinder>::make();
     track->set(AUDIO_STREAM_DEFAULT, sampleRate, format, channelMask, frameCount, flags, nullptr,
-               nullptr, notificationFrames, sharedBuffer, false, sessionId,
+               notificationFrames, sharedBuffer, false, sessionId,
                ((fast && sharedBuffer == 0) || offload) ? AudioTrack::TRANSFER_CALLBACK
                                                         : AudioTrack::TRANSFER_DEFAULT,
                offload ? &offloadInfo : nullptr, attributionSource, &attributes, false, 1.0f,
@@ -314,7 +315,7 @@
     attributionSource.packageName = std::string(mFdp.ConsumeRandomLengthString().c_str());
     attributionSource.token = sp<BBinder>::make();
     sp<AudioRecord> record = new AudioRecord(attributionSource);
-    record->set(AUDIO_SOURCE_DEFAULT, sampleRate, format, channelMask, frameCount, nullptr, nullptr,
+    record->set(AUDIO_SOURCE_DEFAULT, sampleRate, format, channelMask, frameCount, nullptr,
                 notificationFrames, false, sessionId,
                 fast ? AudioRecord::TRANSFER_CALLBACK : AudioRecord::TRANSFER_DEFAULT, flags,
                 getuid(), getpid(), &attributes, AUDIO_PORT_HANDLE_NONE);
@@ -354,7 +355,7 @@
     audioBuffer.frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
     record->obtainBuffer(&audioBuffer, waitCount, &nonContig);
     bool blocking = false;
-    record->read(audioBuffer.raw, audioBuffer.size, blocking);
+    record->read(audioBuffer.data(), audioBuffer.size(), blocking);
     record->getInputFramesLost();
     record->getFlags();
 
@@ -601,9 +602,10 @@
     media::OpenInputRequest request{};
     request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
     request.input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
-    request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+    request.config = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(config, true /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(deviceTypeAddr));
-    request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSourceType(source));
+    request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSource(source));
     request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
 
     media::OpenInputResponse response{};
@@ -658,9 +660,10 @@
     media::OpenOutputResponse response{};
 
     request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
-    request.halConfig = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
-    request.mixerConfig =
-            VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_base_t_AudioConfigBase(mixerConfig));
+    request.halConfig = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(config, false /*isInput*/));
+    request.mixerConfig = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(mixerConfig, false /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
     request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
 
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index 4ec69c7..e769303 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -23,34 +23,43 @@
 
 #include <android/media/AudioAttributesInternal.h>
 #include <android/media/AudioClient.h>
-#include <android/media/AudioConfig.h>
-#include <android/media/AudioConfigBase.h>
+#include <android/media/AudioDirectMode.h>
 #include <android/media/AudioDualMonoMode.h>
-#include <android/media/AudioEncapsulationMode.h>
-#include <android/media/AudioEncapsulationMetadataType.h>
-#include <android/media/AudioEncapsulationType.h>
 #include <android/media/AudioFlag.h>
-#include <android/media/AudioGain.h>
-#include <android/media/AudioGainMode.h>
-#include <android/media/AudioInputFlags.h>
 #include <android/media/AudioIoConfigEvent.h>
 #include <android/media/AudioIoDescriptor.h>
-#include <android/media/AudioMixLatencyClass.h>
-#include <android/media/AudioMode.h>
-#include <android/media/AudioOutputFlags.h>
 #include <android/media/AudioPlaybackRate.h>
 #include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfigType.h>
-#include <android/media/AudioPortDeviceExt.h>
-#include <android/media/AudioPortExt.h>
-#include <android/media/AudioPortMixExt.h>
-#include <android/media/AudioPortSessionExt.h>
-#include <android/media/AudioProfile.h>
+#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortDeviceExtSys.h>
 #include <android/media/AudioTimestampInternal.h>
 #include <android/media/AudioUniqueIdUse.h>
 #include <android/media/EffectDescriptor.h>
-#include <android/media/ExtraAudioDescriptor.h>
 #include <android/media/TrackSecondaryOutputInfo.h>
+#include <android/media/audio/common/AudioChannelLayout.h>
+#include <android/media/audio/common/AudioConfig.h>
+#include <android/media/audio/common/AudioConfigBase.h>
+#include <android/media/audio/common/AudioContentType.h>
+#include <android/media/audio/common/AudioDeviceDescription.h>
+#include <android/media/audio/common/AudioEncapsulationMetadataType.h>
+#include <android/media/audio/common/AudioEncapsulationMode.h>
+#include <android/media/audio/common/AudioEncapsulationType.h>
+#include <android/media/audio/common/AudioFormatDescription.h>
+#include <android/media/audio/common/AudioGain.h>
+#include <android/media/audio/common/AudioGainConfig.h>
+#include <android/media/audio/common/AudioGainMode.h>
+#include <android/media/audio/common/AudioInputFlags.h>
+#include <android/media/audio/common/AudioMode.h>
+#include <android/media/audio/common/AudioOffloadInfo.h>
+#include <android/media/audio/common/AudioOutputFlags.h>
+#include <android/media/audio/common/AudioPortExt.h>
+#include <android/media/audio/common/AudioPortMixExt.h>
+#include <android/media/audio/common/AudioProfile.h>
+#include <android/media/audio/common/AudioSource.h>
+#include <android/media/audio/common/AudioStandard.h>
+#include <android/media/audio/common/AudioUsage.h>
+#include <android/media/audio/common/AudioUuid.h>
+#include <android/media/audio/common/ExtraAudioDescriptor.h>
 
 #include <android/media/SharedFileRegion.h>
 #include <binder/IMemory.h>
@@ -86,19 +95,9 @@
 ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
 ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
 
-// The legacy enum is unnamed. Thus, we use int32_t.
-ConversionResult<int32_t> aidl2legacy_AudioPortConfigType_int32_t(
-        media::AudioPortConfigType aidl);
-// The legacy enum is unnamed. Thus, we use int32_t.
-ConversionResult<media::AudioPortConfigType> legacy2aidl_int32_t_AudioPortConfigType(
-        int32_t legacy);
-
 ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
 ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
 
-ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy);
-
 ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
 ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
 
@@ -116,10 +115,10 @@
 ConversionResult<std::optional<std::string_view>>
 legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy);
 
-ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
         media::AudioIoConfigEvent aidl);
-ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
-        audio_io_config_event legacy);
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
+        audio_io_config_event_t legacy);
 
 ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
         media::AudioPortRole aidl);
@@ -131,36 +130,59 @@
 ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
         audio_port_type_t legacy);
 
-ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
-        media::audio::common::AudioFormat aidl);
-ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
-        audio_format_t legacy);
+ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+        const media::audio::common::AudioChannelLayout& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioChannelLayout>
+legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
+
+ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
+        const media::audio::common::AudioDeviceDescription& aidl);
+ConversionResult<media::audio::common::AudioDeviceDescription>
+legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
+
+status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl,
+        audio_devices_t* legacyType, char* legacyAddress);
+status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl,
+        audio_devices_t* legacyType, String8* legacyAddress);
+status_t aidl2legacy_AudioDevice_audio_device(
+        const media::audio::common::AudioDevice& aidl,
+        audio_devices_t* legacyType, std::string* legacyAddress);
+ConversionResult<media::audio::common::AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const char* legacyAddress);
+ConversionResult<media::audio::common::AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+        audio_devices_t legacyType, const String8& legacyAddress);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
+        const media::audio::common::AudioFormatDescription& aidl);
+ConversionResult<media::audio::common::AudioFormatDescription>
+legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
 
 ConversionResult<audio_gain_mode_t>
-aidl2legacy_AudioGainMode_audio_gain_mode_t(media::AudioGainMode aidl);
-ConversionResult<media::AudioGainMode>
+aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
+ConversionResult<media::audio::common::AudioGainMode>
 legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
 
 ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
 ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
 
-ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy);
-
 ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
-        const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type);
-ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
-        const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type);
+        const media::audio::common::AudioGainConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGainConfig>
+legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
 
-ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
-        media::AudioInputFlags aidl);
-ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
-        audio_input_flags_t legacy);
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
 
-ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
-        media::AudioOutputFlags aidl);
-ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
-        audio_output_flags_t legacy);
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
 
 ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
         int32_t aidl);
@@ -173,40 +195,43 @@
         audio_output_flags_t legacy);
 
 ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
-        const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type);
-ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
-        const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type);
+        const media::audio::common::AudioIoFlags& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+        const audio_io_flags& legacy, bool isInput);
 
 ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioPortConfigDeviceExt_audio_port_config_device_ext(
-        const media::AudioPortConfigDeviceExt& aidl);
-ConversionResult<media::AudioPortConfigDeviceExt>
-legacy2aidl_audio_port_config_device_ext_AudioPortConfigDeviceExt(
-        const audio_port_config_device_ext& legacy);
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+        const media::audio::common::AudioPortDeviceExt& aidl,
+        const media::AudioPortDeviceExtSys& aidlDeviceExt);
+status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+        const audio_port_config_device_ext& legacy,
+        media::audio::common::AudioPortDeviceExt* aidl,
+        media::AudioPortDeviceExtSys* aidlDeviceExt);
 
 ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
-        media::AudioStreamType aidl);
-ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
-        audio_stream_type_t legacy);
+        media::audio::common::AudioStreamType aidl);
+ConversionResult<media::audio::common::AudioStreamType>
+legacy2aidl_audio_stream_type_t_AudioStreamType(audio_stream_type_t legacy);
 
-ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
-        media::AudioSourceType aidl);
-ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
+        media::audio::common::AudioSource aidl);
+ConversionResult<media::audio::common::AudioSource>
+        legacy2aidl_audio_source_t_AudioSource(
         audio_source_t legacy);
 
 ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
 ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
 
-ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
-        const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role);
-ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
-        const audio_port_config_mix_ext& legacy, audio_port_role_t role);
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
+        const media::audio::common::AudioPortMixExt& aidl, media::AudioPortRole role,
+        const media::AudioPortMixExtSys& aidlMixExt);
+status_t legacy2aidl_AudioPortMixExt(
+        const audio_port_config_mix_ext& legacy, audio_port_role_t role,
+        media::audio::common::AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt);
 
 ConversionResult<audio_port_config_session_ext>
-aidl2legacy_AudioPortConfigSessionExt_audio_port_config_session_ext(
-        const media::AudioPortConfigSessionExt& aidl);
-ConversionResult<media::AudioPortConfigSessionExt>
-legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
+aidl2legacy_int32_t_audio_port_config_session_ext(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
         const audio_port_config_session_ext& legacy);
 
 ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
@@ -221,7 +246,6 @@
 
 ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
         const media::AudioIoDescriptor& aidl);
-
 ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
         const sp<AudioIoDescriptor>& legacy);
 
@@ -231,13 +255,14 @@
         const AudioClient& legacy);
 
 ConversionResult<audio_content_type_t>
-aidl2legacy_AudioContentType_audio_content_type_t(media::AudioContentType aidl);
-ConversionResult<media::AudioContentType>
+aidl2legacy_AudioContentType_audio_content_type_t(
+        media::audio::common::AudioContentType aidl);
+ConversionResult<media::audio::common::AudioContentType>
 legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
 
 ConversionResult<audio_usage_t>
-aidl2legacy_AudioUsage_audio_usage_t(media::AudioUsage aidl);
-ConversionResult<media::AudioUsage>
+aidl2legacy_AudioUsage_audio_usage_t(media::audio::common::AudioUsage aidl);
+ConversionResult<media::audio::common::AudioUsage>
 legacy2aidl_audio_usage_t_AudioUsage(audio_usage_t legacy);
 
 ConversionResult<audio_flags_mask_t>
@@ -256,24 +281,27 @@
 legacy2aidl_audio_attributes_t_AudioAttributesInternal(const audio_attributes_t& legacy);
 
 ConversionResult<audio_encapsulation_mode_t>
-aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(media::AudioEncapsulationMode aidl);
-ConversionResult<media::AudioEncapsulationMode>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
+        media::audio::common::AudioEncapsulationMode aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMode>
 legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
 
 ConversionResult<audio_offload_info_t>
-aidl2legacy_AudioOffloadInfo_audio_offload_info_t(const media::AudioOffloadInfo& aidl);
-ConversionResult<media::AudioOffloadInfo>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
+        const media::audio::common::AudioOffloadInfo& aidl);
+ConversionResult<media::audio::common::AudioOffloadInfo>
 legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
 
 ConversionResult<audio_config_t>
-aidl2legacy_AudioConfig_audio_config_t(const media::AudioConfig& aidl);
-ConversionResult<media::AudioConfig>
-legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy);
+aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
 
 ConversionResult<audio_config_base_t>
-aidl2legacy_AudioConfigBase_audio_config_base_t(const media::AudioConfigBase& aidl);
-ConversionResult<media::AudioConfigBase>
-legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy);
+aidl2legacy_AudioConfigBase_audio_config_base_t(
+        const media::audio::common::AudioConfigBase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
 
 ConversionResult<sp<IMemory>>
 aidl2legacy_SharedFileRegion_IMemory(const media::SharedFileRegion& aidl);
@@ -291,8 +319,8 @@
 legacy2aidl_AudioTimestamp_AudioTimestampInternal(const AudioTimestamp& legacy);
 
 ConversionResult<audio_uuid_t>
-aidl2legacy_AudioUuid_audio_uuid_t(const media::AudioUuid& aidl);
-ConversionResult<media::AudioUuid>
+aidl2legacy_AudioUuid_audio_uuid_t(const media::audio::common::AudioUuid& aidl);
+ConversionResult<media::audio::common::AudioUuid>
 legacy2aidl_audio_uuid_t_AudioUuid(const audio_uuid_t& legacy);
 
 ConversionResult<effect_descriptor_t>
@@ -302,8 +330,8 @@
 
 ConversionResult<audio_encapsulation_metadata_type_t>
 aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
-        media::AudioEncapsulationMetadataType aidl);
-ConversionResult<media::AudioEncapsulationMetadataType>
+        media::audio::common::AudioEncapsulationMetadataType aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
 legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
         audio_encapsulation_metadata_type_t legacy);
 
@@ -317,37 +345,39 @@
 ConversionResult<int32_t>
 legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
 
-ConversionResult<audio_mix_latency_class_t>
-aidl2legacy_AudioMixLatencyClass_audio_mix_latency_class_t(
-        media::AudioMixLatencyClass aidl);
-ConversionResult<media::AudioMixLatencyClass>
-legacy2aidl_audio_mix_latency_class_t_AudioMixLatencyClass(
-        audio_mix_latency_class_t legacy);
-
 ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const media::AudioPortDeviceExt& aidl);
-ConversionResult<media::AudioPortDeviceExt>
-legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(const audio_port_device_ext& legacy);
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+        const media::audio::common::AudioPortDeviceExt& aidl,
+        const media::AudioPortDeviceExtSys& aidlDeviceExt);
+status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+        const audio_port_device_ext& legacy,
+        media::audio::common::AudioPortDeviceExt* aidl,
+        media::AudioPortDeviceExtSys* aidlDeviceExt);
 
 ConversionResult<audio_port_mix_ext>
-aidl2legacy_AudioPortMixExt_audio_port_mix_ext(const media::AudioPortMixExt& aidl);
-ConversionResult<media::AudioPortMixExt>
-legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy);
+aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+        const media::audio::common::AudioPortMixExt& aidl,
+        const media::AudioPortMixExtSys& aidlMixExt);
+status_t legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+        const audio_port_mix_ext& legacy,
+        media::audio::common::AudioPortMixExt* aidl,
+        media::AudioPortMixExtSys* aidlMixExt);
 
 ConversionResult<audio_port_session_ext>
-aidl2legacy_AudioPortSessionExt_audio_port_session_ext(const media::AudioPortSessionExt& aidl);
-ConversionResult<media::AudioPortSessionExt>
-legacy2aidl_audio_port_session_ext_AudioPortSessionExt(const audio_port_session_ext& legacy);
+aidl2legacy_int32_t_audio_port_session_ext(int32_t aidl);
+ConversionResult<int32_t>
+legacy2aidl_audio_port_session_ext_int32_t(const audio_port_session_ext& legacy);
 
 ConversionResult<audio_profile>
-aidl2legacy_AudioProfile_audio_profile(const media::AudioProfile& aidl);
-ConversionResult<media::AudioProfile>
-legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy);
+aidl2legacy_AudioProfile_audio_profile(
+        const media::audio::common::AudioProfile& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioProfile>
+legacy2aidl_audio_profile_AudioProfile(const audio_profile& legacy, bool isInput);
 
 ConversionResult<audio_gain>
-aidl2legacy_AudioGain_audio_gain(const media::AudioGain& aidl);
-ConversionResult<media::AudioGain>
-legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy);
+aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
 
 ConversionResult<audio_port_v7>
 aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl);
@@ -355,8 +385,8 @@
 legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy);
 
 ConversionResult<audio_mode_t>
-aidl2legacy_AudioMode_audio_mode_t(media::AudioMode aidl);
-ConversionResult<media::AudioMode>
+aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
+ConversionResult<media::audio::common::AudioMode>
 legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
 
 ConversionResult<audio_unique_id_use_t>
@@ -390,21 +420,21 @@
 legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
 
 ConversionResult<audio_standard_t>
-aidl2legacy_AudioStandard_audio_standard_t(media::AudioStandard aidl);
-ConversionResult<media::AudioStandard>
+aidl2legacy_AudioStandard_audio_standard_t(media::audio::common::AudioStandard aidl);
+ConversionResult<media::audio::common::AudioStandard>
 legacy2aidl_audio_standard_t_AudioStandard(audio_standard_t legacy);
 
 ConversionResult<audio_extra_audio_descriptor>
 aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
-        const media::ExtraAudioDescriptor& aidl);
-ConversionResult<media::ExtraAudioDescriptor>
+        const media::audio::common::ExtraAudioDescriptor& aidl);
+ConversionResult<media::audio::common::ExtraAudioDescriptor>
 legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
         const audio_extra_audio_descriptor& legacy);
 
 ConversionResult<audio_encapsulation_type_t>
 aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-        const media::AudioEncapsulationType& aidl);
-ConversionResult<media::AudioEncapsulationType>
+        const media::audio::common::AudioEncapsulationType& aidl);
+ConversionResult<media::audio::common::AudioEncapsulationType>
 legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
         const audio_encapsulation_type_t & legacy);
 
@@ -416,5 +446,13 @@
 legacy2aidl_TrackSecondaryOutputInfoPair_TrackSecondaryOutputInfo(
         const TrackSecondaryOutputInfoPair& legacy);
 
+ConversionResult<audio_direct_mode_t>
+aidl2legacy_AudioDirectMode_audio_direct_mode_t(media::AudioDirectMode aidl);
+ConversionResult<media::AudioDirectMode>
+legacy2aidl_audio_direct_mode_t_AudioDirectMode(audio_direct_mode_t legacy);
+
+ConversionResult<audio_direct_mode_t> aidl2legacy_int32_t_audio_direct_mode_t_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_direct_mode_t_int32_t_mask(audio_direct_mode_t legacy);
+
 
 }  // namespace android
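A minimal sketch of a round trip through the new AudioDevice conversions declared above, written inside a function that returns status_t so that VALUE_OR_RETURN_STATUS can be used:

    // Legacy -> AIDL: device type plus address become a single AudioDevice.
    const audio_devices_t legacyType = AUDIO_DEVICE_OUT_SPEAKER;
    auto aidlDevice = VALUE_OR_RETURN_STATUS(
            legacy2aidl_audio_device_AudioDevice(legacyType, String8("")));
    // AIDL -> legacy: split back into type and address.
    audio_devices_t typeBack;
    std::string addressBack;
    status_t result = aidl2legacy_AudioDevice_audio_device(aidlDevice, &typeBack, &addressBack);
    // result == OK on success; typeBack/addressBack then mirror the original values.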
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
index c1a2be3..8817c35 100644
--- a/media/libaudioclient/include/media/AidlConversionUtil.h
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -20,42 +20,14 @@
 #include <type_traits>
 #include <utility>
 
-#include <android-base/expected.h>
+#include <binder/Enums.h>
 #include <binder/Status.h>
+#include <error/Result.h>
 
 namespace android {
 
 template <typename T>
-using ConversionResult = base::expected<T, status_t>;
-
-// Convenience macros for working with ConversionResult, useful for writing converted for aggregate
-// types.
-
-#define VALUE_OR_RETURN(result)                                \
-    ({                                                         \
-        auto _tmp = (result);                                  \
-        if (!_tmp.ok()) return base::unexpected(_tmp.error()); \
-        std::move(_tmp.value());                               \
-    })
-
-#define RETURN_IF_ERROR(result) \
-    if (status_t _tmp = (result); _tmp != OK) return base::unexpected(_tmp);
-
-#define VALUE_OR_RETURN_STATUS(x)           \
-    ({                                      \
-       auto _tmp = (x);                     \
-       if (!_tmp.ok()) return _tmp.error(); \
-       std::move(_tmp.value());             \
-     })
-
-#define VALUE_OR_FATAL(result)                                        \
-    ({                                                                \
-       auto _tmp = (result);                                          \
-       LOG_ALWAYS_FATAL_IF(!_tmp.ok(),                                \
-                           "Function: %s Line: %d Failed result (%d)",\
-                           __FUNCTION__, __LINE__, _tmp.error());     \
-       std::move(_tmp.value());                                       \
-     })
+using ConversionResult = error::Result<T>;
 
 /**
  * A generic template to safely cast between integral types, respecting limits of the destination
@@ -106,6 +78,26 @@
 }
 
 /**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ * Converts at most 'limit' items from the input range.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+status_t convertRangeWithLimit(InputIterator start,
+                      InputIterator end,
+                      OutputIterator out,
+                      const Func& itemConversion,
+                      const size_t limit) {
+    InputIterator last = end;
+    if (end - start > limit) {
+        last = start + limit;
+    }
+    for (InputIterator iter = start; (iter != last); ++iter, ++out) {
+        *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+    }
+    return OK;
+}
+
+/**
  * A generic template that helps convert containers of convertible types.
  */
 template<typename OutputContainer, typename InputContainer, typename Func>
@@ -119,6 +111,62 @@
     return output;
 }
 
+/**
+ * A generic template that helps convert containers of convertible types
+ * using an item conversion function with an additional parameter.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func, typename Parameter>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion, const Parameter& param) {
+    OutputContainer output;
+    auto ins = std::inserter(output, output.begin());
+    for (const auto& item : input) {
+        *ins = VALUE_OR_RETURN(itemConversion(item, param));
+    }
+    return output;
+}
+
+/**
+ * A generic template that helps to "zip" two input containers of the same size
+ * into a single output container of converted types. The conversion function must
+ * thus accept two arguments.
+ */
+template<typename OutputContainer, typename InputContainer1,
+        typename InputContainer2, typename Func>
+ConversionResult<OutputContainer>
+convertContainers(const InputContainer1& input1, const InputContainer2& input2,
+        const Func& itemConversion) {
+    auto iter2 = input2.begin();
+    OutputContainer output;
+    auto ins = std::inserter(output, output.begin());
+    for (const auto& item1 : input1) {
+        RETURN_IF_ERROR(iter2 != input2.end() ? OK : BAD_VALUE);
+        *ins = VALUE_OR_RETURN(itemConversion(item1, *iter2++));
+    }
+    return output;
+}
+
+/**
+ * A generic template that helps to "unzip" a container: the per-element conversion
+ * emits a pair of elements, which are collected into a pair of output containers.
+ */
+template<typename OutputContainer1, typename OutputContainer2,
+        typename InputContainer, typename Func>
+ConversionResult<std::pair<OutputContainer1, OutputContainer2>>
+convertContainerSplit(const InputContainer& input, const Func& itemConversion) {
+    OutputContainer1 output1;
+    OutputContainer2 output2;
+    auto ins1 = std::inserter(output1, output1.begin());
+    auto ins2 = std::inserter(output2, output2.begin());
+    for (const auto& item : input) {
+        auto out_pair = VALUE_OR_RETURN(itemConversion(item));
+        *ins1 = out_pair.first;
+        *ins2 = out_pair.second;
+    }
+    return std::make_pair(output1, output2);
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // The code below establishes:
 // IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
@@ -223,6 +271,29 @@
 namespace aidl_utils {
 
 /**
+ * Return true if the value is valid for the AIDL enumeration.
+ */
+template <typename T>
+bool isValidEnum(T value) {
+    constexpr android::enum_range<T> er{};
+    return std::find(er.begin(), er.end(), value) != er.end();
+}
+
+// T is a "container" of enum binder types with a toString().
+template <typename T>
+std::string enumsToString(const T& t) {
+    std::string s;
+    for (const auto item : t) {
+        if (s.empty()) {
+            s = toString(item);
+        } else {
+            s.append("|").append(toString(item));
+        }
+    }
+    return s;
+}
+
+/**
  * Return the equivalent Android status_t from a binder exception code.
  *
  * Generally one should use statusTFromBinderStatus() instead.
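A minimal usage sketch of the parameterized convertContainer() overload added above, forwarding the direction flag to the per-item conversion; it assumes the caller itself returns a ConversionResult so that VALUE_OR_RETURN applies, and that the input vector was obtained from an AIDL call:

    std::vector<media::audio::common::AudioProfile> aidlProfiles;  // e.g. from the policy service
    auto legacyProfiles = VALUE_OR_RETURN(
            convertContainer<std::vector<audio_profile>>(
                    aidlProfiles, aidl2legacy_AudioProfile_audio_profile, false /*isInput*/));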
diff --git a/media/libaudioclient/include/media/AudioCommonTypes.h b/media/libaudioclient/include/media/AudioCommonTypes.h
index 5f0c590..862a0f9 100644
--- a/media/libaudioclient/include/media/AudioCommonTypes.h
+++ b/media/libaudioclient/include/media/AudioCommonTypes.h
@@ -17,9 +17,75 @@
 
 #pragma once
 
+#include <functional>
+
+#include <android/media/audio/common/AudioChannelLayout.h>
+#include <android/media/audio/common/AudioDeviceDescription.h>
+#include <android/media/audio/common/AudioFormatDescription.h>
+#include <binder/Parcelable.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
-#include <binder/Parcelable.h>
+
+namespace {
+// see boost::hash_combine
+#if defined(__clang__)
+__attribute__((no_sanitize("unsigned-integer-overflow")))
+#endif
+static size_t hash_combine(size_t seed, size_t v) {
+    return std::hash<size_t>{}(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
+}
+}
+
+namespace std {
+
+// Note: when extending the types hashed below we need to account for the
+// possibility of processing instances that belong to different versions of the type,
+// e.g. a HAL may be using a previous version of the AIDL interface.
+
+template<> struct hash<android::media::audio::common::AudioChannelLayout>
+{
+    std::size_t operator()(
+            const android::media::audio::common::AudioChannelLayout& acl) const noexcept {
+        using Tag = android::media::audio::common::AudioChannelLayout::Tag;
+        const size_t seed = std::hash<Tag>{}(acl.getTag());
+        switch (acl.getTag()) {
+            case Tag::none:
+                return hash_combine(seed, std::hash<int32_t>{}(acl.get<Tag::none>()));
+            case Tag::invalid:
+                return hash_combine(seed, std::hash<int32_t>{}(acl.get<Tag::invalid>()));
+            case Tag::indexMask:
+                return hash_combine(seed, std::hash<int32_t>{}(acl.get<Tag::indexMask>()));
+            case Tag::layoutMask:
+                return hash_combine(seed, std::hash<int32_t>{}(acl.get<Tag::layoutMask>()));
+            case Tag::voiceMask:
+                return hash_combine(seed, std::hash<int32_t>{}(acl.get<Tag::voiceMask>()));
+        }
+        return seed;
+    }
+};
+
+template<> struct hash<android::media::audio::common::AudioDeviceDescription>
+{
+    std::size_t operator()(
+            const android::media::audio::common::AudioDeviceDescription& add) const noexcept {
+        return hash_combine(
+                std::hash<android::media::audio::common::AudioDeviceType>{}(add.type),
+                std::hash<std::string>{}(add.connection));
+    }
+};
+
+template<> struct hash<android::media::audio::common::AudioFormatDescription>
+{
+    std::size_t operator()(
+            const android::media::audio::common::AudioFormatDescription& afd) const noexcept {
+        return hash_combine(
+                std::hash<android::media::audio::common::AudioFormatType>{}(afd.type),
+                hash_combine(
+                        std::hash<android::media::audio::common::PcmType>{}(afd.pcm),
+                        std::hash<std::string>{}(afd.encoding)));
+    }
+};
+}  // namespace std
 
 namespace android {
 
@@ -81,4 +147,3 @@
 static const volume_group_t VOLUME_GROUP_NONE = static_cast<volume_group_t>(-1);
 
 } // namespace android
-
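The hash specializations above let the common AIDL types serve as keys of unordered containers; a minimal sketch, assuming the generated parcelable provides operator== (structured AIDL parcelables do):

    std::unordered_set<android::media::audio::common::AudioDeviceDescription> seenDevices;
    android::media::audio::common::AudioDeviceDescription device;  // filled in elsewhere
    seenDevices.insert(device);  // hashes type + connection as combined above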
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index dd4d2da..56884a3 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -40,7 +40,7 @@
 
 // ----------------------------------------------------------------------------
 
-class AudioEffect : public RefBase
+class AudioEffect : public virtual RefBase
 {
 public:
 
@@ -277,7 +277,7 @@
     static status_t removeStreamDefaultEffect(audio_unique_id_t id);
 
     /*
-     * Events used by callback function (effect_callback_t).
+     * Events used by callback function (legacy_callback_t).
      */
     enum event_type {
         EVENT_CONTROL_STATUS_CHANGED = 0,
@@ -287,6 +287,47 @@
         EVENT_FRAMES_PROCESSED = 4,
     };
 
+    class IAudioEffectCallback : public virtual RefBase {
+        friend AudioEffect;
+     protected:
+        /* The event is received when an application loses or
+         * gains control of the effect engine. Loss of control happens
+         * if another application requests the use of the engine by creating an AudioEffect for
+         * the same effect type but with a higher priority. Control is returned when the
+         * application that held control deletes its AudioEffect object.
+         * @param isGranted: True if control has been granted. False if stolen.
+         */
+        virtual void onControlStatusChanged([[maybe_unused]] bool isGranted) {}
+
+        /* The event is received by all applications not having the
+         * control of the effect engine when the effect is enabled or disabled.
+         * @param isEnabled: True if enabled. False if disabled.
+         */
+        virtual void onEnableStatusChanged([[maybe_unused]] bool isEnabled) {}
+
+        /* The event is received by all applications not having the
+         * control of the effect engine when an effect parameter is changed.
+         * @param param: A vector containing the raw bytes of an effect_param_type
+         *   representing a parameter type / value pair.
+         */
+        // TODO pass an AIDL parcel instead of effect_param_type
+        virtual void onParameterChanged([[maybe_unused]] std::vector<uint8_t> param) {}
+
+        /* The event is received when the binder connection to the mediaserver
+         * is no longer valid. Typically the server has been killed.
+         * @param errorCode: A code representing the type of error.
+         */
+        virtual void onError([[maybe_unused]] status_t errorCode) {}
+
+
+        /* The event is received when the audio server has processed a block of
+         * data.
+         * @param framesProcessed: The number of frames the audio server has
+         *   processed.
+         */
+        virtual void onFramesProcessed([[maybe_unused]] int32_t framesProcessed) {}
+    };
+
     /* Callback function notifying client application of a change in effect engine state or
      * configuration.
      * An effect engine can be shared by several applications but only one has the control
@@ -315,7 +356,7 @@
      *  - EVENT_ERROR:  status_t indicating the error (DEAD_OBJECT when media server dies).
      */
 
-    typedef void (*effect_callback_t)(int32_t event, void* user, void *info);
+    typedef void (*legacy_callback_t)(int32_t event, void* user, void *info);
 
 
     /* Constructor.
@@ -360,7 +401,7 @@
      * priority:    requested priority for effect control: the priority level corresponds to the
      *      value of priority parameter: negative values indicate lower priorities, positive values
      *      higher priorities, 0 being the normal priority.
-     * cbf:         optional callback function (see effect_callback_t)
+     * cbf:         optional callback function (see legacy_callback_t)
      * user:        pointer to context for use by the callback receiver.
      * sessionId:   audio session this effect is associated to.
      *      If equal to AUDIO_SESSION_OUTPUT_MIX, the effect will be global to
@@ -383,10 +424,20 @@
      *  - NO_INIT: audio flinger or audio hardware not initialized
      */
             status_t    set(const effect_uuid_t *type,
-                            const effect_uuid_t *uuid = NULL,
+                            const effect_uuid_t *uuid = nullptr,
                             int32_t priority = 0,
-                            effect_callback_t cbf = NULL,
-                            void* user = NULL,
+                            const wp<IAudioEffectCallback>& callback = nullptr,
+                            audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+                            audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+                            const AudioDeviceTypeAddr& device = {},
+                            bool probe = false,
+                            bool notifyFramesProcessed = false);
+
+            status_t    set(const effect_uuid_t *type,
+                            const effect_uuid_t *uuid,
+                            int32_t priority,
+                            legacy_callback_t cbf,
+                            void* user,
                             audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
                             audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
                             const AudioDeviceTypeAddr& device = {},
@@ -396,10 +447,21 @@
      * Same as above but with type and uuid specified by character strings.
      */
             status_t    set(const char *typeStr,
-                            const char *uuidStr = NULL,
+                            const char *uuidStr = nullptr,
                             int32_t priority = 0,
-                            effect_callback_t cbf = NULL,
-                            void* user = NULL,
+                            const wp<IAudioEffectCallback>& callback = nullptr,
+                            audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+                            audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+                            const AudioDeviceTypeAddr& device = {},
+                            bool probe = false,
+                            bool notifyFramesProcessed = false);
+
+
+            status_t    set(const char *typeStr,
+                            const char *uuidStr,
+                            int32_t priority,
+                            legacy_callback_t cbf,
+                            void* user,
                             audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
                             audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
                             const AudioDeviceTypeAddr& device = {},
@@ -535,18 +597,19 @@
 
 protected:
      android::content::AttributionSourceState mClientAttributionSource; // source for app op checks.
-     bool                    mEnabled = false;   // enable state
-     audio_session_t         mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
-     int32_t                 mPriority = 0;      // priority for effect control
-     status_t                mStatus = NO_INIT;  // effect status
-     bool                    mProbe = false;     // effect created in probe mode: all commands
-                                                 // are no ops because mIEffect is NULL
-     effect_callback_t       mCbf = nullptr;     // callback function for status, control and
-                                                 // parameter changes notifications
-     void*                   mUserData = nullptr;// client context for callback function
-     effect_descriptor_t     mDescriptor = {};   // effect descriptor
-     int32_t                 mId = -1;           // system wide unique effect engine instance ID
-     Mutex                   mLock;              // Mutex for mEnabled access
+     bool                     mEnabled = false;   // enable state
+     audio_session_t          mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
+     int32_t                  mPriority = 0;      // priority for effect control
+     status_t                 mStatus = NO_INIT;  // effect status
+     bool                     mProbe = false;     // effect created in probe mode: all commands
+                                                 // are no ops because mIEffect is nullptr
+
+     wp<IAudioEffectCallback> mCallback = nullptr; // callback interface for status, control and
+                                                   // parameter changes notifications
+     sp<IAudioEffectCallback> mLegacyWrapper = nullptr;
+     effect_descriptor_t      mDescriptor = {};   // effect descriptor
+     int32_t                  mId = -1;           // system wide unique effect engine instance ID
+     Mutex                    mLock;              // Mutex for mEnabled access
 
 
      // IEffectClient
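A hypothetical subclass showing how the new callback interface replaces the legacy function pointer; only IAudioEffectCallback and the new set() overload come from this change, the remaining names are illustrative:

    class MyEffectCallback : public AudioEffect::IAudioEffectCallback {
      protected:
        void onEnableStatusChanged(bool isEnabled) override {
            // React to another client enabling or disabling the shared engine.
        }
        void onFramesProcessed(int32_t framesProcessed) override {
            // Delivered only if requested; see the notifyFramesProcessed argument of set().
        }
    };

    // `effect` is an existing sp<AudioEffect> and `typeUuid` the desired effect type UUID
    // (both illustrative); the callback is held internally as a wp<>.
    sp<MyEffectCallback> callback = sp<MyEffectCallback>::make();
    effect->set(&typeUuid, nullptr /*uuid*/, 0 /*priority*/, callback);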
diff --git a/media/libaudioclient/include/media/AudioIoDescriptor.h b/media/libaudioclient/include/media/AudioIoDescriptor.h
index 981d33a..405ec7d 100644
--- a/media/libaudioclient/include/media/AudioIoDescriptor.h
+++ b/media/libaudioclient/include/media/AudioIoDescriptor.h
@@ -17,9 +17,15 @@
 #ifndef ANDROID_AUDIO_IO_DESCRIPTOR_H
 #define ANDROID_AUDIO_IO_DESCRIPTOR_H
 
+#include <sstream>
+#include <string>
+
+#include <system/audio.h>
+#include <utils/RefBase.h>
+
 namespace android {
 
-enum audio_io_config_event {
+enum audio_io_config_event_t {
     AUDIO_OUTPUT_REGISTERED,
     AUDIO_OUTPUT_OPENED,
     AUDIO_OUTPUT_CLOSED,
@@ -33,41 +39,70 @@
 
 // audio input/output descriptor used to cache output configurations in client process to avoid
 // frequent calls through IAudioFlinger
-class AudioIoDescriptor : public RefBase {
+class AudioIoDescriptor : public virtual RefBase {
 public:
-    AudioIoDescriptor() :
-        mIoHandle(AUDIO_IO_HANDLE_NONE),
-        mSamplingRate(0), mFormat(AUDIO_FORMAT_DEFAULT), mChannelMask(AUDIO_CHANNEL_NONE),
-        mFrameCount(0), mFrameCountHAL(0), mLatency(0), mPortId(AUDIO_PORT_HANDLE_NONE)
-    {
-        memset(&mPatch, 0, sizeof(struct audio_patch));
-    }
+    AudioIoDescriptor() = default;
+    // For AUDIO_{INPUT|OUTPUT}_CLOSED events.
+    AudioIoDescriptor(audio_io_handle_t ioHandle) : mIoHandle(ioHandle) {}
+    // For AUDIO_CLIENT_STARTED events.
+    AudioIoDescriptor(
+            audio_io_handle_t ioHandle, const audio_patch& patch, audio_port_handle_t portId) :
+            mIoHandle(ioHandle), mPatch(patch), mPortId(portId) {}
+    // For everything else.
+    AudioIoDescriptor(
+            audio_io_handle_t ioHandle, const audio_patch& patch, bool isInput,
+            uint32_t samplingRate, audio_format_t format, audio_channel_mask_t channelMask,
+            size_t frameCount, size_t frameCountHal, uint32_t latency = 0,
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) :
+            mIoHandle(ioHandle), mPatch(patch), mIsInput(isInput),
+            mSamplingRate(samplingRate), mFormat(format), mChannelMask(channelMask),
+            mFrameCount(frameCount), mFrameCountHAL(frameCountHal), mLatency(latency),
+            mPortId(portId) {}
 
-    virtual ~AudioIoDescriptor() {}
-
-    audio_port_handle_t getDeviceId() {
+    audio_io_handle_t getIoHandle() const { return mIoHandle; }
+    const audio_patch& getPatch() const { return mPatch; }
+    bool getIsInput() const { return mIsInput; }
+    uint32_t getSamplingRate() const { return mSamplingRate; }
+    audio_format_t getFormat() const { return mFormat; }
+    audio_channel_mask_t getChannelMask() const { return mChannelMask; }
+    size_t getFrameCount() const { return mFrameCount; }
+    size_t getFrameCountHAL() const { return mFrameCountHAL; }
+    uint32_t getLatency() const { return mLatency; }
+    audio_port_handle_t getPortId() const { return mPortId; }
+    audio_port_handle_t getDeviceId() const {
         if (mPatch.num_sources != 0 && mPatch.num_sinks != 0) {
-            if (mPatch.sources[0].type == AUDIO_PORT_TYPE_MIX) {
-                // this is an output mix
-                // FIXME: the API only returns the first device in case of multiple device selection
-                return mPatch.sinks[0].id;
-            } else {
-                // this is an input mix
-                return mPatch.sources[0].id;
-            }
+            // FIXME: the API only returns the first device in case of multiple device selection
+            return mIsInput ? mPatch.sources[0].id : mPatch.sinks[0].id;
         }
         return AUDIO_PORT_HANDLE_NONE;
     }
+    void setPatch(const audio_patch& patch) { mPatch = patch; }
 
-    audio_io_handle_t       mIoHandle;
-    struct audio_patch      mPatch;
-    uint32_t                mSamplingRate;
-    audio_format_t          mFormat;
-    audio_channel_mask_t    mChannelMask;
-    size_t                  mFrameCount;
-    size_t                  mFrameCountHAL;
-    uint32_t                mLatency;   // only valid for output
-    audio_port_handle_t     mPortId;    // valid for event AUDIO_CLIENT_STARTED
+    std::string toDebugString() const {
+        std::ostringstream ss;
+        ss << mIoHandle << ", samplingRate " << mSamplingRate << ", "
+           << audio_format_to_string(mFormat) << ", "
+           << (audio_channel_mask_get_representation(mChannelMask) ==
+                   AUDIO_CHANNEL_REPRESENTATION_INDEX ?
+                   audio_channel_index_mask_to_string(mChannelMask) :
+                   (mIsInput ? audio_channel_in_mask_to_string(mChannelMask) :
+                           audio_channel_out_mask_to_string(mChannelMask)))
+           << ", frameCount " << mFrameCount << ", frameCountHAL " << mFrameCountHAL
+           << ", deviceId " << getDeviceId();
+        return ss.str();
+    }
+
+  private:
+    const audio_io_handle_t    mIoHandle = AUDIO_IO_HANDLE_NONE;
+          struct audio_patch   mPatch = {};
+    const bool                 mIsInput = false;
+    const uint32_t             mSamplingRate = 0;
+    const audio_format_t       mFormat = AUDIO_FORMAT_DEFAULT;
+    const audio_channel_mask_t mChannelMask = AUDIO_CHANNEL_NONE;
+    const size_t               mFrameCount = 0;
+    const size_t               mFrameCountHAL = 0;
+    const uint32_t             mLatency = 0;
+    const audio_port_handle_t  mPortId = AUDIO_PORT_HANDLE_NONE;
 };
 
 
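The descriptor's members are now private and const, so reads go through the new accessors. A brief sketch of constructing a descriptor and logging it with toDebugString(); the handle, patch, format and frame-count values below are illustrative placeholders, not taken from this change:

#define LOG_TAG "AudioIoDescriptorSketch"
#include <utils/Log.h>
#include <media/AudioIoDescriptor.h>

void logOutputConfig(audio_io_handle_t handle, const audio_patch& patch) {
    // Output (isInput = false) descriptor; latency and portId keep their defaults.
    android::sp<android::AudioIoDescriptor> desc = new android::AudioIoDescriptor(
            handle, patch, false /*isInput*/, 48000, AUDIO_FORMAT_PCM_16_BIT,
            AUDIO_CHANNEL_OUT_STEREO, 960 /*frameCount*/, 192 /*frameCountHAL*/);
    ALOGD("io %d: %s, deviceId %d", desc->getIoHandle(), desc->toDebugString().c_str(),
          desc->getDeviceId());
}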
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index f17ee3a..cb05dd9 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -40,14 +40,13 @@
 
 struct audio_track_cblk_t;
 class AudioRecordClientProxy;
-
 // ----------------------------------------------------------------------------
 
 class AudioRecord : public AudioSystem::AudioDeviceCallback
 {
 public:
 
-    /* Events used by AudioRecord callback function (callback_t).
+    /* Events used by AudioRecord callback function (legacy_callback_t).
      * Keep in sync with frameworks/base/media/java/android/media/AudioRecord.java NATIVE_EVENT_*.
      */
     enum event_type {
@@ -65,20 +64,26 @@
     };
 
     /* Client should declare a Buffer and pass address to obtainBuffer()
-     * and releaseBuffer().  See also callback_t for EVENT_MORE_DATA.
+     * and releaseBuffer().  See also legacy_callback_t for EVENT_MORE_DATA.
      */
 
     class Buffer
     {
+      friend AudioRecord;
     public:
-        // FIXME use m prefix
+        size_t size() const { return mSize; }
+        size_t getFrameCount() const { return frameCount; }
+        uint8_t* data() const { return ui8; }
+        // Leaving public for now to assist refactoring. This class will
+        // be replaced.
         size_t      frameCount;     // number of sample frames corresponding to size;
                                     // on input to obtainBuffer() it is the number of frames desired
                                     // on output from obtainBuffer() it is the number of available
                                     //    frames to be read
                                     // on input to releaseBuffer() it is currently ignored
 
-        size_t      size;           // input/output in bytes == frameCount * frameSize
+    private:
+        size_t      mSize;          // input/output in bytes == frameCount * frameSize
                                     // on input to obtainBuffer() it is ignored
                                     // on output from obtainBuffer() it is the number of available
                                     //    bytes to be read, which is frameCount * frameSize
@@ -90,7 +95,7 @@
         union {
             void*       raw;
             int16_t*    i16;        // signed 16-bit
-            int8_t*     i8;         // unsigned 8-bit, offset by 0x80
+            uint8_t*    ui8;        // unsigned 8-bit, offset by 0x80
                                     // input to obtainBuffer(): unused, output: pointer to buffer
         };
 
@@ -117,7 +122,28 @@
      *          - EVENT_NEW_IAUDIORECORD: unused.
      */
 
-    typedef void (*callback_t)(int event, void* user, void *info);
+    typedef void (*legacy_callback_t)(int event, void* user, void *info);
+
+    class IAudioRecordCallback : public virtual RefBase {
+        friend AudioRecord;
+     protected:
+        // Request for client to read newly available data.
+        // Used for TRANSFER_CALLBACK mode.
+        // Parameters:
+        //  - buffer : Buffer to read from
+        // Returns:
+        //  - Number of bytes actually consumed.
+        virtual size_t onMoreData([[maybe_unused]] const AudioRecord::Buffer& buffer) { return 0; }
+        // A buffer overrun occurred.
+        virtual void onOverrun() {}
+        // Record head is at the specified marker (see setMarkerPosition()).
+        virtual void onMarker([[maybe_unused]] uint32_t markerPosition) {}
+        // Record head is at a new position (see setPositionUpdatePeriod()).
+        virtual void onNewPos([[maybe_unused]] uint32_t newPos) {}
+        // IAudioRecord was recreated due to re-routing, server invalidation or
+        // server crash.
+        virtual void onNewIAudioRecord() {}
+    };
 
     /* Returns the minimum frame count required for the successful creation of
      * an AudioRecord object.
@@ -182,20 +208,37 @@
      * pAttributes:        If not NULL, supersedes inputSource for use case selection.
      * threadCanCallJava:  Not present in parameter list, and so is fixed at false.
      */
-
                         AudioRecord(audio_source_t inputSource,
                                     uint32_t sampleRate,
                                     audio_format_t format,
                                     audio_channel_mask_t channelMask,
                                     const android::content::AttributionSourceState& client,
                                     size_t frameCount = 0,
-                                    callback_t cbf = NULL,
-                                    void* user = NULL,
+                                    const wp<IAudioRecordCallback> &callback = nullptr,
                                     uint32_t notificationFrames = 0,
                                     audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
                                     audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                                    const audio_attributes_t* pAttributes = NULL,
+                                    const audio_attributes_t* pAttributes = nullptr,
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+                                    audio_microphone_direction_t
+                                        selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+                                    float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT);
+
+
+                        AudioRecord(audio_source_t inputSource,
+                                    uint32_t sampleRate,
+                                    audio_format_t format,
+                                    audio_channel_mask_t channelMask,
+                                    const android::content::AttributionSourceState& client,
+                                    size_t frameCount,
+                                    legacy_callback_t callback,
+                                    void* user,
+                                    uint32_t notificationFrames = 0,
+                                    audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
+                                    transfer_type transferType = TRANSFER_DEFAULT,
+                                    audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
+                                    const audio_attributes_t* pAttributes = nullptr,
                                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                                     audio_microphone_direction_t
                                         selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
@@ -223,13 +266,12 @@
      *
      * threadCanCallJava:  Whether callbacks are made from an attached thread and thus can call JNI.
      */
-            status_t    set(audio_source_t inputSource,
+            status_t    set(audio_source_t inputSource,
                             uint32_t sampleRate,
                             audio_format_t format,
                             audio_channel_mask_t channelMask,
                             size_t frameCount = 0,
-                            callback_t cbf = NULL,
-                            void* user = NULL,
+                            const wp<IAudioRecordCallback> &callback = nullptr,
                             uint32_t notificationFrames = 0,
                             bool threadCanCallJava = false,
                             audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
@@ -237,7 +279,28 @@
                             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
                             uid_t uid = AUDIO_UID_INVALID,
                             pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = NULL,
+                            const audio_attributes_t* pAttributes = nullptr,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
+                            audio_microphone_direction_t
+                                selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
+                            float selectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT,
+                            int32_t maxSharedAudioHistoryMs = 0);
+
+            status_t    set(audio_source_t inputSource,
+                            uint32_t sampleRate,
+                            audio_format_t format,
+                            audio_channel_mask_t channelMask,
+                            size_t frameCount,
+                            legacy_callback_t callback,
+                            void* user,
+                            uint32_t notificationFrames = 0,
+                            bool threadCanCallJava = false,
+                            audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
+                            transfer_type transferType = TRANSFER_DEFAULT,
+                            audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
+                            uid_t uid = AUDIO_UID_INVALID,
+                            pid_t pid = -1,
+                            const audio_attributes_t* pAttributes = nullptr,
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE,
                             audio_microphone_direction_t
                                 selectedMicDirection = MIC_DIRECTION_UNSPECIFIED,
@@ -673,9 +736,11 @@
     bool                    mActive;
 
     // for client callback handler
-    callback_t              mCbf;                   // callback handler for events, or NULL
-    void*                   mUserData;
 
+    wp<IAudioRecordCallback> mCallback;
+    sp<IAudioRecordCallback> mLegacyCallbackWrapper;
+
+    bool                    mInitialized = false;   // Protect against double set
     // for notification APIs
     uint32_t                mNotificationFramesReq; // requested number of frames between each
                                                     // notification callback
@@ -760,6 +825,13 @@
     bool                    mTimestampRetrogradePositionReported = false; // reduce log spam
     bool                    mTimestampRetrogradeTimeReported = false;     // reduce log spam
 
+    // Format conversion. May be needed for fast tracks whose format differs from the server's.
+    audio_config_base_t     mServerConfig;
+    size_t                  mServerFrameSize;
+    size_t                  mServerSampleSize;
+    std::unique_ptr<uint8_t[]> mFormatConversionBufRaw;
+    Buffer                  mFormatConversionBuffer;
+
 private:
     class DeathNotifier : public IBinder::DeathRecipient {
     public:
@@ -824,6 +896,8 @@
     MediaMetrics mMediaMetrics;
     std::string mMetricsId;  // GUARDED_BY(mLock), could change in createRecord_l().
     std::string mCallerName; // for example "aaudio"
+
+    void reportError(status_t status, const char *event, const char *message) const;
 };
 
 }; // namespace android
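With callback_t replaced by IAudioRecordCallback, clients subclass the interface instead of passing a (callback, user) pair. A sketch under that assumption; the source, format and attribution handling below are placeholders rather than part of this change:

#include <media/AudioRecord.h>

// Drains captured data through the new callback interface.
class MyRecordSink : public android::AudioRecord::IAudioRecordCallback {
protected:
    size_t onMoreData(const android::AudioRecord::Buffer& buffer) override {
        // Consume up to buffer.size() bytes starting at buffer.data().
        return buffer.size();  // bytes actually consumed
    }
    void onOverrun() override { /* count or log the overrun */ }
};

// AudioRecord keeps only a weak reference to the callback, so the caller must
// hold the sp<MyRecordSink> for as long as it wants to receive events.
android::sp<android::AudioRecord> makeRecord(
        const android::content::AttributionSourceState& source,
        const android::sp<MyRecordSink>& sink) {
    return new android::AudioRecord(AUDIO_SOURCE_MIC, 48000 /*sampleRate*/,
                                    AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO,
                                    source, 0 /*frameCount*/, sink);
}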
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 869bd6e..360b83d 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,26 +19,31 @@
 
 #include <sys/types.h>
 
+#include <set>
+#include <vector>
+
+#include <android/content/AttributionSourceState.h>
 #include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerClient.h>
 #include <android/media/BnAudioPolicyServiceClient.h>
 #include <android/media/INativeSpatializerCallback.h>
 #include <android/media/ISpatializer.h>
-#include <android/content/AttributionSourceState.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioPort.h>
 #include <media/AidlConversionUtil.h>
+#include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioProductStrategy.h>
 #include <media/AudioVolumeGroup.h>
 #include <media/AudioIoDescriptor.h>
 #include <media/MicrophoneInfo.h>
-#include <set>
 #include <system/audio.h>
 #include <system/audio_effect.h>
 #include <system/audio_policy.h>
 #include <utils/Errors.h>
 #include <utils/Mutex.h>
-#include <vector>
 
 using android::content::AttributionSourceState;
 
@@ -72,6 +77,7 @@
                                        audio_patch_handle_t patchHandle,
                                        audio_source_t source);
 typedef void (*routing_callback)();
+typedef void (*vol_range_init_req_callback)();
 
 class IAudioFlinger;
 class String8;
@@ -149,6 +155,7 @@
     static void setDynPolicyCallback(dynamic_policy_callback cb);
     static void setRecordConfigCallback(record_config_callback);
     static void setRoutingCallback(routing_callback cb);
+    static void setVolInitReqCallback(vol_range_init_req_callback cb);
 
     // Sets the binder to use for accessing the AudioFlinger service. This enables the system server
     // to grant specific isolated processes access to the audio system. Currently used only for the
@@ -259,8 +266,8 @@
     // IAudioPolicyService interface (see AudioPolicyInterface for method descriptions)
     //
     static void onNewAudioModulesAvailable();
-    static status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state,
-                                             const char *device_address, const char *device_name,
+    static status_t setDeviceConnectionState(audio_policy_dev_state_t state,
+                                             const android::media::audio::common::AudioPort& port,
                                              audio_format_t encodedFormat);
     static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                 const char *device_address);
@@ -281,7 +288,8 @@
                                      audio_output_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      audio_port_handle_t *portId,
-                                     std::vector<audio_io_handle_t> *secondaryOutputs);
+                                     std::vector<audio_io_handle_t> *secondaryOutputs,
+                                     bool *isSpatialized);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
@@ -323,9 +331,9 @@
     static status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
 
     static product_strategy_t getStrategyForStream(audio_stream_type_t stream);
-    static audio_devices_t getDevicesForStream(audio_stream_type_t stream);
     static status_t getDevicesForAttributes(const AudioAttributes &aa,
-                                            AudioDeviceTypeAddrVector *devices);
+                                            AudioDeviceTypeAddrVector *devices,
+                                            bool forVolume);
 
     static audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
     static status_t registerEffect(const effect_descriptor_t *desc,
@@ -342,6 +350,7 @@
     static void clearAudioConfigCache();
 
     static const sp<media::IAudioPolicyService> get_audio_policy_service();
+    static void clearAudioPolicyService();
 
     // helpers for android.media.AudioManager.getProperty(), see description there for meaning
     static uint32_t getPrimaryOutputSamplingRate();
@@ -369,7 +378,8 @@
                                    struct audio_port_v7 *ports,
                                    unsigned int *generation);
 
-    /* Get attributes for a given audio port */
+    /* Get attributes for a given audio port. On input, the port
+     * only needs the 'id' field to be filled in. */
     static status_t getAudioPort(struct audio_port_v7 *port);
 
     /* Create an audio patch between several source and sink ports */
@@ -420,8 +430,8 @@
 
     static status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
 
-    static status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                                    std::vector<audio_format_t> *formats);
+    static status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+                                    audio_devices_t device, std::vector<audio_format_t> *formats);
 
     // numSurroundFormats holds the maximum number of formats and bool value allowed in the array.
     // When numSurroundFormats is 0, surroundFormats and surroundFormatsEnabled will not be
@@ -433,13 +443,16 @@
                                                audio_format_t *surroundFormats);
     static status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
-    static status_t setAssistantUid(uid_t uid);
-    static status_t setHotwordDetectionServiceUid(uid_t uid);
+    static status_t setAssistantServicesUids(const std::vector<uid_t>& uids);
+    static status_t setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids);
+
     static status_t setA11yServicesUids(const std::vector<uid_t>& uids);
     static status_t setCurrentImeUid(uid_t uid);
 
     static bool     isHapticPlaybackSupported();
 
+    static bool     isUltrasoundSupported();
+
     static status_t listAudioProductStrategies(AudioProductStrategyVector &strategies);
     static status_t getProductStrategyFromAudioAttributes(
             const AudioAttributes &aa, product_strategy_t &productStrategy,
@@ -532,9 +545,32 @@
                                      const AudioDeviceTypeAddrVector &devices,
                                      bool *canBeSpatialized);
 
+    /**
+     * Query how the direct playback is currently supported on the device.
+     * @param attr audio attributes describing the playback use case
+     * @param config audio configuration for the playback
+     * @param directMode out: a set of flags describing how the direct playback is currently
+     *        supported on the device
+     * @return NO_ERROR in case of success, DEAD_OBJECT, NO_INIT, BAD_VALUE, PERMISSION_DENIED
+     *         in case of error.
+     */
+    static status_t getDirectPlaybackSupport(const audio_attributes_t *attr,
+                                             const audio_config_t *config,
+                                             audio_direct_mode_t *directMode);
+
+
+    /**
+     * Query which direct audio profiles are available for the specified audio attributes.
+     * @param attr audio attributes describing the playback use case
+     * @param audioProfiles out: a vector of audio profiles
+     * @return NO_ERROR in case of success, DEAD_OBJECT, NO_INIT, BAD_VALUE, PERMISSION_DENIED
+     *         in case of error.
+     */
+    static status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
+                                            std::vector<audio_profile>* audioProfiles);
 
     // A listener for capture state changes.
-    class CaptureStateListener : public RefBase {
+    class CaptureStateListener : public virtual RefBase {
     public:
         // Called whenever capture state changes.
         virtual void onStateChanged(bool active) = 0;
@@ -559,7 +595,7 @@
 
     // ----------------------------------------------------------------------------
 
-    class AudioVolumeGroupCallback : public RefBase
+    class AudioVolumeGroupCallback : public virtual RefBase
     {
     public:
 
@@ -574,7 +610,7 @@
     static status_t addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
     static status_t removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
 
-    class AudioPortCallback : public RefBase
+    class AudioPortCallback : public virtual RefBase
     {
     public:
 
@@ -590,7 +626,7 @@
     static status_t addAudioPortCallback(const sp<AudioPortCallback>& callback);
     static status_t removeAudioPortCallback(const sp<AudioPortCallback>& callback);
 
-    class AudioDeviceCallback : public RefBase
+    class AudioDeviceCallback : public virtual RefBase
     {
     public:
 
@@ -612,6 +648,14 @@
 
     static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
 
+    static status_t getMmapPolicyInfo(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos);
+
+    static int32_t getAAudioMixerBurstCount();
+
+    static int32_t getAAudioHardwareBurstMinUsec();
+
 private:
 
     class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
@@ -688,13 +732,14 @@
         binder::Status onRecordingConfigurationUpdate(
                 int32_t event,
                 const media::RecordClientInfo& clientInfo,
-                const media::AudioConfigBase& clientConfig,
+                const media::audio::common::AudioConfigBase& clientConfig,
                 const std::vector<media::EffectDescriptor>& clientEffects,
-                const media::AudioConfigBase& deviceConfig,
+                const media::audio::common::AudioConfigBase& deviceConfig,
                 const std::vector<media::EffectDescriptor>& effects,
                 int32_t patchHandle,
-                media::AudioSourceType source) override;
+                media::audio::common::AudioSource source) override;
         binder::Status onRoutingUpdated();
+        binder::Status onVolumeRangeInitRequest();
 
     private:
         Mutex                               mLock;
@@ -722,6 +767,7 @@
     static dynamic_policy_callback gDynPolicyCallback;
     static record_config_callback gRecordConfigCallback;
     static routing_callback gRoutingCallback;
+    static vol_range_init_req_callback gVolRangeInitReqCallback;
 
     static size_t gInBuffSize;
     // previous parameters for recording buffer size queries
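A small sketch of calling the new MMAP policy query on AudioSystem; AudioMMapPolicyType::DEFAULT is assumed from the AIDL enum and is not spelled out by this change:

#include <vector>
#include <media/AudioSystem.h>

bool mmapPlaybackAdvertised() {
    std::vector<android::media::audio::common::AudioMMapPolicyInfo> infos;
    // DEFAULT is an assumed enumerator; check the AIDL definition of AudioMMapPolicyType.
    const android::status_t status = android::AudioSystem::getMmapPolicyInfo(
            android::media::audio::common::AudioMMapPolicyType::DEFAULT, &infos);
    // A non-empty list means the platform reported at least one MMAP policy.
    return status == android::NO_ERROR && !infos.empty();
}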
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index fa21265..9f540e6 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -95,34 +95,36 @@
 
     class Buffer
     {
+    friend AudioTrack;
     public:
-        // FIXME use m prefix
+        size_t size() const { return mSize; }
+        size_t getFrameCount() const { return frameCount; }
+        uint8_t* data() const { return ui8; }
+        // Leaving public for now to ease refactoring. This class will be
+        // replaced.
         size_t      frameCount;   // number of sample frames corresponding to size;
                                   // on input to obtainBuffer() it is the number of frames desired,
                                   // on output from obtainBuffer() it is the number of available
                                   //    [empty slots for] frames to be filled
                                   // on input to releaseBuffer() it is currently ignored
-
-        size_t      size;         // input/output in bytes == frameCount * frameSize
+    private:
+        size_t      mSize;        // input/output in bytes == frameCount * frameSize
                                   // on input to obtainBuffer() it is ignored
                                   // on output from obtainBuffer() it is the number of available
                                   //    [empty slots for] bytes to be filled,
                                   //    which is frameCount * frameSize
                                   // on input to releaseBuffer() it is the number of bytes to
                                   //    release
-                                  // FIXME This is redundant with respect to frameCount.  Consider
-                                  //    removing size and making frameCount the primary field.
 
         union {
             void*       raw;
             int16_t*    i16;      // signed 16-bit
-            int8_t*     i8;       // unsigned 8-bit, offset by 0x80
+            uint8_t*    ui8;      // unsigned 8-bit, offset by 0x80
         };                        // input to obtainBuffer(): unused, output: pointer to buffer
 
         uint32_t    sequence;       // IAudioTrack instance sequence number, as of obtainBuffer().
                                     // It is set by obtainBuffer() and confirmed by releaseBuffer().
                                     // Not "user-serviceable".
-                                    // TODO Consider sp<IMemory> instead, or in addition to this.
     };
 
     /* As a convenience, if a callback is supplied, a handler thread
@@ -146,7 +148,79 @@
      *          - EVENT_NEW_TIMESTAMP: pointer to const AudioTimestamp.
      */
 
-    typedef void (*callback_t)(int event, void* user, void *info);
+    typedef void (*legacy_callback_t)(int event, void* user, void* info);
+    class IAudioTrackCallback : public virtual RefBase {
+      friend AudioTrack;
+      protected:
+       /* Request to write more data to buffer.
+        * This event only occurs for TRANSFER_CALLBACK.
+        * If this event is delivered but the callback handler does not want to write more data,
+        * the handler must ignore the event by returning zero.
+        * This might occur, for example, if the application is waiting for source data or is at
+        * the end of stream.
+        * For data filling, it is preferred that the callback does not block and instead returns
+        * a short count of the amount of data actually delivered.
+        * Parameters:
+        *  - buffer: Buffer to fill
+        * Returns:
+        * Amount of data actually written in bytes.
+        */
+        virtual size_t onMoreData([[maybe_unused]] const AudioTrack::Buffer& buffer) { return 0; }
+
+        // Buffer underrun occurred. This will not occur for static tracks.
+        virtual void onUnderrun() {}
+
+       /* Sample loop end was reached; playback restarted from loop start if loop count was not 0
+        * for a static track.
+        * Parameters:
+        *  - loopsRemaining: Number of loops remaining to be played. -1 if infinite looping.
+        */
+        virtual void onLoopEnd([[maybe_unused]] int32_t loopsRemaining) {}
+
+       /* Playback head is at the specified marker (See setMarkerPosition()).
+        * Parameters:
+        *  - markerPosition: Marker position in frames
+        */
+        virtual void onMarker([[maybe_unused]] uint32_t markerPosition) {}
+
+       /* Playback head is at a new position (See setPositionUpdatePeriod()).
+        * Parameters:
+        *  - newPos: New position in frames
+        */
+        virtual void onNewPos([[maybe_unused]] uint32_t newPos) {}
+
+        // Playback has completed for a static track.
+        virtual void onBufferEnd() {}
+
+        // IAudioTrack was re-created, either due to re-routing and voluntary invalidation
+        // by mediaserver, or mediaserver crash.
+        virtual void onNewIAudioTrack() {}
+
+        // Sent after all the buffers queued in AF and HW are played back (after stop is called)
+        // for an offloaded track.
+        virtual void onStreamEnd() {}
+
+       /* Delivered periodically and when there's a significant change
+        * in the mapping from frame position to presentation time.
+        * See AudioTimestamp for the information included with event.
+        * TODO not yet implemented.
+        * Parameters:
+        *  - timestamp: New frame position and presentation time mapping.
+        */
+        virtual void onNewTimestamp([[maybe_unused]] AudioTimestamp timestamp) {}
+
+       /* Notification that more data can be given by write()
+        * This event only occurs for TRANSFER_SYNC_NOTIF_CALLBACK.
+        * Similar to onMoreData(), return the number of bytes actually written
+        * Parameters:
+        *  - buffer: Buffer to fill
+        * Returns:
+        * Amount of data actually written in bytes.
+        */
+        virtual size_t onCanWriteMoreData([[maybe_unused]] const AudioTrack::Buffer& buffer) {
+            return 0;
+        }
+    };
 
     /* Returns the minimum frame count required for the successful creation of
      * an AudioTrack object.
@@ -257,15 +331,34 @@
                                     audio_channel_mask_t channelMask,
                                     size_t frameCount    = 0,
                                     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                                    callback_t cbf       = NULL,
-                                    void* user           = NULL,
+                                    const wp<IAudioTrackCallback>& callback = nullptr,
                                     int32_t notificationFrames = 0,
                                     audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
-                                    const audio_offload_info_t *offloadInfo = NULL,
+                                    const audio_offload_info_t *offloadInfo = nullptr,
                                     const AttributionSourceState& attributionSource =
                                         AttributionSourceState(),
-                                    const audio_attributes_t* pAttributes = NULL,
+                                    const audio_attributes_t* pAttributes = nullptr,
+                                    bool doNotReconnect = false,
+                                    float maxRequiredSpeed = 1.0f,
+                                    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+
+
+                        AudioTrack( audio_stream_type_t streamType,
+                                    uint32_t sampleRate,
+                                    audio_format_t format,
+                                    audio_channel_mask_t channelMask,
+                                    size_t frameCount,
+                                    audio_output_flags_t flags,
+                                    legacy_callback_t cbf,
+                                    void* user = nullptr,
+                                    int32_t notificationFrames = 0,
+                                    audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
+                                    transfer_type transferType = TRANSFER_DEFAULT,
+                                    const audio_offload_info_t *offloadInfo = nullptr,
+                                    const AttributionSourceState& attributionSource =
+                                        AttributionSourceState(),
+                                    const audio_attributes_t* pAttributes = nullptr,
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f,
                                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
@@ -281,22 +374,39 @@
      * It is recommended to pass a callback function to be notified of playback end by an
      * EVENT_UNDERRUN event.
      */
-
                         AudioTrack( audio_stream_type_t streamType,
                                     uint32_t sampleRate,
                                     audio_format_t format,
                                     audio_channel_mask_t channelMask,
                                     const sp<IMemory>& sharedBuffer,
                                     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                                    callback_t cbf      = NULL,
-                                    void* user          = NULL,
+                                    const wp<IAudioTrackCallback>& callback = nullptr,
                                     int32_t notificationFrames = 0,
                                     audio_session_t sessionId   = AUDIO_SESSION_ALLOCATE,
                                     transfer_type transferType = TRANSFER_DEFAULT,
-                                    const audio_offload_info_t *offloadInfo = NULL,
+                                    const audio_offload_info_t *offloadInfo = nullptr,
                                     const AttributionSourceState& attributionSource =
                                         AttributionSourceState(),
-                                    const audio_attributes_t* pAttributes = NULL,
+                                    const audio_attributes_t* pAttributes = nullptr,
+                                    bool doNotReconnect = false,
+                                    float maxRequiredSpeed = 1.0f);
+
+
+                        AudioTrack( audio_stream_type_t streamType,
+                                    uint32_t sampleRate,
+                                    audio_format_t format,
+                                    audio_channel_mask_t channelMask,
+                                    const sp<IMemory>& sharedBuffer,
+                                    audio_output_flags_t flags,
+                                    legacy_callback_t cbf,
+                                    void* user          = nullptr,
+                                    int32_t notificationFrames = 0,
+                                    audio_session_t sessionId   = AUDIO_SESSION_ALLOCATE,
+                                    transfer_type transferType = TRANSFER_DEFAULT,
+                                    const audio_offload_info_t *offloadInfo = nullptr,
+                                    const AttributionSourceState& attributionSource =
+                                        AttributionSourceState(),
+                                    const audio_attributes_t* pAttributes = nullptr,
                                     bool doNotReconnect = false,
                                     float maxRequiredSpeed = 1.0f);
 
@@ -334,20 +444,73 @@
                             audio_channel_mask_t channelMask,
                             size_t frameCount   = 0,
                             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                            callback_t cbf      = NULL,
-                            void* user          = NULL,
+                            const wp<IAudioTrackCallback>& callback = nullptr,
                             int32_t notificationFrames = 0,
                             const sp<IMemory>& sharedBuffer = 0,
                             bool threadCanCallJava = false,
                             audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
-                            const audio_offload_info_t *offloadInfo = NULL,
+                            const audio_offload_info_t *offloadInfo = nullptr,
                             const AttributionSourceState& attributionSource =
                                 AttributionSourceState(),
-                            const audio_attributes_t* pAttributes = NULL,
+                            const audio_attributes_t* pAttributes = nullptr,
                             bool doNotReconnect = false,
                             float maxRequiredSpeed = 1.0f,
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+
+            struct SetParams {
+                audio_stream_type_t streamType;
+                uint32_t sampleRate;
+                audio_format_t format;
+                audio_channel_mask_t channelMask;
+                size_t frameCount;
+                audio_output_flags_t flags;
+                wp<IAudioTrackCallback> callback;
+                int32_t notificationFrames;
+                sp<IMemory> sharedBuffer;
+                bool threadCanCallJava;
+                audio_session_t sessionId;
+                transfer_type transferType;
+                // TODO don't take pointers here
+                const audio_offload_info_t *offloadInfo;
+                AttributionSourceState attributionSource;
+                const audio_attributes_t* pAttributes;
+                bool doNotReconnect;
+                float maxRequiredSpeed;
+                audio_port_handle_t selectedDeviceId;
+            };
+        private:
+            // Note: Consumes parameters
+            void        set(SetParams& s) {
+                (void)set(s.streamType, s.sampleRate, s.format, s.channelMask, s.frameCount,
+                          s.flags, std::move(s.callback), s.notificationFrames,
+                          std::move(s.sharedBuffer), s.threadCanCallJava, s.sessionId,
+                          s.transferType, s.offloadInfo, std::move(s.attributionSource),
+                          s.pAttributes, s.doNotReconnect, s.maxRequiredSpeed, s.selectedDeviceId);
+            }
+            void       onFirstRef() override;
+        public:
+            status_t    set(audio_stream_type_t streamType,
+                            uint32_t sampleRate,
+                            audio_format_t format,
+                            audio_channel_mask_t channelMask,
+                            size_t frameCount,
+                            audio_output_flags_t flags,
+                            legacy_callback_t callback,
+                            void * user = nullptr,
+                            int32_t notificationFrames = 0,
+                            const sp<IMemory>& sharedBuffer = 0,
+                            bool threadCanCallJava = false,
+                            audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
+                            transfer_type transferType = TRANSFER_DEFAULT,
+                            const audio_offload_info_t *offloadInfo = nullptr,
+                            const AttributionSourceState& attributionSource =
+                                AttributionSourceState(),
+                            const audio_attributes_t* pAttributes = nullptr,
+                            bool doNotReconnect = false,
+                            float maxRequiredSpeed = 1.0f,
+                            audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
+
     // FIXME(b/169889714): Vendor code depends on the old method signature at link time
             status_t    set(audio_stream_type_t streamType,
                             uint32_t sampleRate,
@@ -355,17 +518,17 @@
                             uint32_t channelMask,
                             size_t frameCount   = 0,
                             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
-                            callback_t cbf      = NULL,
-                            void* user          = NULL,
+                            legacy_callback_t cbf = nullptr,
+                            void* user          = nullptr,
                             int32_t notificationFrames = 0,
                             const sp<IMemory>& sharedBuffer = 0,
                             bool threadCanCallJava = false,
                             audio_session_t sessionId  = AUDIO_SESSION_ALLOCATE,
                             transfer_type transferType = TRANSFER_DEFAULT,
-                            const audio_offload_info_t *offloadInfo = NULL,
+                            const audio_offload_info_t *offloadInfo = nullptr,
                             uid_t uid = AUDIO_UID_INVALID,
                             pid_t pid = -1,
-                            const audio_attributes_t* pAttributes = NULL,
+                            const audio_attributes_t* pAttributes = nullptr,
                             bool doNotReconnect = false,
                             float maxRequiredSpeed = 1.0f,
                             audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE);
@@ -429,8 +592,7 @@
      * less than or equal to the getBufferCapacityInFrames().
      * It may also be adjusted slightly for internal reasons.
      *
-     * Return the final size or a negative error if the track is unitialized
-     * or does not support variable sizes.
+     * Return the final size or a negative value (NO_INIT) if the track is uninitialized.
      */
             ssize_t     setBufferSizeInFrames(size_t size);
 
@@ -1182,7 +1344,6 @@
     sp<IMemory>             mSharedBuffer;
     transfer_type           mTransfer;
     audio_offload_info_t    mOffloadInfoCopy;
-    const audio_offload_info_t* mOffloadInfo;
     audio_attributes_t      mAttributes;
 
     size_t                  mFrameSize;             // frame size in bytes
@@ -1216,11 +1377,13 @@
     }
 
     // for client callback handler
-    callback_t              mCbf;                   // callback handler for events, or NULL
-    void*                   mUserData;
-
+    wp<IAudioTrackCallback> mCallback;                   // callback handler for events, or NULL
+    sp<IAudioTrackCallback> mLegacyCallbackWrapper;      // wrapper for legacy callback interface
     // for notification APIs
+    std::unique_ptr<SetParams> mSetParams;          // Temporary copy of ctor params to allow for
+                                                    // deferred set after first reference.
 
+    bool                    mInitialized = false;   // Set after track is initialized
     // next 2 fields are const after constructor or set()
     uint32_t                mNotificationFramesReq; // requested number of frames between each
                                                     // notification callback,
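The AudioTrack side mirrors AudioRecord: TRANSFER_CALLBACK clients now implement IAudioTrackCallback rather than a C-style callback. The sketch below fills silence purely for illustration:

#include <cstring>
#include <media/AudioTrack.h>

class MySource : public android::AudioTrack::IAudioTrackCallback {
protected:
    size_t onMoreData(const android::AudioTrack::Buffer& buffer) override {
        // A real source would render PCM into buffer.data(); silence is written here.
        std::memset(buffer.data(), 0, buffer.size());
        return buffer.size();  // bytes actually written
    }
    void onUnderrun() override { /* track the underrun */ }
    void onStreamEnd() override { /* offloaded stream fully drained */ }
};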
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 9e5019e..3c3715d 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -37,10 +37,12 @@
 #include <string>
 #include <vector>
 
+#include <android/content/AttributionSourceState.h>
 #include <android/media/AudioVibratorInfo.h>
 #include <android/media/BnAudioFlingerService.h>
 #include <android/media/BpAudioFlingerService.h>
-#include <android/content/AttributionSourceState.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
 #include "android/media/CreateEffectRequest.h"
 #include "android/media/CreateEffectResponse.h"
 #include "android/media/CreateRecordRequest.h"
@@ -63,7 +65,7 @@
 
 // ----------------------------------------------------------------------------
 
-class IAudioFlinger : public RefBase {
+class IAudioFlinger : public virtual RefBase {
 public:
     static constexpr char DEFAULT_SERVICE_NAME[] = "media.audio_flinger";
 
@@ -166,6 +168,7 @@
         sp<IMemory> buffers;
         audio_port_handle_t portId;
         sp<media::IAudioRecord> audioRecord;
+        audio_config_base_t serverConfig;
 
         ConversionResult<media::CreateRecordResponse> toAidl() const;
         static ConversionResult<CreateRecordOutput>
@@ -347,6 +350,16 @@
 
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
+
+    virtual status_t getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
+
+    virtual int32_t getAAudioMixerBurstCount() = 0;
+
+    virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
 };
 
 /**
@@ -443,6 +456,12 @@
     status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
+    status_t getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
+    int32_t getAAudioMixerBurstCount() override;
+    int32_t getAAudioHardwareBurstMinUsec() override;
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -463,9 +482,9 @@
      * Legacy server should implement this interface in order to be wrapped.
      */
     class Delegate : public IAudioFlinger {
-    protected:
         friend class AudioFlingerServerAdapter;
-
+    public:
+        // expose the TransactionCode enum for TimeCheck purposes.
         enum class TransactionCode {
             CREATE_TRACK = media::BnAudioFlingerService::TRANSACTION_createTrack,
             CREATE_RECORD = media::BnAudioFlingerService::TRANSACTION_createRecord,
@@ -528,8 +547,13 @@
             SET_AUDIO_HAL_PIDS = media::BnAudioFlingerService::TRANSACTION_setAudioHalPids,
             SET_VIBRATOR_INFOS = media::BnAudioFlingerService::TRANSACTION_setVibratorInfos,
             UPDATE_SECONDARY_OUTPUTS = media::BnAudioFlingerService::TRANSACTION_updateSecondaryOutputs,
+            GET_MMAP_POLICY_INFOS = media::BnAudioFlingerService::TRANSACTION_getMmapPolicyInfos,
+            GET_AAUDIO_MIXER_BURST_COUNT = media::BnAudioFlingerService::TRANSACTION_getAAudioMixerBurstCount,
+            GET_AAUDIO_HARDWARE_BURST_MIN_USEC = media::BnAudioFlingerService::TRANSACTION_getAAudioHardwareBurstMinUsec,
+            SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
         };
 
+    protected:
         /**
          * An optional hook, called on every transaction, allowing additional operations to be
          * performed before/after the unparceling of the data and dispatching to the respective
@@ -569,7 +593,8 @@
     Status createRecord(const media::CreateRecordRequest& request,
                         media::CreateRecordResponse* _aidl_return) override;
     Status sampleRate(int32_t ioHandle, int32_t* _aidl_return) override;
-    Status format(int32_t output, media::audio::common::AudioFormat* _aidl_return) override;
+    Status format(int32_t output,
+                  media::audio::common::AudioFormatDescription* _aidl_return) override;
     Status frameCount(int32_t ioHandle, int64_t* _aidl_return) override;
     Status latency(int32_t output, int32_t* _aidl_return) override;
     Status setMasterVolume(float value) override;
@@ -578,12 +603,13 @@
     Status masterMute(bool* _aidl_return) override;
     Status setMasterBalance(float balance) override;
     Status getMasterBalance(float* _aidl_return) override;
-    Status setStreamVolume(media::AudioStreamType stream, float value, int32_t output) override;
-    Status setStreamMute(media::AudioStreamType stream, bool muted) override;
-    Status
-    streamVolume(media::AudioStreamType stream, int32_t output, float* _aidl_return) override;
-    Status streamMute(media::AudioStreamType stream, bool* _aidl_return) override;
-    Status setMode(media::AudioMode mode) override;
+    Status setStreamVolume(media::audio::common::AudioStreamType stream,
+                           float value, int32_t output) override;
+    Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
+    Status streamVolume(media::audio::common::AudioStreamType stream,
+                        int32_t output, float* _aidl_return) override;
+    Status streamMute(media::audio::common::AudioStreamType stream, bool* _aidl_return) override;
+    Status setMode(media::audio::common::AudioMode mode) override;
     Status setMicMute(bool state) override;
     Status getMicMute(bool* _aidl_return) override;
     Status setRecordSilenced(int32_t portId, bool silenced) override;
@@ -591,8 +617,10 @@
     Status
     getParameters(int32_t ioHandle, const std::string& keys, std::string* _aidl_return) override;
     Status registerClient(const sp<media::IAudioFlingerClient>& client) override;
-    Status getInputBufferSize(int32_t sampleRate, media::audio::common::AudioFormat format,
-                              int32_t channelMask, int64_t* _aidl_return) override;
+    Status getInputBufferSize(int32_t sampleRate,
+                              const media::audio::common::AudioFormatDescription& format,
+                              const media::audio::common::AudioChannelLayout& channelMask,
+                              int64_t* _aidl_return) override;
     Status openOutput(const media::OpenOutputRequest& request,
                       media::OpenOutputResponse* _aidl_return) override;
     Status openDuplicateOutput(int32_t output1, int32_t output2, int32_t* _aidl_return) override;
@@ -602,7 +630,7 @@
     Status openInput(const media::OpenInputRequest& request,
                      media::OpenInputResponse* _aidl_return) override;
     Status closeInput(int32_t input) override;
-    Status invalidateStream(media::AudioStreamType stream) override;
+    Status invalidateStream(media::audio::common::AudioStreamType stream) override;
     Status setVoiceVolume(float volume) override;
     Status getRenderPosition(int32_t output, media::RenderPosition* _aidl_return) override;
     Status getInputFramesLost(int32_t ioHandle, int32_t* _aidl_return) override;
@@ -611,7 +639,8 @@
     Status releaseAudioSessionId(int32_t audioSession, int32_t pid) override;
     Status queryNumberEffects(int32_t* _aidl_return) override;
     Status queryEffect(int32_t index, media::EffectDescriptor* _aidl_return) override;
-    Status getEffectDescriptor(const media::AudioUuid& effectUUID, const media::AudioUuid& typeUUID,
+    Status getEffectDescriptor(const media::audio::common::AudioUuid& effectUUID,
+                               const media::audio::common::AudioUuid& typeUUID,
                                int32_t preferredTypeFlag,
                                media::EffectDescriptor* _aidl_return) override;
     Status createEffect(const media::CreateEffectRequest& request,
@@ -637,6 +666,12 @@
     Status setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     Status updateSecondaryOutputs(
             const std::vector<media::TrackSecondaryOutputInfo>& trackSecondaryOutputInfos) override;
+    Status getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *_aidl_return) override;
+    Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
+    Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
+    Status setDeviceConnectedState(const media::AudioPort& port, bool connected) override;
 
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
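Since TransactionCode is now public on AudioFlingerServerAdapter::Delegate "for TimeCheck purposes", server-side helpers can name the transaction being dispatched. A hedged sketch only; the mapping shown is illustrative, not the actual TimeCheck integration, and only uses codes visible in this change:

#include <media/IAudioFlinger.h>

using TransactionCode = android::AudioFlingerServerAdapter::Delegate::TransactionCode;

// Returns a short tag for a subset of transactions; unlisted codes fall through.
const char* transactionName(TransactionCode code) {
    switch (code) {
        case TransactionCode::CREATE_TRACK:          return "createTrack";
        case TransactionCode::CREATE_RECORD:         return "createRecord";
        case TransactionCode::GET_MMAP_POLICY_INFOS: return "getMmapPolicyInfos";
        default:                                     return "other";
    }
}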
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index 873f27a..54e778e 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -23,10 +23,8 @@
 
 #include <android/media/AudioMix.h>
 #include <android/media/AudioMixCallbackFlag.h>
-#include <android/media/AudioMixLatencyClass.h>
 #include <android/media/AudioMixRouteFlag.h>
 #include <android/media/AudioMixType.h>
-#include <android/media/AudioMode.h>
 #include <android/media/AudioOffloadMode.h>
 #include <android/media/AudioPolicyForceUse.h>
 #include <android/media/AudioPolicyForcedConfig.h>
@@ -39,11 +37,6 @@
 
 namespace android {
 
-ConversionResult<volume_group_t>
-aidl2legacy_int32_t_volume_group_t(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
-
 ConversionResult<product_strategy_t>
 aidl2legacy_int32_t_product_strategy_t(int32_t aidl);
 ConversionResult<int32_t>
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index a575616..46e9501 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -28,7 +28,7 @@
 
 namespace android {
 
-class ToneGenerator {
+class ToneGenerator : public AudioTrack::IAudioTrackCallback {
 public:
 
     // List of all available tones
@@ -156,6 +156,9 @@
 
     ToneGenerator(audio_stream_type_t streamType, float volume, bool threadCanCallJava = false,
             std::string opPackageName = {});
+
+    void onFirstRef() override;
+
     ~ToneGenerator();
 
     bool startTone(tone_type toneType, int durationMs = -1);
@@ -222,7 +225,11 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
+         // TAIWAN supervisory tones
         TONE_TW_RINGTONE,           // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
+         // NEW ZEALAND supervisory tones
+        TONE_NZ_CALL_WAITING,       // Call waiting tone: 400 Hz,  0.2s ON, 3s OFF,
+                                    //        0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
         NUM_ALTERNATE_TONES
     };
 
@@ -236,6 +243,7 @@
         IRELAND,
         INDIA,
         TAIWAN,
+        NZ,
         CEPT,
         NUM_REGIONS
     };
@@ -284,11 +292,10 @@
     static const ToneDescriptor sToneDescriptors[];
 
     bool mThreadCanCallJava;
-    unsigned int mTotalSmp;  // Total number of audio samples played (gives current time)
+    uint64_t mTotalSmp;  // Total number of audio samples played (gives current time)
+    // Since these types are 32 bit, we may have issues with aborting on
+    // overflow now that we have integer overflow sanitization enabled globally.
     unsigned int mNextSegSmp;  // Position of next segment transition expressed in samples
-    // NOTE: because mTotalSmp, mNextSegSmp are stored on 32 bit, current design will operate properly
-    // only if tone duration is less than about 27 Hours(@44100Hz sampling rate). If this time is exceeded,
-    // no crash will occur but tone sequence will show a glitch.
     unsigned int mMaxSmp;  // Maximum number of audio samples played (maximum tone duration)
     int mDurationMs;  // Maximum tone duration in ms
 
@@ -311,6 +318,7 @@
     unsigned int mProcessSize;  // Size of audio blocks generated at a time by audioCallback() (in PCM frames).
     struct timespec mStartTime; // tone start time: needed to guarantee actual tone duration
 
+    size_t onMoreData(const AudioTrack::Buffer& buffer) override;
     bool initAudioTrack();
     static void audioCallback(int event, void* user, void *info);
     bool prepareWave();
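
Not part of the change itself: a minimal sketch of the callback-interface style that this hunk moves ToneGenerator towards, assuming only the IAudioTrackCallback/onMoreData signatures visible above. The class name and its silent behaviour are hypothetical.

#include <media/AudioTrack.h>

namespace android {

// Hypothetical minimal callback: AudioTrack drives data requests through the
// interface instead of the old static audioCallback(int, void*, void*) trampoline.
class SilentTrackCallback : public AudioTrack::IAudioTrackCallback {
  public:
    size_t onMoreData(const AudioTrack::Buffer& buffer) override {
        (void)buffer;  // a real source would fill the buffer with PCM frames
        return 0;      // report that no data was written
    }
};

}  // namespace android
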
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 80124b8..fe88116 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -28,8 +28,8 @@
     explicit TrackPlayerBase();
     virtual ~TrackPlayerBase();
 
-            void init(AudioTrack* pat, player_type_t playerType, audio_usage_t usage,
-                    audio_session_t sessionId);
+    void init(const sp<AudioTrack>& pat, const sp<AudioTrack::IAudioTrackCallback>& callback,
+              player_type_t playerType, audio_usage_t usage, audio_session_t sessionId);
     virtual void destroy();
 
     //IPlayer implementation
@@ -66,8 +66,8 @@
 
     // volume coming from the player volume API
     float mPlayerVolumeL, mPlayerVolumeR;
-
-   sp<SelfAudioDeviceCallback> mSelfAudioDeviceCallback;
+    sp<AudioTrack::IAudioTrackCallback> mCallbackHandle;
+    sp<SelfAudioDeviceCallback> mSelfAudioDeviceCallback;
 };
 
 } // namespace android
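
Not part of the change itself: a hedged sketch of a call site adapted to the new init() signature above. The helper name, the SilentTrackCallback class (from the ToneGenerator note earlier), and the enum values are placeholders chosen for illustration.

#include <media/AudioTrack.h>
#include <media/TrackPlayerBase.h>

using namespace android;

// Hypothetical helper showing the new init() call shape: the track and the
// callback handle are now passed in explicitly as sp<> references.
void initPlayerSketch(const sp<TrackPlayerBase>& player, player_type_t playerType) {
    sp<AudioTrack> track = sp<AudioTrack>::make();
    sp<AudioTrack::IAudioTrackCallback> callback = sp<SilentTrackCallback>::make();
    player->init(track, callback, playerType, AUDIO_USAGE_MEDIA, AUDIO_SESSION_ALLOCATE);
}
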
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index def7ca6..891293e 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -9,10 +9,35 @@
 
 cc_defaults {
     name: "libaudioclient_tests_defaults",
+    test_suites: ["device-tests"],
     cflags: [
         "-Wall",
         "-Werror",
     ],
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
+cc_test {
+    name: "audio_aidl_conversion_tests",
+    defaults: ["libaudioclient_tests_defaults"],
+    srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "android.media.audio.common.types-V1-cpp",
+        "audioclient-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libstagefright_foundation",
+    ],
 }
 
 cc_test {
@@ -30,8 +55,10 @@
 cc_test {
     name: "test_create_audiotrack",
     defaults: ["libaudioclient_tests_defaults"],
-    srcs: ["test_create_audiotrack.cpp",
-           "test_create_utils.cpp"],
+    srcs: [
+        "test_create_audiotrack.cpp",
+        "test_create_utils.cpp",
+    ],
     header_libs: [
         "libmedia_headers",
         "libmediametrics_headers",
@@ -49,8 +76,10 @@
 cc_test {
     name: "test_create_audiorecord",
     defaults: ["libaudioclient_tests_defaults"],
-    srcs: ["test_create_audiorecord.cpp",
-           "test_create_utils.cpp"],
+    srcs: [
+        "test_create_audiorecord.cpp",
+        "test_create_utils.cpp",
+    ],
     header_libs: [
         "libmedia_headers",
         "libmediametrics_headers",
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
new file mode 100644
index 0000000..997f62a
--- /dev/null
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include <media/AudioCommonTypes.h>
+#include <media/AidlConversion.h>
+
+using namespace android;
+using namespace android::aidl_utils;
+
+using media::audio::common::AudioChannelLayout;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioDeviceType;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioFormatType;
+using media::audio::common::PcmType;
+
+namespace {
+
+template<typename T> size_t hash(const T& t) {
+    return std::hash<T>{}(t);
+}
+
+AudioChannelLayout make_ACL_None() {
+    return AudioChannelLayout{};
+}
+
+AudioChannelLayout make_ACL_Invalid() {
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::invalid>(0);
+}
+
+AudioChannelLayout make_ACL_Stereo() {
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+}
+
+AudioChannelLayout make_ACL_LayoutArbitrary() {
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(
+            // Use channels that exist both for input and output,
+            // but don't form a known layout mask.
+            AudioChannelLayout::CHANNEL_FRONT_LEFT |
+            AudioChannelLayout::CHANNEL_FRONT_RIGHT |
+            AudioChannelLayout::CHANNEL_TOP_SIDE_LEFT |
+            AudioChannelLayout::CHANNEL_TOP_SIDE_RIGHT);
+}
+
+AudioChannelLayout make_ACL_ChannelIndex2() {
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::indexMask>(
+            AudioChannelLayout::INDEX_MASK_2);
+}
+
+AudioChannelLayout make_ACL_ChannelIndexArbitrary() {
+    // Use channels 1 and 3.
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::indexMask>(5);
+}
+
+AudioChannelLayout make_ACL_VoiceCall() {
+    return AudioChannelLayout::make<AudioChannelLayout::Tag::voiceMask>(
+            AudioChannelLayout::VOICE_CALL_MONO);
+}
+
+AudioDeviceDescription make_AudioDeviceDescription(AudioDeviceType type,
+        const std::string& connection = "") {
+    AudioDeviceDescription result;
+    result.type = type;
+    result.connection = connection;
+    return result;
+}
+
+AudioDeviceDescription make_ADD_None() {
+    return AudioDeviceDescription{};
+}
+
+AudioDeviceDescription make_ADD_DefaultIn() {
+    return make_AudioDeviceDescription(AudioDeviceType::IN_DEFAULT);
+}
+
+AudioDeviceDescription make_ADD_DefaultOut() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_DEFAULT);
+}
+
+AudioDeviceDescription make_ADD_WiredHeadset() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+            AudioDeviceDescription::CONNECTION_ANALOG());
+}
+
+AudioDeviceDescription make_ADD_BtScoHeadset() {
+    return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+            AudioDeviceDescription::CONNECTION_BT_SCO());
+}
+
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
+    AudioFormatDescription result;
+    result.type = type;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
+    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
+    result.pcm = pcm;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
+    AudioFormatDescription result;
+    result.encoding = encoding;
+    return result;
+}
+
+AudioFormatDescription make_AudioFormatDescription(PcmType transport,
+        const std::string& encoding) {
+    auto result = make_AudioFormatDescription(encoding);
+    result.pcm = transport;
+    return result;
+}
+
+AudioFormatDescription make_AFD_Default() {
+    return AudioFormatDescription{};
+}
+
+AudioFormatDescription make_AFD_Invalid() {
+    return make_AudioFormatDescription(AudioFormatType::SYS_RESERVED_INVALID);
+}
+
+AudioFormatDescription make_AFD_Pcm16Bit() {
+    return make_AudioFormatDescription(PcmType::INT_16_BIT);
+}
+
+AudioFormatDescription make_AFD_Bitstream() {
+    return make_AudioFormatDescription("example");
+}
+
+AudioFormatDescription make_AFD_Encap() {
+    return make_AudioFormatDescription(PcmType::INT_16_BIT, "example.encap");
+}
+
+AudioFormatDescription make_AFD_Encap_with_Enc() {
+    auto afd = make_AFD_Encap();
+    afd.encoding += "+example";
+    return afd;
+}
+
+}  // namespace
+
+// Verify that two independently constructed ADDs/AFDs have the same hash.
+// This ensures that regardless of where an ADD/AFD instance originates,
+// it can be correctly compared to another ADD/AFD instance. Thus,
+// for example, a 16-bit integer format description provided by the HAL
+// is identical to the same format description constructed by the framework.
+class HashIdentityTest : public ::testing::Test {
+  public:
+    template<typename T> void verifyHashIdentity(const std::vector<std::function<T()>>& valueGens) {
+        for (size_t i = 0; i < valueGens.size(); ++i) {
+            for (size_t j = 0; j < valueGens.size(); ++j) {
+                if (i == j) {
+                    EXPECT_EQ(hash(valueGens[i]()), hash(valueGens[i]())) << i;
+                } else {
+                    EXPECT_NE(hash(valueGens[i]()), hash(valueGens[j]())) << i << ", " << j;
+                }
+            }
+        }
+    }
+};
+
+TEST_F(HashIdentityTest, AudioChannelLayoutHashIdentity) {
+    verifyHashIdentity<AudioChannelLayout>({
+            make_ACL_None, make_ACL_Invalid, make_ACL_Stereo,
+            make_ACL_LayoutArbitrary, make_ACL_ChannelIndex2,
+            make_ACL_ChannelIndexArbitrary, make_ACL_VoiceCall});
+}
+
+TEST_F(HashIdentityTest, AudioDeviceDescriptionHashIdentity) {
+    verifyHashIdentity<AudioDeviceDescription>({
+            make_ADD_None, make_ADD_DefaultIn, make_ADD_DefaultOut, make_ADD_WiredHeadset,
+            make_ADD_BtScoHeadset});
+}
+
+TEST_F(HashIdentityTest, AudioFormatDescriptionHashIdentity) {
+    verifyHashIdentity<AudioFormatDescription>({
+            make_AFD_Default, make_AFD_Invalid, make_AFD_Pcm16Bit, make_AFD_Bitstream,
+            make_AFD_Encap, make_AFD_Encap_with_Enc});
+}
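
A hedged illustration, not part of the patch: the hash identity checked above is what lets these AIDL types serve as keys in unordered containers, assuming the same std::hash specializations that verifyHashIdentity() exercises. The test name below is hypothetical; it reuses make_ADD_WiredHeadset() from the anonymous namespace of this file and would need <unordered_set> included.

#include <unordered_set>

TEST(HashUsageSketch, UnorderedContainerKey) {
    // Equal descriptions constructed independently land in the same bucket,
    // so an AIDL description can be looked up directly as a container key.
    std::unordered_set<AudioDeviceDescription> supported;
    supported.insert(make_ADD_WiredHeadset());
    EXPECT_EQ(1u, supported.count(make_ADD_WiredHeadset()));
}
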
+
+using ChannelLayoutParam = std::tuple<AudioChannelLayout, bool /*isInput*/>;
+class AudioChannelLayoutRoundTripTest :
+        public testing::TestWithParam<ChannelLayoutParam> {};
+TEST_P(AudioChannelLayoutRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = std::get<0>(GetParam());
+    const bool isInput = std::get<1>(GetParam());
+    auto conv = aidl2legacy_AudioChannelLayout_audio_channel_mask_t(initial, isInput);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_channel_mask_t_AudioChannelLayout(conv.value(), isInput);
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioChannelLayoutRoundTrip,
+        AudioChannelLayoutRoundTripTest,
+        testing::Combine(
+                testing::Values(AudioChannelLayout{}, make_ACL_Invalid(), make_ACL_Stereo(),
+                        make_ACL_LayoutArbitrary(), make_ACL_ChannelIndex2(),
+                        make_ACL_ChannelIndexArbitrary()),
+                testing::Values(false, true)));
+INSTANTIATE_TEST_SUITE_P(AudioChannelVoiceRoundTrip,
+        AudioChannelLayoutRoundTripTest,
+        // In the legacy constants, voice call masks are defined only for input.
+        testing::Combine(testing::Values(make_ACL_VoiceCall()), testing::Values(true)));
+
+using ChannelLayoutEdgeCaseParam = std::tuple<int /*legacy*/, bool /*isInput*/, bool /*isValid*/>;
+class AudioChannelLayoutEdgeCaseTest :
+        public testing::TestWithParam<ChannelLayoutEdgeCaseParam> {};
+TEST_P(AudioChannelLayoutEdgeCaseTest, Legacy2Aidl) {
+    const audio_channel_mask_t legacy = static_cast<audio_channel_mask_t>(std::get<0>(GetParam()));
+    const bool isInput = std::get<1>(GetParam());
+    const bool isValid = std::get<2>(GetParam());
+    auto conv = legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy, isInput);
+    EXPECT_EQ(isValid, conv.ok());
+}
+INSTANTIATE_TEST_SUITE_P(AudioChannelLayoutEdgeCase,
+        AudioChannelLayoutEdgeCaseTest,
+        testing::Values(
+                // Valid legacy input masks.
+                std::make_tuple(AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO, true, true),
+                std::make_tuple(AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO, true, true),
+                std::make_tuple(AUDIO_CHANNEL_IN_VOICE_CALL_MONO, true, true),
+                // Valid legacy output masks.
+                std::make_tuple(
+                        // This has the same numerical representation as Mask 'A' below
+                        AUDIO_CHANNEL_OUT_FRONT_CENTER | AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                        AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT, false, true),
+                std::make_tuple(
+                        // This has the same numerical representation as Mask 'B' below
+                        AUDIO_CHANNEL_OUT_FRONT_CENTER | AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                        AUDIO_CHANNEL_OUT_TOP_BACK_LEFT, false, true),
+                // Invalid legacy input masks.
+                std::make_tuple(AUDIO_CHANNEL_IN_6, true, false),
+                std::make_tuple(
+                        AUDIO_CHANNEL_IN_6 | AUDIO_CHANNEL_IN_FRONT_PROCESSED, true, false),
+                std::make_tuple(
+                        AUDIO_CHANNEL_IN_PRESSURE | AUDIO_CHANNEL_IN_X_AXIS |
+                        AUDIO_CHANNEL_IN_Y_AXIS | AUDIO_CHANNEL_IN_Z_AXIS, true, false),
+                std::make_tuple(  // Mask 'A'
+                        AUDIO_CHANNEL_IN_STEREO | AUDIO_CHANNEL_IN_VOICE_UPLINK, true, false),
+                std::make_tuple(  // Mask 'B'
+                        AUDIO_CHANNEL_IN_STEREO | AUDIO_CHANNEL_IN_VOICE_DNLINK, true, false)));
+
+class AudioDeviceDescriptionRoundTripTest :
+        public testing::TestWithParam<AudioDeviceDescription> {};
+TEST_P(AudioDeviceDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    auto conv = aidl2legacy_AudioDeviceDescription_audio_devices_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_devices_t_AudioDeviceDescription(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioDeviceDescriptionRoundTrip,
+        AudioDeviceDescriptionRoundTripTest,
+        testing::Values(AudioDeviceDescription{}, make_ADD_DefaultIn(),
+                make_ADD_DefaultOut(), make_ADD_WiredHeadset(), make_ADD_BtScoHeadset()));
+
+class AudioFormatDescriptionRoundTripTest :
+        public testing::TestWithParam<AudioFormatDescription> {};
+TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
+    const auto initial = GetParam();
+    auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = legacy2aidl_audio_format_t_AudioFormatDescription(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(AudioFormatDescriptionRoundTrip,
+        AudioFormatDescriptionRoundTripTest,
+        testing::Values(make_AFD_Invalid(), AudioFormatDescription{}, make_AFD_Pcm16Bit()));
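
Not part of the patch: a sketch of how such a conversion is typically consumed outside of tests, using the ConversionResult/VALUE_OR_RETURN_STATUS pattern that appears elsewhere in this change. The wrapper function itself is hypothetical and assumes the usual AidlConversion includes.

// Hypothetical production-style wrapper: propagate a failed conversion as a
// status_t via VALUE_OR_RETURN_STATUS instead of asserting like the tests do.
status_t channelLayoutToLegacy(const AudioChannelLayout& aidl, bool isInput,
                               audio_channel_mask_t* legacy) {
    *legacy = VALUE_OR_RETURN_STATUS(
            aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl, isInput));
    return OK;
}
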
diff --git a/media/libaudioclient/tests/test_create_audiorecord.cpp b/media/libaudioclient/tests/test_create_audiorecord.cpp
index 1cbcb71..2e0883b 100644
--- a/media/libaudioclient/tests/test_create_audiorecord.cpp
+++ b/media/libaudioclient/tests/test_create_audiorecord.cpp
@@ -98,14 +98,14 @@
         attributes.source = inputSource;
 
         sp<AudioRecord> record = new AudioRecord(attributionSource);
+        const auto emptyCallback = sp<AudioRecord::IAudioRecordCallback>::make();
 
         record->set(AUDIO_SOURCE_DEFAULT,
                    sampleRate,
                    format,
                    channelMask,
                    frameCount,
-                   fast ? callback : nullptr,
-                   nullptr,
+                   fast ? emptyCallback : nullptr,
                    notificationFrames,
                    false,
                    sessionId,
diff --git a/media/libaudioclient/tests/test_create_audiotrack.cpp b/media/libaudioclient/tests/test_create_audiotrack.cpp
index cf9b925..e7231d3 100644
--- a/media/libaudioclient/tests/test_create_audiotrack.cpp
+++ b/media/libaudioclient/tests/test_create_audiotrack.cpp
@@ -19,6 +19,7 @@
 #include <string.h>
 #include <unistd.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryDealer.h>
 #include <binder/MemoryHeapBase.h>
@@ -108,17 +109,15 @@
         memset(&attributes, 0, sizeof(attributes));
         attributes.content_type = contentType;
         attributes.usage = usage;
-
         sp<AudioTrack> track = new AudioTrack();
-
+        const auto emptyCallback = sp<AudioTrack::IAudioTrackCallback>::make();
         track->set(AUDIO_STREAM_DEFAULT,
                    sampleRate,
                    format,
                    channelMask,
                    frameCount,
                    flags,
-                   (fast || offload) ? callback : nullptr,
-                   nullptr,
+                   (fast || offload) ? emptyCallback : nullptr,
                    notificationFrames,
                    sharedBuffer,
                    false,
@@ -126,8 +125,7 @@
                    ((fast && sharedBuffer == 0) || offload) ?
                            AudioTrack::TRANSFER_CALLBACK : AudioTrack::TRANSFER_DEFAULT,
                    offload ? &offloadInfo : nullptr,
-                   getuid(),
-                   getpid(),
+                   AttributionSourceState(),
                    &attributes,
                    false,
                    1.0f,
diff --git a/media/libaudioclient/tests/test_create_utils.cpp b/media/libaudioclient/tests/test_create_utils.cpp
index 8aa1f13..caf5227 100644
--- a/media/libaudioclient/tests/test_create_utils.cpp
+++ b/media/libaudioclient/tests/test_create_utils.cpp
@@ -68,10 +68,6 @@
     return true;
 }
 
-void callback(int event __unused, void* user __unused, void *info __unused)
-{
-}
-
 int main(int argc, char **argv, test_func_t testFunc)
 {
     FILE *inputFile = nullptr;
diff --git a/media/libaudioclient/tests/test_create_utils.h b/media/libaudioclient/tests/test_create_utils.h
index 2ad646e..9a6f9fa 100644
--- a/media/libaudioclient/tests/test_create_utils.h
+++ b/media/libaudioclient/tests/test_create_utils.h
@@ -31,7 +31,6 @@
 
 bool checkVersion(FILE *inputFile, const char *version);
 
-void callback(int event, void* user, void *info);
 
 typedef int (*test_func_t)(FILE *inputFile, int outputFileFd);
 
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index 3bef55b..159f898 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -11,6 +11,10 @@
     name: "libaudiofoundation_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
 
     export_include_dirs: ["include"],
     header_libs: [
@@ -24,9 +28,11 @@
         "libmedia_helper_headers",
     ],
     static_libs: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
     ],
     export_static_lib_headers: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
     ],
     host_supported: true,
@@ -52,6 +58,7 @@
     ],
 
     shared_libs: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libaudioutils",
@@ -63,6 +70,7 @@
     ],
 
     export_shared_lib_headers: [
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
     ],
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 31257d5..0a8188f 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -63,48 +63,53 @@
     return audioDeviceInAllUsbSet;
 }
 
-bool deviceTypesToString(const DeviceTypeSet &deviceTypes, std::string &str) {
+const DeviceTypeSet& getAudioDeviceOutAllBleSet() {
+    static const DeviceTypeSet audioDeviceOutAllBleSet = DeviceTypeSet(
+            std::begin(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY),
+            std::end(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY));
+    return audioDeviceOutAllBleSet;
+}
+
+const DeviceTypeSet& getAudioDeviceOutLeAudioUnicastSet() {
+    static const DeviceTypeSet audioDeviceOutLeAudioUnicastSet = DeviceTypeSet(
+            std::begin(AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY),
+            std::end(AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY));
+    return audioDeviceOutLeAudioUnicastSet;
+}
+
+std::string deviceTypesToString(const DeviceTypeSet &deviceTypes) {
     if (deviceTypes.empty()) {
-        str = "Empty device types";
-        return true;
+        return "Empty device types";
     }
-    bool ret = true;
-    for (auto it = deviceTypes.begin(); it != deviceTypes.end();) {
-        std::string deviceTypeStr;
-        ret = audio_is_output_device(*it) ?
-              OutputDeviceConverter::toString(*it, deviceTypeStr) :
-              InputDeviceConverter::toString(*it, deviceTypeStr);
-        if (!ret) {
-            break;
+    std::stringstream ss;
+    for (auto it = deviceTypes.begin(); it != deviceTypes.end(); ++it) {
+        if (it != deviceTypes.begin()) {
+            ss << ", ";
         }
-        str.append(deviceTypeStr);
-        if (++it != deviceTypes.end()) {
-            str.append(" , ");
+        const char* strType = audio_device_to_string(*it);
+        if (strlen(strType) != 0) {
+            ss << strType;
+        } else {
+            ss << "unknown type:0x" << std::hex << *it;
         }
     }
-    if (!ret) {
-        str = "Unknown values";
-    }
-    return ret;
+    return ss.str();
+}
+
+bool deviceTypesToString(const DeviceTypeSet &deviceTypes, std::string &str) {
+    str = deviceTypesToString(deviceTypes);
+    return true;
 }
 
 std::string dumpDeviceTypes(const DeviceTypeSet &deviceTypes) {
-    std::string ret;
-    for (auto it = deviceTypes.begin(); it != deviceTypes.end();) {
-        std::stringstream ss;
-        ss << "0x" << std::hex << (*it);
-        ret.append(ss.str());
-        if (++it != deviceTypes.end()) {
-            ret.append(" , ");
+    std::stringstream ss;
+    for (auto it = deviceTypes.begin(); it != deviceTypes.end(); ++it) {
+        if (it != deviceTypes.begin()) {
+            ss << ", ";
         }
+        ss << "0x" << std::hex << (*it);
     }
-    return ret;
-}
-
-std::string toString(const DeviceTypeSet& deviceTypes) {
-    std::string ret;
-    deviceTypesToString(deviceTypes, ret);
-    return ret;
+    return ss.str();
 }
 
 } // namespace android
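
Not part of the patch: a hedged usage sketch of the new value-returning deviceTypesToString() overload; the device set, log tag, and log line are illustrative only.

#define LOG_TAG "DeviceDumpSketch"
#include <media/AudioContainers.h>
#include <utils/Log.h>

void logSelectedOutputsSketch() {
    android::DeviceTypeSet outputs = {AUDIO_DEVICE_OUT_SPEAKER, AUDIO_DEVICE_OUT_WIRED_HEADSET};
    // The rewritten helper prints readable device names, falling back to
    // "unknown type:0x..." for values it cannot map.
    ALOGV("selected outputs: %s", android::deviceTypesToString(outputs).c_str());
}
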
diff --git a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
index c5d7da8..4a7e956 100644
--- a/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
+++ b/media/libaudiofoundation/AudioDeviceTypeAddr.cpp
@@ -25,6 +25,9 @@
 
 namespace android {
 
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
+
 namespace {
 
 static const std::string SUPPRESSED = "SUPPRESSED";
@@ -97,10 +100,13 @@
 
 std::string AudioDeviceTypeAddr::toString(bool includeSensitiveInfo) const {
     std::stringstream sstream;
-    sstream << "type:0x" << std::hex << mType;
+    sstream << audio_device_to_string(mType);
+    if (sstream.str().empty()) {
+        sstream << "unknown type:0x" << std::hex << mType;
+    }
     // IP and MAC addresses are sensitive information. They will be suppressed
     // if `includeSensitiveInfo` is false.
-    sstream << ",@:"
+    sstream << ", @:"
             << (!includeSensitiveInfo && mIsAddressSensitive ? SUPPRESSED : mAddress);
     return sstream.str();
 }
@@ -157,17 +163,16 @@
 }
 
 ConversionResult<AudioDeviceTypeAddr>
-aidl2legacy_AudioDeviceTypeAddress(const media::AudioDevice& aidl) {
-    audio_devices_t type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
-    return AudioDeviceTypeAddr(type, aidl.address);
+aidl2legacy_AudioDeviceTypeAddress(const AudioDevice& aidl) {
+    audio_devices_t type;
+    std::string address;
+    RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(aidl, &type, &address));
+    return AudioDeviceTypeAddr(type, address);
 }
 
-ConversionResult<media::AudioDevice>
+ConversionResult<AudioDevice>
 legacy2aidl_AudioDeviceTypeAddress(const AudioDeviceTypeAddr& legacy) {
-    media::AudioDevice aidl;
-    aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.mType));
-    aidl.address = legacy.getAddress();
-    return aidl;
+    return legacy2aidl_audio_device_AudioDevice(legacy.mType, legacy.getAddress());
 }
 
 } // namespace android
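
Not part of the patch: an illustration of the friendlier toString() output, under the assumption that audio_device_to_string() yields the enum-style name; the exact text and the helper function are not guaranteed.

void dumpDeviceSketch() {
    android::AudioDeviceTypeAddr addr(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55");
    // With includeSensitiveInfo == false, a sensitive address (such as a MAC
    // address) is replaced by SUPPRESSED, so the result reads roughly like
    // "<device type name>, @:SUPPRESSED" rather than a bare hex type.
    std::string dumped = addr.toString(false /*includeSensitiveInfo*/);
    (void)dumped;
}
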
diff --git a/media/libaudiofoundation/AudioGain.cpp b/media/libaudiofoundation/AudioGain.cpp
index 1dee938..47e0edb 100644
--- a/media/libaudiofoundation/AudioGain.cpp
+++ b/media/libaudiofoundation/AudioGain.cpp
@@ -24,22 +24,18 @@
 #define ALOGVV(a...) do { } while(0)
 #endif
 
+#include <math.h>
+
 #include <algorithm>
 
 #include <android-base/stringprintf.h>
 #include <media/AudioGain.h>
 #include <utils/Log.h>
 
-#include <math.h>
-
 namespace android {
 
-AudioGain::AudioGain(int index, bool useInChannelMask)
-{
-    mIndex = index;
-    mUseInChannelMask = useInChannelMask;
-    memset(&mGain, 0, sizeof(struct audio_gain));
-}
+AudioGain::AudioGain(int index, bool isInput)
+        : mIndex(index), mIsInput(isInput) {}
 
 void AudioGain::getDefaultConfig(struct audio_gain_config *config)
 {
@@ -49,12 +45,9 @@
     if ((mGain.mode & AUDIO_GAIN_MODE_JOINT) == AUDIO_GAIN_MODE_JOINT) {
         config->values[0] = mGain.default_value;
     } else {
-        uint32_t numValues;
-        if (mUseInChannelMask) {
-            numValues = audio_channel_count_from_in_mask(mGain.channel_mask);
-        } else {
-            numValues = audio_channel_count_from_out_mask(mGain.channel_mask);
-        }
+        const uint32_t numValues = mIsInput ?
+                audio_channel_count_from_in_mask(mGain.channel_mask) :
+                audio_channel_count_from_out_mask(mGain.channel_mask);
         for (size_t i = 0; i < numValues; i++) {
             config->values[i] = mGain.default_value;
         }
@@ -78,12 +71,9 @@
         if ((config->channel_mask & ~mGain.channel_mask) != 0) {
             return BAD_VALUE;
         }
-        uint32_t numValues;
-        if (mUseInChannelMask) {
-            numValues = audio_channel_count_from_in_mask(config->channel_mask);
-        } else {
-            numValues = audio_channel_count_from_out_mask(config->channel_mask);
-        }
+        const uint32_t numValues = mIsInput ?
+                audio_channel_count_from_in_mask(config->channel_mask) :
+                audio_channel_count_from_out_mask(config->channel_mask);
         for (size_t i = 0; i < numValues; i++) {
             if ((config->values[i] < mGain.min_value) ||
                     (config->values[i] > mGain.max_value)) {
@@ -116,7 +106,7 @@
 bool AudioGain::equals(const sp<AudioGain>& other) const
 {
     return other != nullptr &&
-           mUseInChannelMask == other->mUseInChannelMask &&
+           mIsInput == other->mIsInput &&
            mUseForVolume == other->mUseForVolume &&
            // Compare audio gain
            mGain.mode == other->mGain.mode &&
@@ -129,51 +119,24 @@
            mGain.max_ramp_ms == other->mGain.max_ramp_ms;
 }
 
-status_t AudioGain::writeToParcel(android::Parcel *parcel) const {
-    media::AudioGain parcelable;
-    return writeToParcelable(&parcelable)
-        ?: parcelable.writeToParcel(parcel);
+ConversionResult<AudioGain::Aidl> AudioGain::toParcelable() const {
+    media::audio::common::AudioGain aidl = VALUE_OR_RETURN(
+            legacy2aidl_audio_gain_AudioGain(mGain, mIsInput));
+    aidl.useForVolume = mUseForVolume;
+    media::AudioGainSys aidlSys;
+    aidlSys.index = VALUE_OR_RETURN(convertIntegral<int32_t>(mIndex));
+    aidlSys.isInput = mIsInput;
+    return std::make_pair(aidl, aidlSys);
 }
 
-status_t AudioGain::writeToParcelable(media::AudioGain* parcelable) const {
-    parcelable->index = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mIndex));
-    parcelable->useInChannelMask = mUseInChannelMask;
-    parcelable->useForVolume = mUseForVolume;
-    parcelable->mode = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_gain_mode_t_int32_t_mask(mGain.mode));
-    parcelable->channelMask = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_channel_mask_t_int32_t(mGain.channel_mask));
-    parcelable->minValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.min_value));
-    parcelable->maxValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.max_value));
-    parcelable->defaultValue = VALUE_OR_RETURN_STATUS(
-            convertIntegral<int32_t>(mGain.default_value));
-    parcelable->stepValue = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.step_value));
-    parcelable->minRampMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.min_ramp_ms));
-    parcelable->maxRampMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.max_ramp_ms));
-    return OK;
-}
-
-status_t AudioGain::readFromParcel(const android::Parcel *parcel) {
-    media::AudioGain parcelable;
-    return parcelable.readFromParcel(parcel)
-        ?: readFromParcelable(parcelable);
-}
-
-status_t AudioGain::readFromParcelable(const media::AudioGain& parcelable) {
-    mIndex = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.index));
-    mUseInChannelMask = parcelable.useInChannelMask;
-    mUseForVolume = parcelable.useForVolume;
-    mGain.mode = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_int32_t_audio_gain_mode_t_mask(parcelable.mode));
-    mGain.channel_mask = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_int32_t_audio_channel_mask_t(parcelable.channelMask));
-    mGain.min_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.minValue));
-    mGain.max_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.maxValue));
-    mGain.default_value = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.defaultValue));
-    mGain.step_value = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.stepValue));
-    mGain.min_ramp_ms = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.minRampMs));
-    mGain.max_ramp_ms = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.maxRampMs));
-    return OK;
+ConversionResult<sp<AudioGain>> AudioGain::fromParcelable(const AudioGain::Aidl& aidl) {
+    const media::audio::common::AudioGain& hal = aidl.first;
+    const media::AudioGainSys& sys = aidl.second;
+    auto index = VALUE_OR_RETURN(convertIntegral<int>(sys.index));
+    sp<AudioGain> legacy = sp<AudioGain>::make(index, sys.isInput);
+    legacy->mGain = VALUE_OR_RETURN(aidl2legacy_AudioGain_audio_gain(hal, sys.isInput));
+    legacy->mUseForVolume = hal.useForVolume;
+    return legacy;
 }
 
 bool AudioGains::equals(const AudioGains &other) const
@@ -184,59 +147,30 @@
                       });
 }
 
-status_t AudioGains::writeToParcel(android::Parcel *parcel) const {
-    status_t status = NO_ERROR;
-    if ((status = parcel->writeVectorSize(*this)) != NO_ERROR) return status;
-    for (const auto &audioGain : *this) {
-        if ((status = parcel->writeParcelable(*audioGain)) != NO_ERROR) {
-            break;
-        }
-    }
-    return status;
-}
-
-status_t AudioGains::readFromParcel(const android::Parcel *parcel) {
-    status_t status = NO_ERROR;
-    this->clear();
-    if ((status = parcel->resizeOutVector(this)) != NO_ERROR) return status;
-    for (size_t i = 0; i < this->size(); i++) {
-        this->at(i) = new AudioGain(0, false);
-        if ((status = parcel->readParcelable(this->at(i).get())) != NO_ERROR) {
-            this->clear();
-            break;
-        }
-    }
-    return status;
-}
-
 ConversionResult<sp<AudioGain>>
-aidl2legacy_AudioGain(const media::AudioGain& aidl) {
-    sp<AudioGain> legacy = new AudioGain(0, false);
-    status_t status = legacy->readFromParcelable(aidl);
-    if (status != OK) {
-        return base::unexpected(status);
-    }
-    return legacy;
+aidl2legacy_AudioGain(const AudioGain::Aidl& aidl) {
+    return AudioGain::fromParcelable(aidl);
 }
 
-ConversionResult<media::AudioGain>
+ConversionResult<AudioGain::Aidl>
 legacy2aidl_AudioGain(const sp<AudioGain>& legacy) {
-    media::AudioGain aidl;
-    status_t status = legacy->writeToParcelable(&aidl);
-    if (status != OK) {
-        return base::unexpected(status);
-    }
-    return aidl;
+    return legacy->toParcelable();
 }
 
 ConversionResult<AudioGains>
-aidl2legacy_AudioGains(const std::vector<media::AudioGain>& aidl) {
-    return convertContainer<AudioGains>(aidl, aidl2legacy_AudioGain);
+aidl2legacy_AudioGains(const AudioGains::Aidl& aidl) {
+    return convertContainers<AudioGains>(aidl.first, aidl.second,
+            [](const media::audio::common::AudioGain& g,
+               const media::AudioGainSys& gs) {
+                return aidl2legacy_AudioGain(std::make_pair(g, gs));
+            });
 }
 
-ConversionResult<std::vector<media::AudioGain>>
+ConversionResult<AudioGains::Aidl>
 legacy2aidl_AudioGains(const AudioGains& legacy) {
-    return convertContainer<std::vector<media::AudioGain>>(legacy, legacy2aidl_AudioGain);
+    return convertContainerSplit<
+            std::vector<media::audio::common::AudioGain>,
+            std::vector<media::AudioGainSys>>(legacy, legacy2aidl_AudioGain);
 }
 
 } // namespace android
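
Not part of the patch: a hedged sketch of the split "common + system" parcelable shape that AudioGain::Aidl now denotes, reusing only names introduced above; the gain instance and the helper function are placeholders.

void gainRoundTripSketch() {
    using namespace android;
    sp<AudioGain> gain = sp<AudioGain>::make(0 /*index*/, true /*isInput*/);
    // AudioGain::Aidl pairs the stable common AudioGain parcelable with the
    // framework-only AudioGainSys part (index, isInput).
    ConversionResult<AudioGain::Aidl> aidl = legacy2aidl_AudioGain(gain);
    if (aidl.ok()) {
        ConversionResult<sp<AudioGain>> back = aidl2legacy_AudioGain(aidl.value());
        // On success, back.value()->equals(gain) is expected to hold.
        (void)back;
    }
}
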
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index fafabd9..4513323 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -18,13 +18,28 @@
 #include <algorithm>
 #include <utility>
 
-#include <android/media/ExtraAudioDescriptor.h>
 #include <android-base/stringprintf.h>
 #include <media/AudioPort.h>
 #include <utils/Log.h>
 
 namespace android {
 
+void AudioPort::setFlags(uint32_t flags)
+{
+    // force direct flag if offload flag is set: offloading implies a direct output stream
+    // and all common behaviors are driven by checking only the direct flag
+    // this should normally be set appropriately in the policy configuration file
+    if (mRole == AUDIO_PORT_ROLE_SOURCE &&
+            (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+        flags |= AUDIO_OUTPUT_FLAG_DIRECT;
+    }
+    if (useInputChannelMask()) {
+        mFlags.input = static_cast<audio_input_flags_t>(flags);
+    } else {
+        mFlags.output = static_cast<audio_output_flags_t>(flags);
+    }
+}
+
 void AudioPort::importAudioPort(const sp<AudioPort>& port, bool force __unused)
 {
     for (const auto& profileToImport : port->mProfiles) {
@@ -147,9 +162,16 @@
     }
 }
 
-void AudioPort::dump(std::string *dst, int spaces, bool verbose) const {
+void AudioPort::dump(std::string *dst, int spaces, const char* extraInfo, bool verbose) const {
     if (!mName.empty()) {
-        dst->append(base::StringPrintf("%*s- name: %s\n", spaces, "", mName.c_str()));
+        dst->append(base::StringPrintf("\"%s\"%s", mName.c_str(),
+                        extraInfo != nullptr ? "; " : ""));
+    }
+    if (extraInfo != nullptr) {
+        dst->append(base::StringPrintf("%s", extraInfo));
+    }
+    if (!mName.empty() || extraInfo != nullptr) {
+        dst->append("\n");
     }
     if (verbose) {
         std::string profilesStr;
@@ -196,39 +218,59 @@
            mType == other->getType() &&
            mRole == other->getRole() &&
            mProfiles.equals(other->getAudioProfiles()) &&
+           getFlags() == other->getFlags() &&
            mExtraAudioDescriptors == other->getExtraAudioDescriptors();
 }
 
-status_t AudioPort::writeToParcel(Parcel *parcel) const
-{
-    media::AudioPort parcelable;
-    return writeToParcelable(&parcelable)
-        ?: parcelable.writeToParcel(parcel);
-}
-
 status_t AudioPort::writeToParcelable(media::AudioPort* parcelable) const {
-    parcelable->name = mName;
-    parcelable->type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_type_t_AudioPortType(mType));
-    parcelable->role = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_role_t_AudioPortRole(mRole));
-    parcelable->profiles = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioProfileVector(mProfiles));
-    parcelable->extraAudioDescriptors = mExtraAudioDescriptors;
-    parcelable->gains = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioGains(mGains));
+    parcelable->hal.name = mName;
+    parcelable->sys.type = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_type_t_AudioPortType(mType));
+    parcelable->sys.role = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_role_t_AudioPortRole(mRole));
+    auto aidlProfiles = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioProfileVector(mProfiles, useInputChannelMask()));
+    parcelable->hal.profiles = aidlProfiles.first;
+    parcelable->sys.profiles = aidlProfiles.second;
+    parcelable->hal.flags = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_io_flags_AudioIoFlags(mFlags, useInputChannelMask()));
+    parcelable->hal.extraAudioDescriptors = mExtraAudioDescriptors;
+    auto aidlGains = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioGains(mGains));
+    parcelable->hal.gains = aidlGains.first;
+    parcelable->sys.gains = aidlGains.second;
+    if (mType == AUDIO_PORT_TYPE_MIX) {
+        media::audio::common::AudioPortMixExt mixExt{};
+        mixExt.maxOpenStreamCount = maxOpenCount;
+        mixExt.maxActiveStreamCount = maxActiveCount;
+        mixExt.recommendedMuteDurationMs = recommendedMuteDurationMs;
+        parcelable->hal.ext = media::audio::common::AudioPortExt::make<
+                media::audio::common::AudioPortExt::mix>(mixExt);
+    }
     return OK;
 }
 
-status_t AudioPort::readFromParcel(const Parcel *parcel) {
-    media::AudioPort parcelable;
-    return parcelable.readFromParcel(parcel)
-        ?: readFromParcelable(parcelable);
-}
-
 status_t AudioPort::readFromParcelable(const media::AudioPort& parcelable) {
-    mName = parcelable.name;
-    mType = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortType_audio_port_type_t(parcelable.type));
-    mRole = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortRole_audio_port_role_t(parcelable.role));
-    mProfiles = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioProfileVector(parcelable.profiles));
-    mExtraAudioDescriptors = parcelable.extraAudioDescriptors;
-    mGains = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioGains(parcelable.gains));
+    mName = parcelable.hal.name;
+    mType = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioPortType_audio_port_type_t(parcelable.sys.type));
+    mRole = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioPortRole_audio_port_role_t(parcelable.sys.role));
+    mProfiles = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioProfileVector(
+                    std::make_pair(parcelable.hal.profiles, parcelable.sys.profiles),
+                    useInputChannelMask()));
+    mFlags = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioIoFlags_audio_io_flags(parcelable.hal.flags, useInputChannelMask()));
+    mExtraAudioDescriptors = parcelable.hal.extraAudioDescriptors;
+    mGains = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioGains(std::make_pair(parcelable.hal.gains, parcelable.sys.gains)));
+    if (mType == AUDIO_PORT_TYPE_MIX) {
+        const media::audio::common::AudioPortMixExt& mixExt =
+                parcelable.hal.ext.get<media::audio::common::AudioPortExt::mix>();
+        maxOpenCount = mixExt.maxOpenStreamCount;
+        maxActiveCount = mixExt.maxActiveStreamCount;
+        recommendedMuteDurationMs = mixExt.recommendedMuteDurationMs;
+    }
     return OK;
 }
 
@@ -250,6 +292,9 @@
     if (config->config_mask & AUDIO_PORT_CONFIG_GAIN) {
         mGain = config->gain;
     }
+    if (config->config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+        mFlags = config->flags;
+    }
 
     return NO_ERROR;
 }
@@ -303,6 +348,9 @@
     } else {
         dstConfig->config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
     }
+
+    updateField(mFlags, &audio_port_config::flags,
+            dstConfig, srcConfig, AUDIO_PORT_CONFIG_FLAGS, { AUDIO_INPUT_FLAG_NONE });
 }
 
 bool AudioPortConfig::hasGainController(bool canUseForVolume) const
@@ -315,12 +363,14 @@
                            : audioport->getGains().size() > 0;
 }
 
-bool AudioPortConfig::equals(const sp<AudioPortConfig> &other) const
+bool AudioPortConfig::equals(const sp<AudioPortConfig> &other, bool isInput) const
 {
     return other != nullptr &&
            mSamplingRate == other->getSamplingRate() &&
            mFormat == other->getFormat() &&
            mChannelMask == other->getChannelMask() &&
+           (isInput ? mFlags.input == other->getFlags().input :
+                   mFlags.output == other->getFlags().output) &&
            // Compare audio gain config
            mGain.index == other->mGain.index &&
            mGain.mode == other->mGain.mode &&
@@ -330,54 +380,47 @@
            mGain.ramp_duration_ms == other->mGain.ramp_duration_ms;
 }
 
-status_t AudioPortConfig::writeToParcel(Parcel *parcel) const {
-    media::AudioPortConfig parcelable;
-    return writeToParcelable(&parcelable)
-        ?: parcelable.writeToParcel(parcel);
-}
-
-status_t AudioPortConfig::writeToParcelable(media::AudioPortConfig* parcelable) const {
-    parcelable->sampleRate = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mSamplingRate));
-    parcelable->format = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_format_t_AudioFormat(mFormat));
+status_t AudioPortConfig::writeToParcelable(
+        media::audio::common::AudioPortConfig* parcelable, bool isInput) const {
+    media::audio::common::Int aidl_sampleRate;
+    aidl_sampleRate.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mSamplingRate));
+    parcelable->sampleRate = aidl_sampleRate;
+    parcelable->format = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_format_t_AudioFormatDescription(mFormat));
     parcelable->channelMask = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_channel_mask_t_int32_t(mChannelMask));
+            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(mChannelMask, isInput));
     parcelable->id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
-    parcelable->gain.index = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(mGain.index));
-    parcelable->gain.mode = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_gain_mode_t_int32_t_mask(mGain.mode));
-    parcelable->gain.channelMask = VALUE_OR_RETURN_STATUS(
-            legacy2aidl_audio_channel_mask_t_int32_t(mGain.channel_mask));
-    parcelable->gain.rampDurationMs = VALUE_OR_RETURN_STATUS(
-            convertIntegral<int32_t>(mGain.ramp_duration_ms));
-    parcelable->gain.values = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
-            mGain.values, convertIntegral<int32_t, int>));
+    media::audio::common::AudioGainConfig aidl_gain = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_gain_config_AudioGainConfig(mGain, isInput));
+    parcelable->gain = aidl_gain;
+    parcelable->flags = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_io_flags_AudioIoFlags(mFlags, isInput));
     return OK;
 }
 
-status_t AudioPortConfig::readFromParcel(const Parcel *parcel) {
-    media::AudioPortConfig parcelable;
-    return parcelable.readFromParcel(parcel)
-        ?: readFromParcelable(parcelable);
-}
-
-status_t AudioPortConfig::readFromParcelable(const media::AudioPortConfig& parcelable) {
-    mSamplingRate = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(parcelable.sampleRate));
-    mFormat = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioFormat_audio_format_t(parcelable.format));
-    mChannelMask = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_int32_t_audio_channel_mask_t(parcelable.channelMask));
-    mId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(parcelable.id));
-    mGain.index = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.gain.index));
-    mGain.mode = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_int32_t_audio_gain_mode_t_mask(parcelable.gain.mode));
-    mGain.channel_mask = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_int32_t_audio_channel_mask_t(parcelable.gain.channelMask));
-    mGain.ramp_duration_ms = VALUE_OR_RETURN_STATUS(
-            convertIntegral<unsigned int>(parcelable.gain.rampDurationMs));
-    if (parcelable.gain.values.size() > std::size(mGain.values)) {
-        return BAD_VALUE;
+status_t AudioPortConfig::readFromParcelable(
+        const media::audio::common::AudioPortConfig& parcelable, bool isInput) {
+    if (parcelable.sampleRate.has_value()) {
+        mSamplingRate = VALUE_OR_RETURN_STATUS(
+                convertIntegral<unsigned int>(parcelable.sampleRate.value().value));
     }
-    for (size_t i = 0; i < parcelable.gain.values.size(); ++i) {
-        mGain.values[i] = VALUE_OR_RETURN_STATUS(convertIntegral<int>(parcelable.gain.values[i]));
+    if (parcelable.format.has_value()) {
+        mFormat = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioFormatDescription_audio_format_t(parcelable.format.value()));
+    }
+    if (parcelable.channelMask.has_value()) {
+        mChannelMask = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+                        parcelable.channelMask.value(), isInput));
+    }
+    mId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(parcelable.id));
+    if (parcelable.gain.has_value()) {
+        mGain = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioGainConfig_audio_gain_config(parcelable.gain.value(), isInput));
+    }
+    if (parcelable.flags.has_value()) {
+        mFlags = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioIoFlags_audio_io_flags(parcelable.flags.value(), isInput));
     }
     return OK;
 }
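
Not part of the patch: a hedged sketch of the partial-update semantics above, where only the optional fields present in the AIDL AudioPortConfig overwrite the corresponding legacy fields. The 48 kHz value, the helper name, and the target object are illustrative.

void applyPartialConfigSketch(android::AudioPortConfig* existingConfig) {
    media::audio::common::AudioPortConfig partial;
    media::audio::common::Int rate;
    rate.value = 48000;
    partial.sampleRate = rate;  // format, channelMask, gain and flags stay unset
    // Only the fields carried by the parcelable overwrite the legacy state;
    // absent optionals leave the corresponding members untouched.
    (void)existingConfig->readFromParcelable(partial, false /*isInput*/);
}
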
diff --git a/media/libaudiofoundation/AudioProfile.cpp b/media/libaudiofoundation/AudioProfile.cpp
index 8ac3f73..2170cd8 100644
--- a/media/libaudiofoundation/AudioProfile.cpp
+++ b/media/libaudiofoundation/AudioProfile.cpp
@@ -27,6 +27,8 @@
 
 namespace android {
 
+using media::audio::common::AudioChannelLayout;
+
 bool operator == (const AudioProfile &left, const AudioProfile &right)
 {
     return (left.getFormat() == right.getFormat()) &&
@@ -97,18 +99,14 @@
 
 void AudioProfile::dump(std::string *dst, int spaces) const
 {
-    dst->append(base::StringPrintf("%s%s%s\n", mIsDynamicFormat ? "[dynamic format]" : "",
+    dst->append(base::StringPrintf("\"%s\"; ", mName.c_str()));
+    dst->append(base::StringPrintf("%s%s%s%s", mIsDynamicFormat ? "[dynamic format]" : "",
              mIsDynamicChannels ? "[dynamic channels]" : "",
-             mIsDynamicRate ? "[dynamic rates]" : ""));
-    if (mName.length() != 0) {
-        dst->append(base::StringPrintf("%*s- name: %s\n", spaces, "", mName.c_str()));
-    }
-    std::string formatLiteral;
-    if (FormatConverter::toString(mFormat, formatLiteral)) {
-        dst->append(base::StringPrintf("%*s- format: %s\n", spaces, "", formatLiteral.c_str()));
-    }
+             mIsDynamicRate ? "[dynamic rates]" : "", isDynamic() ? "; " : ""));
+    dst->append(base::StringPrintf("%s (0x%x)\n", audio_format_to_string(mFormat), mFormat));
+
     if (!mSamplingRates.empty()) {
-        dst->append(base::StringPrintf("%*s- sampling rates:", spaces, ""));
+        dst->append(base::StringPrintf("%*ssampling rates: ", spaces, ""));
         for (auto it = mSamplingRates.begin(); it != mSamplingRates.end();) {
             dst->append(base::StringPrintf("%d", *it));
             dst->append(++it == mSamplingRates.end() ? "" : ", ");
@@ -117,7 +115,7 @@
     }
 
     if (!mChannelMasks.empty()) {
-        dst->append(base::StringPrintf("%*s- channel masks:", spaces, ""));
+        dst->append(base::StringPrintf("%*schannel masks: ", spaces, ""));
         for (auto it = mChannelMasks.begin(); it != mChannelMasks.end();) {
             dst->append(base::StringPrintf("0x%04x", *it));
             dst->append(++it == mChannelMasks.end() ? "" : ", ");
@@ -126,19 +124,20 @@
     }
 
     dst->append(base::StringPrintf(
-            "%*s- encapsulation type: %#x\n", spaces, "", mEncapsulationType));
+             "%*s%s\n", spaces, "", audio_encapsulation_type_to_string(mEncapsulationType)));
 }
 
-bool AudioProfile::equals(const sp<AudioProfile>& other) const
+bool AudioProfile::equals(const sp<AudioProfile>& other, bool ignoreDynamicFlags) const
 {
     return other != nullptr &&
            mName.compare(other->mName) == 0 &&
            mFormat == other->getFormat() &&
            mChannelMasks == other->getChannels() &&
            mSamplingRates == other->getSampleRates() &&
-           mIsDynamicFormat == other->isDynamicFormat() &&
-           mIsDynamicChannels == other->isDynamicChannels() &&
-           mIsDynamicRate == other->isDynamicRate() &&
+           (ignoreDynamicFlags ||
+               (mIsDynamicFormat == other->isDynamicFormat() &&
+               mIsDynamicChannels == other->isDynamicChannels() &&
+               mIsDynamicRate == other->isDynamicRate())) &&
            mEncapsulationType == other->getEncapsulationType();
 }
 
@@ -154,67 +153,88 @@
     return *this;
 }
 
-status_t AudioProfile::writeToParcel(Parcel *parcel) const {
-    media::AudioProfile parcelable = VALUE_OR_RETURN_STATUS(toParcelable());
-    return parcelable.writeToParcel(parcel);
- }
+ConversionResult<AudioProfile::Aidl>
+AudioProfile::toParcelable(bool isInput) const {
+    media::audio::common::AudioProfile parcelable = VALUE_OR_RETURN(toCommonParcelable(isInput));
+    media::AudioProfileSys parcelableSys;
+    parcelableSys.isDynamicFormat = mIsDynamicFormat;
+    parcelableSys.isDynamicChannels = mIsDynamicChannels;
+    parcelableSys.isDynamicRate = mIsDynamicRate;
+    return std::make_pair(parcelable, parcelableSys);
+}
 
-ConversionResult<media::AudioProfile>
-AudioProfile::toParcelable() const {
-    media::AudioProfile parcelable;
+ConversionResult<sp<AudioProfile>> AudioProfile::fromParcelable(
+        const AudioProfile::Aidl& aidl, bool isInput) {
+    sp<AudioProfile> legacy = VALUE_OR_RETURN(fromCommonParcelable(aidl.first, isInput));
+    const auto& parcelableSys = aidl.second;
+    legacy->mIsDynamicFormat = parcelableSys.isDynamicFormat;
+    legacy->mIsDynamicChannels = parcelableSys.isDynamicChannels;
+    legacy->mIsDynamicRate = parcelableSys.isDynamicRate;
+    return legacy;
+}
+
+ConversionResult<media::audio::common::AudioProfile>
+AudioProfile::toCommonParcelable(bool isInput) const {
+    media::audio::common::AudioProfile parcelable;
     parcelable.name = mName;
-    parcelable.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(mFormat));
+    parcelable.format = VALUE_OR_RETURN(
+            legacy2aidl_audio_format_t_AudioFormatDescription(mFormat));
+    // Note: legacy 'audio_profile' imposes a limit on the number of
+    // channel masks and sampling rates. That's why it's not used here
+    // and conversions are performed directly on the fields instead
+    // of using 'legacy2aidl_audio_profile_AudioProfile' from AidlConversion.
     parcelable.channelMasks = VALUE_OR_RETURN(
-            convertContainer<std::vector<int32_t>>(mChannelMasks,
-                                                   legacy2aidl_audio_channel_mask_t_int32_t));
-    parcelable.samplingRates = VALUE_OR_RETURN(
+            convertContainer<std::vector<AudioChannelLayout>>(
+            mChannelMasks,
+            [isInput](audio_channel_mask_t m) {
+                return legacy2aidl_audio_channel_mask_t_AudioChannelLayout(m, isInput);
+            }));
+    parcelable.sampleRates = VALUE_OR_RETURN(
             convertContainer<std::vector<int32_t>>(mSamplingRates,
                                                    convertIntegral<int32_t, uint32_t>));
-    parcelable.isDynamicFormat = mIsDynamicFormat;
-    parcelable.isDynamicChannels = mIsDynamicChannels;
-    parcelable.isDynamicRate = mIsDynamicRate;
     parcelable.encapsulationType = VALUE_OR_RETURN(
             legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(mEncapsulationType));
     return parcelable;
 }
 
-status_t AudioProfile::readFromParcel(const Parcel *parcel) {
-    media::AudioProfile parcelable;
-    if (status_t status = parcelable.readFromParcel(parcel); status != OK) {
-        return status;
-    }
-    *this = *VALUE_OR_RETURN_STATUS(fromParcelable(parcelable));
-    return OK;
-}
-
-ConversionResult<sp<AudioProfile>>
-AudioProfile::fromParcelable(const media::AudioProfile& parcelable) {
+ConversionResult<sp<AudioProfile>> AudioProfile::fromCommonParcelable(
+        const media::audio::common::AudioProfile& aidl, bool isInput) {
     sp<AudioProfile> legacy = new AudioProfile();
-    legacy->mName = parcelable.name;
-    legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(parcelable.format));
+    legacy->mName = aidl.name;
+    legacy->mFormat = VALUE_OR_RETURN(
+            aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
     legacy->mChannelMasks = VALUE_OR_RETURN(
-            convertContainer<ChannelMaskSet>(parcelable.channelMasks,
-                                             aidl2legacy_int32_t_audio_channel_mask_t));
+            convertContainer<ChannelMaskSet>(aidl.channelMasks,
+            [isInput](const AudioChannelLayout& l) {
+                return aidl2legacy_AudioChannelLayout_audio_channel_mask_t(l, isInput);
+            }));
     legacy->mSamplingRates = VALUE_OR_RETURN(
-            convertContainer<SampleRateSet>(parcelable.samplingRates,
+            convertContainer<SampleRateSet>(aidl.sampleRates,
                                             convertIntegral<uint32_t, int32_t>));
-    legacy->mIsDynamicFormat = parcelable.isDynamicFormat;
-    legacy->mIsDynamicChannels = parcelable.isDynamicChannels;
-    legacy->mIsDynamicRate = parcelable.isDynamicRate;
     legacy->mEncapsulationType = VALUE_OR_RETURN(
             aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
-                    parcelable.encapsulationType));
+                    aidl.encapsulationType));
     return legacy;
 }
 
 ConversionResult<sp<AudioProfile>>
-aidl2legacy_AudioProfile(const media::AudioProfile& aidl) {
-    return AudioProfile::fromParcelable(aidl);
+aidl2legacy_AudioProfile(const AudioProfile::Aidl& aidl, bool isInput) {
+    return AudioProfile::fromParcelable(aidl, isInput);
 }
 
-ConversionResult<media::AudioProfile>
-legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy) {
-    return legacy->toParcelable();
+ConversionResult<AudioProfile::Aidl>
+legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy, bool isInput) {
+    return legacy->toParcelable(isInput);
+}
+
+ConversionResult<sp<AudioProfile>>
+aidl2legacy_AudioProfile_common(const media::audio::common::AudioProfile& aidl, bool isInput) {
+    return AudioProfile::fromCommonParcelable(aidl, isInput);
+}
+
+ConversionResult<media::audio::common::AudioProfile>
+legacy2aidl_AudioProfile_common(const sp<AudioProfile>& legacy, bool isInput) {
+    return legacy->toCommonParcelable(isInput);
 }
 
 ssize_t AudioProfileVector::add(const sp<AudioProfile> &profile)
@@ -307,10 +327,10 @@
     return false;
 }
 
-bool AudioProfileVector::contains(const sp<AudioProfile>& profile) const
+bool AudioProfileVector::contains(const sp<AudioProfile>& profile, bool ignoreDynamicFlags) const
 {
     for (const auto& audioProfile : *this) {
-        if (audioProfile->equals(profile)) {
+        if (audioProfile->equals(profile, ignoreDynamicFlags)) {
             return true;
         }
     }
@@ -319,42 +339,16 @@
 
 void AudioProfileVector::dump(std::string *dst, int spaces) const
 {
-    dst->append(base::StringPrintf("%*s- Profiles:\n", spaces, ""));
+    dst->append(base::StringPrintf("%*s- Profiles (%zu):\n", spaces - 2, "", size()));
     for (size_t i = 0; i < size(); i++) {
-        dst->append(base::StringPrintf("%*sProfile %zu:", spaces + 4, "", i));
+        const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", i + 1);
+        dst->append(prefix);
         std::string profileStr;
-        at(i)->dump(&profileStr, spaces + 8);
+        at(i)->dump(&profileStr, prefix.size());
         dst->append(profileStr);
     }
 }
 
-status_t AudioProfileVector::writeToParcel(Parcel *parcel) const
-{
-    status_t status = NO_ERROR;
-    if ((status = parcel->writeVectorSize(*this)) != NO_ERROR) return status;
-    for (const auto &audioProfile : *this) {
-        if ((status = parcel->writeParcelable(*audioProfile)) != NO_ERROR) {
-            break;
-        }
-    }
-    return status;
-}
-
-status_t AudioProfileVector::readFromParcel(const Parcel *parcel)
-{
-    status_t status = NO_ERROR;
-    this->clear();
-    if ((status = parcel->resizeOutVector(this)) != NO_ERROR) return status;
-    for (size_t i = 0; i < this->size(); ++i) {
-        this->at(i) = new AudioProfile(AUDIO_FORMAT_DEFAULT, AUDIO_CHANNEL_NONE, 0 /*sampleRate*/);
-        if ((status = parcel->readParcelable(this->at(i).get())) != NO_ERROR) {
-            this->clear();
-            break;
-        }
-    }
-    return status;
-}
-
 bool AudioProfileVector::equals(const AudioProfileVector& other) const
 {
     return std::equal(begin(), end(), other.begin(), other.end(),
@@ -363,14 +357,31 @@
                       });
 }
 
-ConversionResult<AudioProfileVector>
-aidl2legacy_AudioProfileVector(const std::vector<media::AudioProfile>& aidl) {
-    return convertContainer<AudioProfileVector>(aidl, aidl2legacy_AudioProfile);
+void AudioProfileVector::addAllValidProfiles(const AudioProfileVector& audioProfiles) {
+    for (const auto& audioProfile : audioProfiles) {
+        if (audioProfile->isValid() && !contains(audioProfile, true /*ignoreDynamicFlags*/)) {
+            add(audioProfile);
+        }
+    }
 }
 
-ConversionResult<std::vector<media::AudioProfile>>
-legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy) {
-    return convertContainer<std::vector<media::AudioProfile>>(legacy, legacy2aidl_AudioProfile);
+ConversionResult<AudioProfileVector>
+aidl2legacy_AudioProfileVector(const AudioProfileVector::Aidl& aidl, bool isInput) {
+    return convertContainers<AudioProfileVector>(aidl.first, aidl.second,
+            [isInput](const media::audio::common::AudioProfile& p,
+                      const media::AudioProfileSys& ps) {
+                return aidl2legacy_AudioProfile(std::make_pair(p, ps), isInput);
+            });
+}
+
+ConversionResult<AudioProfileVector::Aidl>
+legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy, bool isInput) {
+    return convertContainerSplit<
+            std::vector<media::audio::common::AudioProfile>,
+            std::vector<media::AudioProfileSys>>(legacy,
+            [isInput](const sp<AudioProfile>& p) {
+                return legacy2aidl_AudioProfile(p, isInput);
+            });
 }
 
 AudioProfileVector intersectAudioProfiles(const AudioProfileVector& profiles1,
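
The conversion routines above follow the AidlConversion.h convention: every converter returns a ConversionResult, and container conversions apply a per-element converter, failing the whole call if any element fails (note how the channel-mask lambda captures isInput so the same AIDL layout can map to either an input or an output legacy mask). A minimal, self-contained stand-in for that container pattern, with std::optional in place of ConversionResult and a hypothetical sample-rate converter, could look like this:

#include <cstdint>
#include <iterator>
#include <optional>
#include <set>
#include <vector>

// Simplified stand-in for convertContainer: applies `conv` to every element and
// inserts the results into OutputContainer; a single failed element fails the call.
template <typename OutputContainer, typename InputContainer, typename Func>
std::optional<OutputContainer> convertContainerSketch(const InputContainer& input, Func conv) {
    OutputContainer output;
    auto it = std::inserter(output, output.end());
    for (const auto& item : input) {
        auto converted = conv(item);              // per-element std::optional result
        if (!converted.has_value()) return std::nullopt;
        *it++ = converted.value();
    }
    return output;
}

int main() {
    // Hypothetical element converter: AIDL sample rates are int32_t, legacy ones uint32_t.
    auto aidl2legacy_sampleRate = [](int32_t rate) -> std::optional<uint32_t> {
        if (rate < 0) return std::nullopt;        // negative rates are not representable
        return static_cast<uint32_t>(rate);
    };
    std::vector<int32_t> aidlRates{44100, 48000};
    auto legacyRates =
            convertContainerSketch<std::set<uint32_t>>(aidlRates, aidl2legacy_sampleRate);
    return legacyRates.has_value() ? 0 : 1;
}
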
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 5cfea81..5ffbffc 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -30,16 +30,20 @@
 {
 }
 
-DeviceDescriptorBase::DeviceDescriptorBase(audio_devices_t type, const std::string& address) :
-        DeviceDescriptorBase(AudioDeviceTypeAddr(type, address))
+DeviceDescriptorBase::DeviceDescriptorBase(
+        audio_devices_t type, const std::string& address,
+        const FormatVector &encodedFormats) :
+        DeviceDescriptorBase(AudioDeviceTypeAddr(type, address), encodedFormats)
 {
 }
 
-DeviceDescriptorBase::DeviceDescriptorBase(const AudioDeviceTypeAddr &deviceTypeAddr) :
+DeviceDescriptorBase::DeviceDescriptorBase(
+        const AudioDeviceTypeAddr &deviceTypeAddr, const FormatVector &encodedFormats) :
         AudioPort("", AUDIO_PORT_TYPE_DEVICE,
                   audio_is_output_device(deviceTypeAddr.mType) ? AUDIO_PORT_ROLE_SINK :
                                          AUDIO_PORT_ROLE_SOURCE),
-        mDeviceTypeAddr(deviceTypeAddr)
+        mDeviceTypeAddr(deviceTypeAddr),
+        mEncodedFormats(encodedFormats)
 {
     if (mDeviceTypeAddr.address().empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
         mDeviceTypeAddr.setAddress("0");
@@ -106,32 +110,23 @@
     return NO_ERROR;
 }
 
-void DeviceDescriptorBase::dump(std::string *dst, int spaces, int index,
+void DeviceDescriptorBase::dump(std::string *dst, int spaces,
                                 const char* extraInfo, bool verbose) const
 {
-    dst->append(base::StringPrintf("%*sDevice %d:\n", spaces, "", index + 1));
     if (mId != 0) {
-        dst->append(base::StringPrintf("%*s- id: %2d\n", spaces, "", mId));
+        dst->append(base::StringPrintf("Port ID: %d; ", mId));
     }
-
     if (extraInfo != nullptr) {
-        dst->append(extraInfo);
+        dst->append(base::StringPrintf("%s; ", extraInfo));
     }
-
-    dst->append(base::StringPrintf("%*s- type: %-48s\n",
-            spaces, "", ::android::toString(mDeviceTypeAddr.mType).c_str()));
+    dst->append(base::StringPrintf("{%s}\n",
+                    mDeviceTypeAddr.toString(true /*includeSensitiveInfo*/).c_str()));
 
     dst->append(base::StringPrintf(
-            "%*s- supported encapsulation modes: %u\n", spaces, "", mEncapsulationModes));
-    dst->append(base::StringPrintf(
-            "%*s- supported encapsulation metadata types: %u\n",
-            spaces, "", mEncapsulationMetadataTypes));
+                    "%*sEncapsulation modes: %u, metadata types: %u\n", spaces, "",
+                    mEncapsulationModes, mEncapsulationMetadataTypes));
 
-    if (mDeviceTypeAddr.address().size() != 0) {
-        dst->append(base::StringPrintf(
-                "%*s- address: %-32s\n", spaces, "", mDeviceTypeAddr.getAddress()));
-    }
-    AudioPort::dump(dst, spaces, verbose);
+    AudioPort::dump(dst, spaces, nullptr, verbose);
 }
 
 std::string DeviceDescriptorBase::toString(bool includeSensitiveInfo) const
@@ -148,60 +143,83 @@
     AudioPort::log("  ");
 }
 
+template<typename T>
+bool checkEqual(const T& f1, const T& f2)
+{
+    std::set<typename T::value_type> s1(f1.begin(), f1.end());
+    std::set<typename T::value_type> s2(f2.begin(), f2.end());
+    return s1 == s2;
+}
+
 bool DeviceDescriptorBase::equals(const sp<DeviceDescriptorBase> &other) const
 {
     return other != nullptr &&
            static_cast<const AudioPort*>(this)->equals(other) &&
-           static_cast<const AudioPortConfig*>(this)->equals(other) &&
-           mDeviceTypeAddr.equals(other->mDeviceTypeAddr);
+           static_cast<const AudioPortConfig*>(this)->equals(other, useInputChannelMask()) &&
+           mDeviceTypeAddr.equals(other->mDeviceTypeAddr) &&
+           checkEqual(mEncodedFormats, other->mEncodedFormats);
 }
 
-
-status_t DeviceDescriptorBase::writeToParcel(Parcel *parcel) const
+bool DeviceDescriptorBase::supportsFormat(audio_format_t format)
 {
-    media::AudioPort parcelable;
-    return writeToParcelable(&parcelable)
-        ?: parcelable.writeToParcel(parcel);
+    if (mEncodedFormats.empty()) {
+        return true;
+    }
+
+    for (const auto& devFormat : mEncodedFormats) {
+        if (devFormat == format) {
+            return true;
+        }
+    }
+    return false;
 }
 
 status_t DeviceDescriptorBase::writeToParcelable(media::AudioPort* parcelable) const {
     AudioPort::writeToParcelable(parcelable);
-    AudioPortConfig::writeToParcelable(&parcelable->activeConfig);
-    parcelable->id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
+    AudioPortConfig::writeToParcelable(&parcelable->sys.activeConfig.hal, useInputChannelMask());
+    parcelable->hal.id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
+    parcelable->sys.activeConfig.hal.portId = parcelable->hal.id;
 
-    media::AudioPortDeviceExt ext;
-    ext.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(mDeviceTypeAddr));
-    ext.encapsulationModes = VALUE_OR_RETURN_STATUS(
+    media::audio::common::AudioPortDeviceExt deviceExt;
+    deviceExt.device = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_AudioDeviceTypeAddress(mDeviceTypeAddr));
+    deviceExt.encodedFormats = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<media::audio::common::AudioFormatDescription>>(
+                    mEncodedFormats, legacy2aidl_audio_format_t_AudioFormatDescription));
+    UNION_SET(parcelable->hal.ext, device, deviceExt);
+    media::AudioPortDeviceExtSys deviceSys;
+    deviceSys.encapsulationModes = VALUE_OR_RETURN_STATUS(
             legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
-    ext.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
+    deviceSys.encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
             legacy2aidl_AudioEncapsulationMetadataType_mask(mEncapsulationMetadataTypes));
-    UNION_SET(parcelable->ext, device, std::move(ext));
+    UNION_SET(parcelable->sys.ext, device, deviceSys);
     return OK;
 }
 
-status_t DeviceDescriptorBase::readFromParcel(const Parcel *parcel) {
-    media::AudioPort parcelable;
-    return parcelable.readFromParcel(parcel)
-        ?: readFromParcelable(parcelable);
-}
-
 status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPort& parcelable) {
-    if (parcelable.type != media::AudioPortType::DEVICE) {
+    if (parcelable.sys.type != media::AudioPortType::DEVICE) {
         return BAD_VALUE;
     }
     status_t status = AudioPort::readFromParcelable(parcelable)
-                      ?: AudioPortConfig::readFromParcelable(parcelable.activeConfig);
+            ?: AudioPortConfig::readFromParcelable(
+                    parcelable.sys.activeConfig.hal, useInputChannelMask());
     if (status != OK) {
         return status;
     }
 
-    media::AudioPortDeviceExt ext = VALUE_OR_RETURN_STATUS(UNION_GET(parcelable.ext, device));
+    media::audio::common::AudioPortDeviceExt deviceExt = VALUE_OR_RETURN_STATUS(
+            UNION_GET(parcelable.hal.ext, device));
     mDeviceTypeAddr = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceTypeAddress(ext.device));
+            aidl2legacy_AudioDeviceTypeAddress(deviceExt.device));
+    mEncodedFormats = VALUE_OR_RETURN_STATUS(
+            convertContainer<FormatVector>(deviceExt.encodedFormats,
+                    aidl2legacy_AudioFormatDescription_audio_format_t));
+    media::AudioPortDeviceExtSys deviceSys = VALUE_OR_RETURN_STATUS(
+            UNION_GET(parcelable.sys.ext, device));
     mEncapsulationModes = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioEncapsulationMode_mask(ext.encapsulationModes));
+            aidl2legacy_AudioEncapsulationMode_mask(deviceSys.encapsulationModes));
     mEncapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioEncapsulationMetadataType_mask(ext.encapsulationMetadataTypes));
+            aidl2legacy_AudioEncapsulationMetadataType_mask(deviceSys.encapsulationMetadataTypes));
     return OK;
 }
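
DeviceDescriptorBase::equals now compares the encoded formats through the checkEqual helper added above, which copies both containers into std::set so that element order (and duplicates) do not affect the result. A standalone illustration of that behavior with toy data:

#include <cassert>
#include <set>
#include <vector>

// Same shape as the checkEqual helper in DeviceDescriptorBase.cpp: order-insensitive
// (and duplicate-insensitive) comparison of two containers.
template <typename T>
bool checkEqualSketch(const T& f1, const T& f2) {
    std::set<typename T::value_type> s1(f1.begin(), f1.end());
    std::set<typename T::value_type> s2(f2.begin(), f2.end());
    return s1 == s2;
}

int main() {
    std::vector<int> a{1, 2, 3};
    std::vector<int> b{3, 1, 2};     // same elements, different order
    std::vector<int> c{1, 2};
    assert(checkEqualSketch(a, b));
    assert(!checkEqualSketch(a, c));
    return 0;
}
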
 
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index f6d249a..efe8437 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -1,7 +1,15 @@
 {
   "presubmit": [
     {
-       "name": "audiofoundation_parcelable_test"
+      "name": "audiofoundation_parcelable_test"
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 204b365..b6e6c84 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -40,6 +40,8 @@
 const DeviceTypeSet& getAudioDeviceOutAllUsbSet();
 const DeviceTypeSet& getAudioDeviceInAllSet();
 const DeviceTypeSet& getAudioDeviceInAllUsbSet();
+const DeviceTypeSet& getAudioDeviceOutAllBleSet();
+const DeviceTypeSet& getAudioDeviceOutLeAudioUnicastSet();
 
 template<typename T>
 static std::vector<T> Intersection(const std::set<T>& a, const std::set<T>& b) {
@@ -110,25 +112,7 @@
     return types;
 }
 
-// FIXME: This is temporary helper function. Remove this when getting rid of all
-//  bit mask usages of audio device types.
-static inline DeviceTypeSet deviceTypesFromBitMask(audio_devices_t types) {
-    DeviceTypeSet deviceTypes;
-    if ((types & AUDIO_DEVICE_BIT_IN) == 0) {
-        for (auto deviceType : AUDIO_DEVICE_OUT_ALL_ARRAY) {
-            if ((types & deviceType) == deviceType) {
-                deviceTypes.insert(deviceType);
-            }
-        }
-    } else {
-        for (auto deviceType : AUDIO_DEVICE_IN_ALL_ARRAY) {
-            if ((types & deviceType) == deviceType) {
-                deviceTypes.insert(deviceType);
-            }
-        }
-    }
-    return deviceTypes;
-}
+std::string deviceTypesToString(const DeviceTypeSet& deviceTypes);
 
 bool deviceTypesToString(const DeviceTypeSet& deviceTypes, std::string &str);
 
@@ -137,7 +121,9 @@
 /**
  * Return human readable string for device types.
  */
-std::string toString(const DeviceTypeSet& deviceTypes);
+inline std::string toString(const DeviceTypeSet& deviceTypes) {
+    return deviceTypesToString(deviceTypes);
+}
 
 
 } // namespace android
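
AudioContainers.h keeps the Intersection helper visible in the context above for computing the elements common to two device-type sets; only its signature appears in this hunk. A typical implementation over two std::sets (which are always sorted), shown here as a sketch rather than the actual body:

#include <algorithm>
#include <iterator>
#include <set>
#include <vector>

// Sketch of a set-intersection helper over std::set, producing the common elements.
template <typename T>
std::vector<T> intersectionSketch(const std::set<T>& a, const std::set<T>& b) {
    std::vector<T> result;
    std::set_intersection(a.begin(), a.end(), b.begin(), b.end(), std::back_inserter(result));
    return result;
}

int main() {
    std::set<int> supported{1, 2, 4};
    std::set<int> requested{2, 4, 8};
    auto common = intersectionSketch(supported, requested);   // {2, 4}
    return common.size() == 2 ? 0 : 1;
}
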
diff --git a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
index 8edcc58..11aa222 100644
--- a/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
+++ b/media/libaudiofoundation/include/media/AudioDeviceTypeAddr.h
@@ -19,7 +19,7 @@
 #include <string>
 #include <vector>
 
-#include <android/media/AudioDevice.h>
+#include <android/media/audio/common/AudioDevice.h>
 #include <binder/Parcelable.h>
 #include <binder/Parcel.h>
 #include <media/AudioContainers.h>
@@ -32,6 +32,7 @@
 class AudioDeviceTypeAddr : public Parcelable {
 public:
     AudioDeviceTypeAddr() = default;
+    AudioDeviceTypeAddr(const AudioDeviceTypeAddr&) = default;
 
     AudioDeviceTypeAddr(audio_devices_t type, const std::string& address);
 
@@ -88,8 +89,8 @@
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<AudioDeviceTypeAddr>
-aidl2legacy_AudioDeviceTypeAddress(const media::AudioDevice& aidl);
-ConversionResult<media::AudioDevice>
+aidl2legacy_AudioDeviceTypeAddress(const media::audio::common::AudioDevice& aidl);
+ConversionResult<media::audio::common::AudioDevice>
 legacy2aidl_AudioDeviceTypeAddress(const AudioDeviceTypeAddr& legacy);
 
 } // namespace android
diff --git a/media/libaudiofoundation/include/media/AudioGain.h b/media/libaudiofoundation/include/media/AudioGain.h
index a06b686..10088f2 100644
--- a/media/libaudiofoundation/include/media/AudioGain.h
+++ b/media/libaudiofoundation/include/media/AudioGain.h
@@ -16,23 +16,23 @@
 
 #pragma once
 
-#include <android/media/AudioGain.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <android/media/AudioGainSys.h>
 #include <media/AidlConversion.h>
 #include <utils/Errors.h>
 #include <utils/RefBase.h>
 #include <system/audio.h>
-#include <string>
-#include <vector>
 
 namespace android {
 
-class AudioGain: public RefBase, public Parcelable
+class AudioGain: public RefBase
 {
 public:
-    AudioGain(int index, bool useInChannelMask);
-    virtual ~AudioGain() {}
+    AudioGain(int index, bool isInput);
+    virtual ~AudioGain() = default;
 
     void setMode(audio_gain_mode_t mode) { mGain.mode = mode; }
     const audio_gain_mode_t &getMode() const { return mGain.mode; }
@@ -71,26 +71,24 @@
 
     bool equals(const sp<AudioGain>& other) const;
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
-
-    status_t writeToParcelable(media::AudioGain* parcelable) const;
-    status_t readFromParcelable(const media::AudioGain& parcelable);
+    using Aidl = std::pair<media::audio::common::AudioGain, media::AudioGainSys>;
+    ConversionResult<Aidl> toParcelable() const;
+    static ConversionResult<sp<AudioGain>> fromParcelable(const Aidl& aidl);
 
 private:
     int               mIndex;
-    struct audio_gain mGain;
-    bool              mUseInChannelMask;
+    bool              mIsInput;
+    struct audio_gain mGain = {};
     bool              mUseForVolume = false;
 };
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<sp<AudioGain>>
-aidl2legacy_AudioGain(const media::AudioGain& aidl);
-ConversionResult<media::AudioGain>
+aidl2legacy_AudioGain(const AudioGain::Aidl& aidl);
+ConversionResult<AudioGain::Aidl>
 legacy2aidl_AudioGain(const sp<AudioGain>& legacy);
 
-class AudioGains : public std::vector<sp<AudioGain> >, public Parcelable
+class AudioGains : public std::vector<sp<AudioGain>>
 {
 public:
     bool canUseForVolume() const
@@ -103,7 +101,7 @@
         return false;
     }
 
-    int32_t add(const sp<AudioGain> gain)
+    int32_t add(const sp<AudioGain>& gain)
     {
         push_back(gain);
         return 0;
@@ -111,14 +109,15 @@
 
     bool equals(const AudioGains& other) const;
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
+    using Aidl = std::pair<
+            std::vector<media::audio::common::AudioGain>,
+            std::vector<media::AudioGainSys>>;
 };
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<AudioGains>
-aidl2legacy_AudioGains(const std::vector<media::AudioGain>& aidl);
-ConversionResult<std::vector<media::AudioGain>>
+aidl2legacy_AudioGains(const AudioGains::Aidl& aidl);
+ConversionResult<AudioGains::Aidl>
 legacy2aidl_AudioGains(const AudioGains& legacy);
 
 } // namespace android
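
The Aidl alias above splits one legacy AudioGain across two parcelables: the vendor-visible media::audio::common::AudioGain and a framework-only media::AudioGainSys that carries fields which never cross the HAL boundary. A toy sketch of that split-and-merge shape (every struct and field name below is hypothetical; only the pair-based layout mirrors the header):

#include <optional>
#include <utility>

// Hypothetical stand-ins for a vendor-visible parcelable and its framework-only companion.
struct CommonGainToy { int minValueMb; int maxValueMb; };
struct SysGainToy { bool useForVolume; };

// Legacy, in-process representation that both parcelables are derived from.
struct LegacyGainToy { int minValueMb; int maxValueMb; bool useForVolume; };

using GainAidlToy = std::pair<CommonGainToy, SysGainToy>;

std::optional<GainAidlToy> legacy2aidlToy(const LegacyGainToy& legacy) {
    return GainAidlToy{{legacy.minValueMb, legacy.maxValueMb}, {legacy.useForVolume}};
}

std::optional<LegacyGainToy> aidl2legacyToy(const GainAidlToy& aidl) {
    return LegacyGainToy{aidl.first.minValueMb, aidl.first.maxValueMb, aidl.second.useForVolume};
}

int main() {
    LegacyGainToy legacy{-3200, 600, true};
    auto aidl = legacy2aidlToy(legacy);
    std::optional<LegacyGainToy> roundTrip;
    if (aidl.has_value()) roundTrip = aidl2legacyToy(aidl.value());
    return (roundTrip.has_value() && roundTrip->useForVolume == legacy.useForVolume) ? 0 : 1;
}
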
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index 1cee1c9..d6a098f 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -21,7 +21,7 @@
 
 #include <android/media/AudioPort.h>
 #include <android/media/AudioPortConfig.h>
-#include <android/media/ExtraAudioDescriptor.h>
+#include <android/media/audio/common/ExtraAudioDescriptor.h>
 #include <binder/Parcel.h>
 #include <binder/Parcelable.h>
 #include <media/AudioGain.h>
@@ -33,7 +33,7 @@
 
 namespace android {
 
-class AudioPort : public virtual RefBase, public virtual Parcelable
+class AudioPort : public virtual RefBase
 {
 public:
     AudioPort(const std::string& name, audio_port_type_t type,  audio_port_role_t role) :
@@ -47,6 +47,9 @@
     audio_port_type_t getType() const { return mType; }
     audio_port_role_t getRole() const { return mRole; }
 
+    virtual void setFlags(uint32_t flags);
+    uint32_t getFlags() const { return useInputChannelMask() ? mFlags.input : mFlags.output; }
+
     void setGains(const AudioGains &gains) { mGains = gains; }
     const AudioGains &getGains() const { return mGains; }
 
@@ -69,10 +72,10 @@
     AudioProfileVector &getAudioProfiles() { return mProfiles; }
 
     void setExtraAudioDescriptors(
-            const std::vector<media::ExtraAudioDescriptor> extraAudioDescriptors) {
+            const std::vector<media::audio::common::ExtraAudioDescriptor> extraAudioDescriptors) {
         mExtraAudioDescriptors = extraAudioDescriptors;
     }
-    std::vector<media::ExtraAudioDescriptor> &getExtraAudioDescriptors() {
+    std::vector<media::audio::common::ExtraAudioDescriptor> &getExtraAudioDescriptors() {
         return mExtraAudioDescriptors;
     }
 
@@ -93,19 +96,47 @@
                 ((mType == AUDIO_PORT_TYPE_MIX) && (mRole == AUDIO_PORT_ROLE_SINK));
     }
 
-    void dump(std::string *dst, int spaces, bool verbose = true) const;
+    bool isDirectOutput() const
+    {
+        return (mType == AUDIO_PORT_TYPE_MIX) && (mRole == AUDIO_PORT_ROLE_SOURCE) &&
+                ((mFlags.output & AUDIO_OUTPUT_FLAG_DIRECT) != 0);
+    }
+
+    bool isMmap() const
+    {
+        return (mType == AUDIO_PORT_TYPE_MIX)
+                && (((mRole == AUDIO_PORT_ROLE_SOURCE) &&
+                        ((mFlags.output & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0))
+                    || ((mRole == AUDIO_PORT_ROLE_SINK) &&
+                        ((mFlags.input & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0)));
+    }
+
+    void dump(std::string *dst, int spaces,
+              const char* extraInfo = nullptr, bool verbose = true) const;
 
     void log(const char* indent) const;
 
     bool equals(const sp<AudioPort>& other) const;
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
-
     status_t writeToParcelable(media::AudioPort* parcelable) const;
     status_t readFromParcelable(const media::AudioPort& parcelable);
 
     AudioGains mGains; // gain controllers
+    // Maximum number of input or output streams that can be simultaneously
+    // opened for this profile. By convention 0 means no limit. To respect
+    // legacy behavior, initialized to 1 for output profiles and 0 for input
+    // profiles
+    // FIXME: IOProfile code used the same value for both cases.
+    uint32_t maxOpenCount = 1;
+    // Maximum number of input or output streams that can be simultaneously
+    // active for this profile. By convention 0 means no limit. To respect
+    // legacy behavior, initialized to 0 for output profiles and 1 for input
+    // profiles
+    // FIXME: IOProfile code used the same value for both cases.
+    uint32_t maxActiveCount = 1;
+    // Mute duration while changing device on this output profile.
+    uint32_t recommendedMuteDurationMs = 0;
+
 protected:
     std::string  mName;
     audio_port_type_t mType;
@@ -114,7 +145,8 @@
 
     // Audio capabilities that are defined by hardware descriptors when the format is unrecognized
     // by the platform, e.g. short audio descriptor in EDID for HDMI.
-    std::vector<media::ExtraAudioDescriptor> mExtraAudioDescriptors;
+    std::vector<media::audio::common::ExtraAudioDescriptor> mExtraAudioDescriptors;
+    union audio_io_flags mFlags = { .output = AUDIO_OUTPUT_FLAG_NONE };
 private:
     template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
                                         || std::is_same<T, struct audio_port_v7>::value, int> = 0>
@@ -130,7 +162,7 @@
 };
 
 
-class AudioPortConfig : public virtual RefBase, public virtual Parcelable
+class AudioPortConfig : public virtual RefBase
 {
 public:
     virtual ~AudioPortConfig() = default;
@@ -147,15 +179,16 @@
     audio_format_t getFormat() const { return mFormat; }
     audio_channel_mask_t getChannelMask() const { return mChannelMask; }
     audio_port_handle_t getId() const { return mId; }
+    audio_io_flags getFlags() const { return mFlags; }
 
     bool hasGainController(bool canUseForVolume = false) const;
 
-    bool equals(const sp<AudioPortConfig>& other) const;
+    bool equals(const sp<AudioPortConfig>& other, bool isInput) const;
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
-    status_t writeToParcelable(media::AudioPortConfig* parcelable) const;
-    status_t readFromParcelable(const media::AudioPortConfig& parcelable);
+    status_t writeToParcelable(
+            media::audio::common::AudioPortConfig* parcelable, bool isInput) const;
+    status_t readFromParcelable(
+            const media::audio::common::AudioPortConfig& parcelable, bool isInput);
 
 protected:
     unsigned int mSamplingRate = 0u;
@@ -163,6 +196,7 @@
     audio_channel_mask_t mChannelMask = AUDIO_CHANNEL_NONE;
     audio_port_handle_t mId = AUDIO_PORT_HANDLE_NONE;
     struct audio_gain_config mGain = { .index = -1 };
+    union audio_io_flags mFlags = { AUDIO_INPUT_FLAG_NONE };
 };
 
 } // namespace android
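
mFlags lives in a union audio_io_flags because a mix port is either a source (output flags apply) or a sink (input flags apply); getFlags(), isDirectOutput() and isMmap() read whichever member matches the port's role. A standalone sketch of that pattern with made-up flag constants (the real values are the AUDIO_OUTPUT_FLAG_* / AUDIO_INPUT_FLAG_* enums from system/audio.h):

#include <cstdint>

// Toy flag bits standing in for AUDIO_OUTPUT_FLAG_* / AUDIO_INPUT_FLAG_* values.
constexpr uint32_t kOutputFlagDirect = 1u << 0;
constexpr uint32_t kOutputFlagMmapNoIrq = 1u << 1;
constexpr uint32_t kInputFlagMmapNoIrq = 1u << 0;

// Same idea as union audio_io_flags: only one member is meaningful at a time.
union IoFlagsToy {
    uint32_t output;
    uint32_t input;
};

struct MixPortToy {
    bool isInput = false;       // true: sink (input flags), false: source (output flags)
    IoFlagsToy flags{};

    uint32_t getFlags() const { return isInput ? flags.input : flags.output; }
    bool isMmap() const {
        return isInput ? (flags.input & kInputFlagMmapNoIrq) != 0
                       : (flags.output & kOutputFlagMmapNoIrq) != 0;
    }
    bool isDirectOutput() const {
        return !isInput && (flags.output & kOutputFlagDirect) != 0;
    }
};

int main() {
    MixPortToy port;
    port.isInput = false;
    port.flags.output = kOutputFlagDirect | kOutputFlagMmapNoIrq;
    return (port.isDirectOutput() && port.isMmap()) ? 0 : 1;
}
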
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 6a36e78..79dfd12 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -17,11 +17,10 @@
 #pragma once
 
 #include <string>
+#include <utility>
 #include <vector>
 
-#include <android/media/AudioProfile.h>
-#include <binder/Parcel.h>
-#include <binder/Parcelable.h>
+#include <android/media/AudioProfileSys.h>
 #include <media/AidlConversion.h>
 #include <media/AudioContainers.h>
 #include <system/audio.h>
@@ -29,7 +28,7 @@
 
 namespace android {
 
-class AudioProfile final : public RefBase, public Parcelable
+class AudioProfile final : public RefBase
 {
 public:
     static sp<AudioProfile> createFullDynamic(audio_format_t dynamicFormat = AUDIO_FORMAT_DEFAULT);
@@ -70,7 +69,7 @@
     void setDynamicFormat(bool dynamic) { mIsDynamicFormat = dynamic; }
     bool isDynamicFormat() const { return mIsDynamicFormat; }
 
-    bool isDynamic() { return mIsDynamicFormat || mIsDynamicChannels || mIsDynamicRate; }
+    bool isDynamic() const { return mIsDynamicFormat || mIsDynamicChannels || mIsDynamicRate; }
 
     audio_encapsulation_type_t getEncapsulationType() const { return mEncapsulationType; }
     void setEncapsulationType(audio_encapsulation_type_t encapsulationType) {
@@ -79,13 +78,17 @@
 
     void dump(std::string *dst, int spaces) const;
 
-    bool equals(const sp<AudioProfile>& other) const;
+    bool equals(const sp<AudioProfile>& other, bool ignoreDynamicFlags = false) const;
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
+    using Aidl = std::pair<media::audio::common::AudioProfile, media::AudioProfileSys>;
+    ConversionResult<Aidl> toParcelable(bool isInput) const;
+    static ConversionResult<sp<AudioProfile>> fromParcelable(
+            const Aidl& aidl, bool isInput);
 
-    ConversionResult<media::AudioProfile> toParcelable() const;
-    static ConversionResult<sp<AudioProfile>> fromParcelable(const media::AudioProfile& parcelable);
+    ConversionResult<media::audio::common::AudioProfile>
+            toCommonParcelable(bool isInput) const;
+    static ConversionResult<sp<AudioProfile>> fromCommonParcelable(
+        const media::audio::common::AudioProfile& aidl, bool isInput);
 
 private:
 
@@ -106,11 +109,16 @@
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<sp<AudioProfile>>
-aidl2legacy_AudioProfile(const media::AudioProfile& aidl);
-ConversionResult<media::AudioProfile>
-legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy);
+aidl2legacy_AudioProfile(const AudioProfile::Aidl& aidl, bool isInput);
+ConversionResult<AudioProfile::Aidl>
+legacy2aidl_AudioProfile(const sp<AudioProfile>& legacy, bool isInput);
 
-class AudioProfileVector : public std::vector<sp<AudioProfile>>, public Parcelable
+ConversionResult<sp<AudioProfile>>
+aidl2legacy_AudioProfile_common(const media::audio::common::AudioProfile& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioProfile>
+legacy2aidl_AudioProfile_common(const sp<AudioProfile>& legacy, bool isInput);
+
+class AudioProfileVector : public std::vector<sp<AudioProfile>>
 {
 public:
     virtual ~AudioProfileVector() = default;
@@ -131,23 +139,25 @@
     bool hasDynamicProfile() const;
     bool hasDynamicRateFor(audio_format_t format) const;
 
-    bool contains(const sp<AudioProfile>& profile) const;
+    bool contains(const sp<AudioProfile>& profile, bool ignoreDynamicFlags = false) const;
 
     virtual void dump(std::string *dst, int spaces) const;
 
     bool equals(const AudioProfileVector& other) const;
+    void addAllValidProfiles(const AudioProfileVector& audioProfiles);
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
+    using Aidl = std::pair<
+            std::vector<media::audio::common::AudioProfile>,
+            std::vector<media::AudioProfileSys>>;
 };
 
 bool operator == (const AudioProfile &left, const AudioProfile &right);
 
 // Conversion routines, according to AidlConversion.h conventions.
 ConversionResult<AudioProfileVector>
-aidl2legacy_AudioProfileVector(const std::vector<media::AudioProfile>& aidl);
-ConversionResult<std::vector<media::AudioProfile>>
-legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy);
+aidl2legacy_AudioProfileVector(const AudioProfileVector::Aidl& aidl, bool isInput);
+ConversionResult<AudioProfileVector::Aidl>
+legacy2aidl_AudioProfileVector(const AudioProfileVector& legacy, bool isInput);
 
 AudioProfileVector intersectAudioProfiles(const AudioProfileVector& profiles1,
                                           const AudioProfileVector& profiles2);
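
AudioProfileVector::Aidl keeps the common and Sys halves as two parallel vectors; the .cpp changes above zip them into one legacy vector on the way in (convertContainers) and split a legacy vector back into the two parallel vectors on the way out (convertContainerSplit). A minimal sketch of that zip/split shape, with std::optional standing in for ConversionResult and toy element types:

#include <cstddef>
#include <optional>
#include <utility>
#include <vector>

struct CommonToy { int format; };
struct SysToy { bool isDynamic; };
struct LegacyToy { int format; bool isDynamic; };

// Zip two parallel AIDL vectors into one legacy vector; the sizes must match.
std::optional<std::vector<LegacyToy>> zipToLegacy(
        const std::vector<CommonToy>& common, const std::vector<SysToy>& sys) {
    if (common.size() != sys.size()) return std::nullopt;
    std::vector<LegacyToy> out;
    out.reserve(common.size());
    for (std::size_t i = 0; i < common.size(); ++i) {
        out.push_back(LegacyToy{common[i].format, sys[i].isDynamic});
    }
    return out;
}

// Split one legacy vector back into the two parallel AIDL vectors.
std::pair<std::vector<CommonToy>, std::vector<SysToy>> splitToAidl(
        const std::vector<LegacyToy>& legacy) {
    std::pair<std::vector<CommonToy>, std::vector<SysToy>> out;
    for (const auto& e : legacy) {
        out.first.push_back(CommonToy{e.format});
        out.second.push_back(SysToy{e.isDynamic});
    }
    return out;
}

int main() {
    std::vector<LegacyToy> legacy{{1, true}, {2, false}};
    auto aidl = splitToAidl(legacy);
    auto back = zipToLegacy(aidl.first, aidl.second);
    return (back.has_value() && back->size() == legacy.size()) ? 0 : 1;
}
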
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index 140ce36..1f0c768 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -36,16 +36,21 @@
 public:
      // Note that empty name refers by convention to a generic device.
     explicit DeviceDescriptorBase(audio_devices_t type);
-    DeviceDescriptorBase(audio_devices_t type, const std::string& address);
-    explicit DeviceDescriptorBase(const AudioDeviceTypeAddr& deviceTypeAddr);
+    DeviceDescriptorBase(audio_devices_t type, const std::string& address,
+            const FormatVector &encodedFormats = FormatVector{});
+    DeviceDescriptorBase(const AudioDeviceTypeAddr& deviceTypeAddr,
+            const FormatVector &encodedFormats = FormatVector{});
 
-    virtual ~DeviceDescriptorBase() {}
+    virtual ~DeviceDescriptorBase() = default;
 
     audio_devices_t type() const { return mDeviceTypeAddr.mType; }
     const std::string& address() const { return mDeviceTypeAddr.address(); }
     void setAddress(const std::string &address);
     const AudioDeviceTypeAddr& getDeviceTypeAddr() const { return mDeviceTypeAddr; }
 
+    const FormatVector& encodedFormats() const { return mEncodedFormats; }
+    bool supportsFormat(audio_format_t format);
+
     // AudioPortConfig
     virtual sp<AudioPort> getAudioPort() const {
         return static_cast<AudioPort*>(const_cast<DeviceDescriptorBase*>(this));
@@ -60,7 +65,7 @@
     status_t setEncapsulationModes(uint32_t encapsulationModes);
     status_t setEncapsulationMetadataTypes(uint32_t encapsulationMetadataTypes);
 
-    void dump(std::string *dst, int spaces, int index,
+    void dump(std::string *dst, int spaces,
               const char* extraInfo = nullptr, bool verbose = true) const;
     void log() const;
 
@@ -74,14 +79,12 @@
 
     bool equals(const sp<DeviceDescriptorBase>& other) const;
 
-    status_t writeToParcel(Parcel* parcel) const override;
-    status_t readFromParcel(const Parcel* parcel) override;
-
     status_t writeToParcelable(media::AudioPort* parcelable) const;
     status_t readFromParcelable(const media::AudioPort& parcelable);
 
 protected:
     AudioDeviceTypeAddr mDeviceTypeAddr;
+    FormatVector        mEncodedFormats;
     uint32_t mEncapsulationModes = 0;
     uint32_t mEncapsulationMetadataTypes = 0;
 private:
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index bb9a5f2..3f1fbea 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -11,12 +11,20 @@
     name: "audiofoundation_parcelable_test",
 
     shared_libs: [
-        "libaudiofoundation",
+        "libbase",
         "libbinder",
         "liblog",
         "libutils",
     ],
 
+    static_libs: [
+        "android.media.audio.common.types-V1-cpp",
+        "audioclient-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libstagefright_foundation",
+    ],
+
     header_libs: [
         "libaudio_system_headers",
     ],
diff --git a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
index 068b5d8..50d8dc8 100644
--- a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
+++ b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
@@ -53,7 +53,7 @@
 
 AudioGains getAudioGainsForTest() {
     AudioGains audioGains;
-    sp<AudioGain> audioGain = new AudioGain(0 /*index*/, false /*useInChannelMask*/);
+    sp<AudioGain> audioGain = new AudioGain(0 /*index*/, false /*isInput*/);
     audioGain->setMode(AUDIO_GAIN_MODE_JOINT);
     audioGain->setChannelMask(AUDIO_CHANNEL_OUT_STEREO);
     audioGain->setMinValueInMb(-3200);
@@ -75,57 +75,74 @@
     return audioProfiles;
 }
 
-TEST(AudioFoundationParcelableTest, ParcelingAudioGain) {
-    Parcel data;
-    AudioGains audioGains = getAudioGainsForTest();
-
-    ASSERT_EQ(data.writeParcelable(audioGains), NO_ERROR);
-    data.setDataPosition(0);
-    AudioGains audioGainsFromParcel;
-    ASSERT_EQ(data.readParcelable(&audioGainsFromParcel), NO_ERROR);
-    ASSERT_TRUE(audioGainsFromParcel.equals(audioGains));
+TEST(AudioFoundationParcelableTest, ParcelingAudioProfile) {
+    sp<AudioProfile> profile = getAudioProfileVectorForTest()[0];
+    auto conv = legacy2aidl_AudioProfile(profile, false /*isInput*/);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = aidl2legacy_AudioProfile(conv.value(), false /*isInput*/);
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_TRUE(profile->equals(convBack.value()));
 }
 
 TEST(AudioFoundationParcelableTest, ParcelingAudioProfileVector) {
-    Parcel data;
-    AudioProfileVector audioProfiles = getAudioProfileVectorForTest();
+    AudioProfileVector profiles = getAudioProfileVectorForTest();
+    auto conv = legacy2aidl_AudioProfileVector(profiles, false /*isInput*/);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = aidl2legacy_AudioProfileVector(conv.value(), false /*isInput*/);
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_TRUE(profiles.equals(convBack.value()));
+}
 
-    ASSERT_EQ(data.writeParcelable(audioProfiles), NO_ERROR);
-    data.setDataPosition(0);
-    AudioProfileVector audioProfilesFromParcel;
-    ASSERT_EQ(data.readParcelable(&audioProfilesFromParcel), NO_ERROR);
-    ASSERT_TRUE(audioProfilesFromParcel.equals(audioProfiles));
+TEST(AudioFoundationParcelableTest, ParcelingAudioGain) {
+    sp<AudioGain> audioGain = getAudioGainsForTest()[0];
+    auto conv = legacy2aidl_AudioGain(audioGain);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = aidl2legacy_AudioGain(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_TRUE(audioGain->equals(convBack.value()));
+}
+
+TEST(AudioFoundationParcelableTest, ParcelingAudioGains) {
+    AudioGains audioGains = getAudioGainsForTest();
+    auto conv = legacy2aidl_AudioGains(audioGains);
+    ASSERT_TRUE(conv.ok());
+    auto convBack = aidl2legacy_AudioGains(conv.value());
+    ASSERT_TRUE(convBack.ok());
+    ASSERT_TRUE(audioGains.equals(convBack.value()));
 }
 
 TEST(AudioFoundationParcelableTest, ParcelingAudioPort) {
-    Parcel data;
     sp<AudioPort> audioPort = new AudioPort(
             "AudioPortName", AUDIO_PORT_TYPE_DEVICE, AUDIO_PORT_ROLE_SINK);
     audioPort->setGains(getAudioGainsForTest());
     audioPort->setAudioProfiles(getAudioProfileVectorForTest());
 
-    ASSERT_EQ(data.writeParcelable(*audioPort), NO_ERROR);
-    data.setDataPosition(0);
+    media::AudioPort parcelable;
+    ASSERT_EQ(NO_ERROR, audioPort->writeToParcelable(&parcelable));
     sp<AudioPort> audioPortFromParcel = new AudioPort(
             "", AUDIO_PORT_TYPE_NONE, AUDIO_PORT_ROLE_NONE);
-    ASSERT_EQ(data.readParcelable(audioPortFromParcel.get()), NO_ERROR);
+    ASSERT_EQ(NO_ERROR, audioPortFromParcel->readFromParcelable(parcelable));
     ASSERT_TRUE(audioPortFromParcel->equals(audioPort));
 }
 
 TEST(AudioFoundationParcelableTest, ParcelingAudioPortConfig) {
+    const bool isInput = false;
     Parcel data;
     sp<AudioPortConfig> audioPortConfig = new AudioPortConfigTestStub();
     audioPortConfig->applyAudioPortConfig(&TEST_AUDIO_PORT_CONFIG);
-
-    ASSERT_EQ(data.writeParcelable(*audioPortConfig), NO_ERROR);
+    media::audio::common::AudioPortConfig parcelable{};
+    ASSERT_EQ(NO_ERROR, audioPortConfig->writeToParcelable(&parcelable, isInput));
+    ASSERT_EQ(NO_ERROR, data.writeParcelable(parcelable));
     data.setDataPosition(0);
+    media::audio::common::AudioPortConfig parcelableFromParcel{};
+    ASSERT_EQ(NO_ERROR, data.readParcelable(&parcelableFromParcel));
     sp<AudioPortConfig> audioPortConfigFromParcel = new AudioPortConfigTestStub();
-    ASSERT_EQ(data.readParcelable(audioPortConfigFromParcel.get()), NO_ERROR);
-    ASSERT_TRUE(audioPortConfigFromParcel->equals(audioPortConfig));
+    ASSERT_EQ(NO_ERROR, audioPortConfigFromParcel->readFromParcelable(
+                    parcelableFromParcel, isInput));
+    ASSERT_TRUE(audioPortConfigFromParcel->equals(audioPortConfig, isInput));
 }
 
 TEST(AudioFoundationParcelableTest, ParcelingDeviceDescriptorBase) {
-    Parcel data;
     sp<DeviceDescriptorBase> desc = new DeviceDescriptorBase(AUDIO_DEVICE_OUT_SPEAKER);
     desc->setGains(getAudioGainsForTest());
     desc->setAudioProfiles(getAudioProfileVectorForTest());
@@ -135,10 +152,10 @@
     ASSERT_EQ(desc->setEncapsulationMetadataTypes(
             AUDIO_ENCAPSULATION_METADATA_TYPE_ALL_POSITION_BITS), NO_ERROR);
 
-    ASSERT_EQ(data.writeParcelable(*desc), NO_ERROR);
-    data.setDataPosition(0);
+    media::AudioPort parcelable;
+    ASSERT_EQ(NO_ERROR, desc->writeToParcelable(&parcelable));
     sp<DeviceDescriptorBase> descFromParcel = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
-    ASSERT_EQ(data.readParcelable(descFromParcel.get()), NO_ERROR);
+    ASSERT_EQ(NO_ERROR, descFromParcel->readFromParcelable(parcelable));
     ASSERT_TRUE(descFromParcel->equals(desc));
 }
 
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index bd24c84..5f63e8d 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -27,9 +27,11 @@
         "libaudiohal@5.0",
         "libaudiohal@6.0",
         "libaudiohal@7.0",
+        "libaudiohal@7.1",
     ],
 
     shared_libs: [
+        "audioclient-types-aidl-cpp",
         "libdl",
         "libhidlbase",
         "liblog",
diff --git a/media/libaudiohal/DevicesFactoryHalInterface.cpp b/media/libaudiohal/DevicesFactoryHalInterface.cpp
index 325a547..5ad26fc 100644
--- a/media/libaudiohal/DevicesFactoryHalInterface.cpp
+++ b/media/libaudiohal/DevicesFactoryHalInterface.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <string>
+
 #include <media/audiohal/DevicesFactoryHalInterface.h>
 #include <media/audiohal/FactoryHalHidl.h>
 
@@ -21,8 +23,10 @@
 
 // static
 sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
+    using namespace std::string_literals;
     return createPreferredImpl<DevicesFactoryHalInterface>(
-            "android.hardware.audio", "IDevicesFactory");
+            std::make_pair("android.hardware.audio"s, "IDevicesFactory"s),
+            std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s));
 }
 
 } // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalInterface.cpp b/media/libaudiohal/EffectsFactoryHalInterface.cpp
index bc3b4c1..8a28f64 100644
--- a/media/libaudiohal/EffectsFactoryHalInterface.cpp
+++ b/media/libaudiohal/EffectsFactoryHalInterface.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <string>
+
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/audiohal/FactoryHalHidl.h>
 
@@ -21,8 +23,10 @@
 
 // static
 sp<EffectsFactoryHalInterface> EffectsFactoryHalInterface::create() {
+    using namespace std::string_literals;
     return createPreferredImpl<EffectsFactoryHalInterface>(
-            "android.hardware.audio.effect", "IEffectsFactory");
+            std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s),
+            std::make_pair("android.hardware.audio"s, "IDevicesFactory"s));
 }
 
 // static
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index e420d07..590fec5 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -16,6 +16,10 @@
 
 #define LOG_TAG "FactoryHalHidl"
 
+#include <algorithm>
+#include <array>
+#include <utility>
+
 #include <media/audiohal/FactoryHalHidl.h>
 
 #include <dlfcn.h>
@@ -28,14 +32,16 @@
 namespace android::detail {
 
 namespace {
-/** Supported HAL versions, in order of preference.
+/** Supported HAL versions, from most recent to least recent.
  */
-const char* sAudioHALVersions[] = {
-    "7.0",
-    "6.0",
-    "5.0",
-    "4.0",
-    nullptr
+#define CONC_VERSION(maj, min) #maj "." #min
+#define DECLARE_VERSION(maj, min) std::make_pair(std::make_pair(maj, min), CONC_VERSION(maj, min))
+static constexpr std::array<std::pair<std::pair<int, int>, const char*>, 5> sAudioHALVersions = {
+    DECLARE_VERSION(7, 1),
+    DECLARE_VERSION(7, 0),
+    DECLARE_VERSION(6, 0),
+    DECLARE_VERSION(5, 0),
+    DECLARE_VERSION(4, 0)
 };
 
 bool createHalService(const std::string& version, const std::string& interface,
@@ -93,11 +99,22 @@
 
 }  // namespace
 
-void* createPreferredImpl(const std::string& package, const std::string& interface) {
-    for (auto version = detail::sAudioHALVersions; version != nullptr; ++version) {
-        void* rawInterface = nullptr;
-        if (hasHalService(package, *version, interface)
-                && createHalService(*version, interface, &rawInterface)) {
+void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface) {
+    auto findMostRecentVersion = [](const InterfaceName& iface) {
+        return std::find_if(detail::sAudioHALVersions.begin(), detail::sAudioHALVersions.end(),
+                [&](const auto& v) { return hasHalService(iface.first, v.second, iface.second); });
+    };
+    auto ifaceVersionIt = findMostRecentVersion(iface);
+    auto siblingVersionIt = findMostRecentVersion(siblingIface);
+    if (ifaceVersionIt != detail::sAudioHALVersions.end() &&
+            siblingVersionIt != detail::sAudioHALVersions.end() &&
+            // same major version
+            ifaceVersionIt->first.first == siblingVersionIt->first.first) {
+        std::string libraryVersion =
+                ifaceVersionIt->first >= siblingVersionIt->first ?
+                ifaceVersionIt->second : siblingVersionIt->second;
+        void* rawInterface;
+        if (createHalService(libraryVersion, iface.second, &rawInterface)) {
             return rawInterface;
         }
     }
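
createPreferredImpl now has to reconcile two sibling HAL interfaces (core and effects) whose most recent registered versions can differ: it locates the newest available version of each, requires their major versions to match, and loads the library named after the greater of the two, so both factories come from the same HAL generation. A standalone sketch of that selection logic over a toy service-lookup predicate:

#include <algorithm>
#include <array>
#include <functional>
#include <optional>
#include <string>
#include <utility>

using Version = std::pair<int, int>;                    // {major, minor}
using VersionEntry = std::pair<Version, const char*>;   // version plus library suffix

// Same ordering as sAudioHALVersions: most recent first.
const std::array<VersionEntry, 5> kVersions{{
        {{7, 1}, "7.1"}, {{7, 0}, "7.0"}, {{6, 0}, "6.0"}, {{5, 0}, "5.0"}, {{4, 0}, "4.0"}}};

// Picks the library version to load for `iface`, given a predicate that reports whether
// a particular (interface, version) pair is registered with the service manager.
std::optional<std::string> pickLibraryVersion(
        const std::string& iface, const std::string& siblingIface,
        const std::function<bool(const std::string&, const char*)>& hasService) {
    auto mostRecent = [&](const std::string& name) {
        return std::find_if(kVersions.begin(), kVersions.end(),
                [&](const auto& v) { return hasService(name, v.second); });
    };
    auto it = mostRecent(iface);
    auto siblingIt = mostRecent(siblingIface);
    if (it == kVersions.end() || siblingIt == kVersions.end() ||
            it->first.first != siblingIt->first.first) {        // major versions must match
        return std::nullopt;
    }
    return std::string(it->first >= siblingIt->first ? it->second : siblingIt->second);
}

int main() {
    // Toy setup: the core HAL is registered at 7.1 while the effects HAL stops at 7.0.
    auto hasService = [](const std::string& name, const char* version) {
        if (name == "IDevicesFactory") return std::string(version) == "7.1";
        return std::string(version) == "7.0";                   // IEffectsFactory
    };
    auto version = pickLibraryVersion("IDevicesFactory", "IEffectsFactory", hasService);
    return (version.has_value() && *version == "7.1") ? 0 : 1;  // i.e. load libaudiohal@7.1
}
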
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9fa
--- /dev/null
+++ b/media/libaudiohal/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index a2c6e8a..d30883a 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -7,23 +7,28 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_defaults {
-    name: "libaudiohal_default",
-
+filegroup {
+    name: "audio_core_hal_client_sources",
     srcs: [
-        "DeviceHalLocal.cpp",
-        "DevicesFactoryHalHybrid.cpp",
-        "DevicesFactoryHalLocal.cpp",
-        "StreamHalLocal.cpp",
-
-        "ConversionHelperHidl.cpp",
+        "CoreConversionHelperHidl.cpp",
         "DeviceHalHidl.cpp",
         "DevicesFactoryHalHidl.cpp",
-        "EffectBufferHalHidl.cpp",
-        "EffectHalHidl.cpp",
-        "EffectsFactoryHalHidl.cpp",
         "StreamHalHidl.cpp",
     ],
+}
+
+filegroup {
+    name: "audio_effect_hal_client_sources",
+    srcs: [
+        "EffectBufferHalHidl.cpp",
+        "EffectConversionHelperHidl.cpp",
+        "EffectHalHidl.cpp",
+        "EffectsFactoryHalHidl.cpp",
+    ],
+}
+
+cc_defaults {
+    name: "libaudiohal_default",
 
     cflags: [
         "-Wall",
@@ -50,6 +55,7 @@
         "libmedia_helper",
         "libmediautils",
         "libutils",
+        "audioclient-types-aidl-cpp",
     ],
     header_libs: [
         "android.hardware.audio.common.util@all-versions",
@@ -65,6 +71,11 @@
 cc_library_shared {
     name: "libaudiohal@4.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
+    ],
     shared_libs: [
         "android.hardware.audio.common@4.0",
         "android.hardware.audio.common@4.0-util",
@@ -83,6 +94,11 @@
 cc_library_shared {
     name: "libaudiohal@5.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
+    ],
     shared_libs: [
         "android.hardware.audio.common@5.0",
         "android.hardware.audio.common@5.0-util",
@@ -101,6 +117,11 @@
 cc_library_shared {
     name: "libaudiohal@6.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
+    ],
     shared_libs: [
         "android.hardware.audio.common@6.0",
         "android.hardware.audio.common@6.0-util",
@@ -116,16 +137,41 @@
     ]
 }
 
+cc_library_static {
+    name: "libaudiohal.effect@7.0",
+    defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_effect_hal_client_sources",
+    ],
+    static_libs: [
+        "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-util",
+        "android.hardware.audio.effect@7.0",
+        "android.hardware.audio.effect@7.0-util",
+    ],
+    cflags: [
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=0",
+        "-include common/all-versions/VersionMacro.h",
+    ]
+}
+
 cc_library_shared {
     name: "libaudiohal@7.0",
     defaults: ["libaudiohal_default"],
-    shared_libs: [
+    srcs: [
+        ":audio_core_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
+    ],
+    static_libs: [
         "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-enums",
         "android.hardware.audio.common@7.0-util",
         "android.hardware.audio.effect@7.0",
         "android.hardware.audio.effect@7.0-util",
         "android.hardware.audio@7.0",
         "android.hardware.audio@7.0-util",
+        "libaudiohal.effect@7.0",
     ],
     cflags: [
         "-DMAJOR_VERSION=7",
@@ -133,3 +179,31 @@
         "-include common/all-versions/VersionMacro.h",
     ]
 }
+
+cc_library_shared {
+    name: "libaudiohal@7.1",
+    defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        "EffectsFactoryHalHidlEntry.cpp",
+    ],
+    static_libs: [
+        "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.0-util",
+        "android.hardware.audio.common@7.1-enums",
+        "android.hardware.audio.common@7.1-util",
+        "android.hardware.audio.effect@7.0",
+        "android.hardware.audio.effect@7.0-util",
+        "android.hardware.audio@7.0",
+        "android.hardware.audio@7.1",
+        "android.hardware.audio@7.1-util",
+        "libaudiohal.effect@7.0",
+    ],
+    cflags: [
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=1",
+        "-DCOMMON_TYPES_MINOR_VERSION=0",
+        "-DCORE_TYPES_MINOR_VERSION=0",
+        "-include common/all-versions/VersionMacro.h",
+    ]
+}
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index 2909013..6e2c831 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -17,34 +17,25 @@
 #ifndef ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
 #define ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
 
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include <functional>
+
 #include <hidl/HidlSupport.h>
 #include <system/audio.h>
-#include <utils/String8.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
-
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using CoreResult = ::android::hardware::audio::CPP_VERSION::Result;
-
-using ::android::hardware::Return;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
 
 namespace android {
-namespace CPP_VERSION {
 
+template<typename HalResult>
 class ConversionHelperHidl {
   protected:
-    static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
-    static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
-    static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
-    static void argsFromHal(const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs);
+    using HalResultConverter = std::function<status_t(const HalResult&)>;
+    const std::string mClassName;
 
-    ConversionHelperHidl(const char* className);
+    ConversionHelperHidl(std::string_view className, HalResultConverter resultConv)
+            : mClassName(className), mResultConverter(resultConv) {}
 
     template<typename R, typename T>
-    status_t processReturn(const char* funcName, const Return<R>& ret, T *retval) {
+    status_t processReturn(const char* funcName,
+            const ::android::hardware::Return<R>& ret, T *retval) {
         if (ret.isOk()) {
             // This way it also works for enum class to unscoped enum conversion.
             *retval = static_cast<T>(static_cast<R>(ret));
@@ -54,38 +45,42 @@
     }
 
     template<typename T>
-    status_t processReturn(const char* funcName, const Return<T>& ret) {
+    status_t processReturn(const char* funcName, const ::android::hardware::Return<T>& ret) {
         if (!ret.isOk()) {
             emitError(funcName, ret.description().c_str());
         }
         return ret.isOk() ? OK : FAILED_TRANSACTION;
     }
 
-    status_t processReturn(const char* funcName, const Return<CoreResult>& ret) {
+    status_t processReturn(const char* funcName,
+            const ::android::hardware::Return<HalResult>& ret) {
         if (!ret.isOk()) {
             emitError(funcName, ret.description().c_str());
         }
-        return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
+        return ret.isOk() ? mResultConverter(ret) : FAILED_TRANSACTION;
     }
 
     template<typename T>
     status_t processReturn(
-            const char* funcName, const Return<T>& ret, CoreResult retval) {
+            const char* funcName, const ::android::hardware::Return<T>& ret, HalResult retval) {
         if (!ret.isOk()) {
             emitError(funcName, ret.description().c_str());
         }
-        return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
+        return ret.isOk() ? mResultConverter(retval) : FAILED_TRANSACTION;
+    }
+
+    const std::string& getClassName() const {
+        return mClassName;
     }
 
   private:
-    const char* mClassName;
+    HalResultConverter mResultConverter;
 
-    static status_t analyzeResult(const CoreResult& result);
-
-    void emitError(const char* funcName, const char* description);
+    void emitError(const char* funcName, const char* description) {
+        ALOGE("%s %p %s: %s (from rpc)", mClassName.c_str(), this, funcName, description);
+    }
 };
 
-}  // namespace CPP_VERSION
 }  // namespace android
 
 #endif // ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
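
With the CPP_VERSION namespace removed, ConversionHelperHidl becomes a plain template parameterized on the HAL's Result type, and each subsystem supplies its own converter from that Result to status_t (the CoreConversionHelperHidl changes further down pass analyzeResult). A toy sketch of that wiring without any HIDL dependencies (the enum, status codes and class names below are stand-ins):

#include <functional>
#include <string>
#include <string_view>
#include <utility>

using status_toy_t = int;
constexpr status_toy_t TOY_OK = 0;
constexpr status_toy_t TOY_BAD_VALUE = -22;
constexpr status_toy_t TOY_NO_INIT = -19;

// Toy stand-in for a HAL-specific Result enum.
enum class ToyResult { OK, INVALID_ARGUMENTS, NOT_INITIALIZED };

// Mirrors the shape of ConversionHelperHidl<HalResult>: the base class only knows how
// to run a caller-provided converter over HAL results and remember the class name.
template <typename HalResult>
class ConversionHelperSketch {
  protected:
    using HalResultConverter = std::function<status_toy_t(const HalResult&)>;
    ConversionHelperSketch(std::string_view className, HalResultConverter conv)
            : mClassName(className), mResultConverter(std::move(conv)) {}
    status_toy_t convert(const HalResult& result) const { return mResultConverter(result); }
    const std::string mClassName;
  private:
    HalResultConverter mResultConverter;
};

// A subsystem-specific helper supplies its own Result-to-status mapping, the way
// CoreConversionHelperHidl passes analyzeResult to the base class.
class ToyCoreHelper : public ConversionHelperSketch<ToyResult> {
  public:
    ToyCoreHelper() : ConversionHelperSketch("ToyCoreHelper", analyzeResult) {}
    status_toy_t check(ToyResult r) const { return convert(r); }
  private:
    static status_toy_t analyzeResult(const ToyResult& result) {
        switch (result) {
            case ToyResult::OK: return TOY_OK;
            case ToyResult::INVALID_ARGUMENTS: return TOY_BAD_VALUE;
            case ToyResult::NOT_INITIALIZED: return TOY_NO_INIT;
        }
        return TOY_NO_INIT;
    }
};

int main() {
    ToyCoreHelper helper;
    return helper.check(ToyResult::OK) == TOY_OK ? 0 : 1;
}
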
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/CoreConversionHelperHidl.cpp
similarity index 84%
rename from media/libaudiohal/impl/ConversionHelperHidl.cpp
rename to media/libaudiohal/impl/CoreConversionHelperHidl.cpp
index 0503698..2ac8a42 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/CoreConversionHelperHidl.cpp
@@ -21,16 +21,16 @@
 #include <media/AudioParameter.h>
 #include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
 
 namespace android {
-namespace CPP_VERSION {
 
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
 // static
-status_t ConversionHelperHidl::keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
+status_t CoreConversionHelperHidl::keysFromHal(
+        const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
     AudioParameter halKeys(keys);
     if (halKeys.size() == 0) return BAD_VALUE;
     hidlKeys->resize(halKeys.size());
@@ -75,7 +75,7 @@
 }
 
 // static
-status_t ConversionHelperHidl::parametersFromHal(
+status_t CoreConversionHelperHidl::parametersFromHal(
         const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams) {
     AudioParameter params(kvPairs);
     if (params.size() == 0) return BAD_VALUE;
@@ -91,7 +91,7 @@
 }
 
 // static
-void ConversionHelperHidl::parametersToHal(
+void CoreConversionHelperHidl::parametersToHal(
         const hidl_vec<ParameterValue>& parameters, String8 *values) {
     AudioParameter params;
     for (size_t i = 0; i < parameters.size(); ++i) {
@@ -100,12 +100,11 @@
     values->setTo(params.toString());
 }
 
-ConversionHelperHidl::ConversionHelperHidl(const char* className)
-        : mClassName(className) {
-}
+CoreConversionHelperHidl::CoreConversionHelperHidl(std::string_view className)
+        : ConversionHelperHidl<CoreResult>(className, analyzeResult) {}
 
 // static
-void ConversionHelperHidl::argsFromHal(
+void CoreConversionHelperHidl::argsFromHal(
         const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs) {
     hidlArgs->resize(args.size());
     for (size_t i = 0; i < args.size(); ++i) {
@@ -114,20 +113,15 @@
 }
 
 // static
-status_t ConversionHelperHidl::analyzeResult(const Result& result) {
+status_t CoreConversionHelperHidl::analyzeResult(const CoreResult& result) {
     switch (result) {
         case Result::OK: return OK;
         case Result::INVALID_ARGUMENTS: return BAD_VALUE;
         case Result::INVALID_STATE: return NOT_ENOUGH_DATA;
         case Result::NOT_INITIALIZED: return NO_INIT;
         case Result::NOT_SUPPORTED: return INVALID_OPERATION;
-        default: return NO_INIT;
     }
+    return NO_INIT;
 }
 
-void ConversionHelperHidl::emitError(const char* funcName, const char* description) {
-    ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
-}
-
-}  // namespace CPP_VERSION
 }  // namespace android
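
The analyzeResult change above drops the default: label from the switch and returns NO_INIT after it instead; with -Wall in the build, the compiler can then warn when a future HAL version adds a Result enumerator that this mapping forgets to handle, while out-of-range values arriving over the wire still fall through to the NO_INIT return. The same pattern in isolation (toy enum and status codes):

enum class ResultToy { OK, INVALID_ARGUMENTS };

int analyzeResultToy(ResultToy result) {
    switch (result) {           // no default: -Wswitch flags unhandled enumerators
        case ResultToy::OK: return 0;                   // OK
        case ResultToy::INVALID_ARGUMENTS: return -22;  // BAD_VALUE
    }
    return -19;                 // NO_INIT-style fallback for unexpected wire values
}

int main() {
    return analyzeResultToy(ResultToy::OK);
}
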
diff --git a/media/libaudiohal/impl/CoreConversionHelperHidl.h b/media/libaudiohal/impl/CoreConversionHelperHidl.h
new file mode 100644
index 0000000..a4d76f3
--- /dev/null
+++ b/media/libaudiohal/impl/CoreConversionHelperHidl.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
+#define ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
+
+#include "ConversionHelperHidl.h"
+
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
+
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+
+namespace android {
+
+class CoreConversionHelperHidl : public ConversionHelperHidl<CoreResult> {
+  protected:
+    static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
+    static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
+    static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
+    static void argsFromHal(const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs);
+
+    CoreConversionHelperHidl(std::string_view className);
+
+  private:
+    static status_t analyzeResult(const CoreResult& result);
+};
+
+}  // namespace android
+
+#endif // ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
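
CoreConversionHelperHidl now derives from a ConversionHelperHidl<CoreResult> template and hands the base its class name plus the analyzeResult function. The base class is defined in ConversionHelperHidl.h, which is not part of this hunk, so the sketch below is only an assumption about its shape (member and helper names are hypothetical), included to make the constructor call above easier to read.

    #include <string>
    #include <string_view>

    using status_sketch_t = int;

    // Hypothetical shape of the templated base; the real ConversionHelperHidl.h
    // is not shown in this section of the diff.
    template <typename ResultT>
    class ConversionHelperHidlSketch {
      public:
        using AnalyzeResultFn = status_sketch_t (*)(const ResultT&);

        ConversionHelperHidlSketch(std::string_view className, AnalyzeResultFn analyze)
                : mClassName(className), mAnalyze(analyze) {}

        const std::string& getClassName() const { return mClassName; }

      protected:
        // processReturn-style helpers would route HAL results through mAnalyze.
        status_sketch_t analyze(const ResultT& result) const { return mAnalyze(result); }

      private:
        const std::string mClassName;
        const AnalyzeResultFn mAnalyze;
    };
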
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index aa94eea..0cdf621 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -20,8 +20,10 @@
 //#define LOG_NDEBUG 0
 
 #include <cutils/native_handle.h>
+#include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioContainers.h>
+#include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
 
 #include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
@@ -30,27 +32,43 @@
 #include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
-#include "EffectHalHidl.h"
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
-using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 
 namespace android {
-namespace CPP_VERSION {
 
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
-using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
+#define TIME_CHECK() auto timeCheck = \
+        mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
 
-DeviceHalHidl::DeviceHalHidl(const sp<IDevice>& device)
-        : ConversionHelperHidl("Device"), mDevice(device),
-          mPrimaryDevice(IPrimaryDevice::castFrom(device)) {
+DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
+        : CoreConversionHelperHidl("DeviceHalHidl"), mDevice(device) {
+}
+
+DeviceHalHidl::DeviceHalHidl(
+        const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device)
+        : CoreConversionHelperHidl("DeviceHalHidl"),
+#if MAJOR_VERSION <= 6 || (MAJOR_VERSION == 7 && MINOR_VERSION == 0)
+          mDevice(device),
+#endif
+          mPrimaryDevice(device) {
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    auto getDeviceRet = mPrimaryDevice->getDevice();
+    if (getDeviceRet.isOk()) {
+        mDevice = getDeviceRet;
+    } else {
+        ALOGE("Call to IPrimaryDevice.getDevice has failed: %s",
+                getDeviceRet.description().c_str());
+    }
+#endif
 }
 
 DeviceHalHidl::~DeviceHalHidl() {
@@ -70,22 +88,26 @@
 }
 
 status_t DeviceHalHidl::initCheck() {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("initCheck", mDevice->initCheck());
 }
 
 status_t DeviceHalHidl::setVoiceVolume(float volume) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     if (mPrimaryDevice == 0) return INVALID_OPERATION;
     return processReturn("setVoiceVolume", mPrimaryDevice->setVoiceVolume(volume));
 }
 
 status_t DeviceHalHidl::setMasterVolume(float volume) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("setMasterVolume", mDevice->setMasterVolume(volume));
 }
 
 status_t DeviceHalHidl::getMasterVolume(float *volume) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMasterVolume(
@@ -99,17 +121,20 @@
 }
 
 status_t DeviceHalHidl::setMode(audio_mode_t mode) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     if (mPrimaryDevice == 0) return INVALID_OPERATION;
     return processReturn("setMode", mPrimaryDevice->setMode(AudioMode(mode)));
 }
 
 status_t DeviceHalHidl::setMicMute(bool state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("setMicMute", mDevice->setMicMute(state));
 }
 
 status_t DeviceHalHidl::getMicMute(bool *state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMicMute(
@@ -123,11 +148,13 @@
 }
 
 status_t DeviceHalHidl::setMasterMute(bool state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("setMasterMute", mDevice->setMasterMute(state));
 }
 
 status_t DeviceHalHidl::getMasterMute(bool *state) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMasterMute(
@@ -141,6 +168,7 @@
 }
 
 status_t DeviceHalHidl::setParameters(const String8& kvPairs) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     hidl_vec<ParameterValue> hidlParams;
     status_t status = parametersFromHal(kvPairs, &hidlParams);
@@ -151,6 +179,7 @@
 }
 
 status_t DeviceHalHidl::getParameters(const String8& keys, String8 *values) {
+    TIME_CHECK();
     values->clear();
     if (mDevice == 0) return NO_INIT;
     hidl_vec<hidl_string> hidlKeys;
@@ -171,6 +200,7 @@
 
 status_t DeviceHalHidl::getInputBufferSize(
         const struct audio_config *config, size_t *size) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioConfig hidlConfig;
     HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
@@ -193,6 +223,7 @@
         struct audio_config *config,
         const char *address,
         sp<StreamOutHalInterface> *outStream) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
     if (status_t status = CoreUtils::deviceAddressFromHal(deviceType, address, &hidlDevice);
@@ -204,17 +235,32 @@
             status != OK) {
         return status;
     }
+
+#if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
+    //TODO: b/193496180 use spatializer flag at audio HAL when available
+    if ((flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
+        flags = (audio_output_flags_t)(flags & ~AUDIO_OUTPUT_FLAG_SPATIALIZER);
+        flags = (audio_output_flags_t)
+                (flags | AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
+    }
+#endif
+
     CoreUtils::AudioOutputFlags hidlFlags;
     if (status_t status = CoreUtils::audioOutputFlagsFromHal(flags, &hidlFlags); status != OK) {
         return status;
     }
     Result retval = Result::NOT_INITIALIZED;
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    Return<void> ret = mDevice->openOutputStream_7_1(
+#else
     Return<void> ret = mDevice->openOutputStream(
+#endif
             handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
             {} /* metadata */,
 #endif
-            [&](Result r, const sp<IStreamOut>& result, const AudioConfig& suggestedConfig) {
+            [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
+                    const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
                     *outStream = new StreamOutHalHidl(result);
@@ -234,6 +280,7 @@
         audio_devices_t outputDevice,
         const char *outputDeviceAddress,
         sp<StreamInHalInterface> *inStream) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
     if (status_t status = CoreUtils::deviceAddressFromHal(devices, address, &hidlDevice);
@@ -284,7 +331,9 @@
 #endif
     Return<void> ret = mDevice->openInputStream(
             handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
-            [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
+            [&](Result r,
+                const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& result,
+                    const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
                     *inStream = new StreamInHalHidl(result);
@@ -295,6 +344,7 @@
 }
 
 status_t DeviceHalHidl::supportsAudioPatches(bool *supportsPatches) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("supportsAudioPatches", mDevice->supportsAudioPatches(), supportsPatches);
 }
@@ -305,6 +355,7 @@
         unsigned int num_sinks,
         const struct audio_port_config *sinks,
         audio_patch_handle_t *patch) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     if (patch == nullptr) return BAD_VALUE;
 
@@ -350,6 +401,7 @@
 }
 
 status_t DeviceHalHidl::releaseAudioPatch(audio_patch_handle_t patch) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
 }
@@ -372,10 +424,12 @@
 }
 
 status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+    TIME_CHECK();
     return getAudioPortImpl(port);
 }
 
 status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+    TIME_CHECK();
 #if MAJOR_VERSION >= 7
     return getAudioPortImpl(port);
 #else
@@ -396,6 +450,7 @@
 }
 
 status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioPortConfig hidlConfig;
     HidlUtils::audioPortConfigFromHal(*config, &hidlConfig);
@@ -410,6 +465,7 @@
 }
 #elif MAJOR_VERSION >= 4
 status_t DeviceHalHidl::getMicrophones(std::vector<media::MicrophoneInfo> *microphonesInfo) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mDevice->getMicrophones(
@@ -430,10 +486,10 @@
 #if MAJOR_VERSION >= 6
 status_t DeviceHalHidl::addDeviceEffect(
         audio_port_handle_t device, sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
-            static_cast<AudioPortHandle>(device),
-            static_cast<EffectHalHidl*>(effect.get())->effectId()));
+            static_cast<AudioPortHandle>(device), effect->effectId()));
 }
 #else
 status_t DeviceHalHidl::addDeviceEffect(
@@ -445,10 +501,10 @@
 #if MAJOR_VERSION >= 6
 status_t DeviceHalHidl::removeDeviceEffect(
         audio_port_handle_t device, sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
-            static_cast<AudioPortHandle>(device),
-            static_cast<EffectHalHidl*>(effect.get())->effectId()));
+            static_cast<AudioPortHandle>(device), effect->effectId()));
 }
 #else
 status_t DeviceHalHidl::removeDeviceEffect(
@@ -457,7 +513,48 @@
 }
 #endif
 
+status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    TIME_CHECK();
+    if (mDevice == 0) return NO_INIT;
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    if (supportsSetConnectedState7_1) {
+        AudioPort hidlPort;
+        if (status_t result = HidlUtils::audioPortFromHal(*port, &hidlPort); result != NO_ERROR) {
+            return result;
+        }
+        Return<Result> ret = mDevice->setConnectedState_7_1(hidlPort, connected);
+        if (!ret.isOk() || ret != Result::NOT_SUPPORTED) {
+            return processReturn("setConnectedState_7_1", ret);
+        } else if (ret == Result::OK) {
+            return NO_ERROR;
+        }
+        supportsSetConnectedState7_1 = false;
+    }
+#endif
+    DeviceAddress hidlAddress;
+    if (status_t result = CoreUtils::deviceAddressFromHal(
+                    port->ext.device.type, port->ext.device.address, &hidlAddress);
+            result != NO_ERROR) {
+        return result;
+    }
+    return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
+}
+
+error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
+    TIME_CHECK();
+    if (mDevice == 0) return NO_INIT;
+    audio_hw_sync_t value;
+    Result result;
+    Return<void> ret = mDevice->getHwAvSync([&value, &result](Result r, audio_hw_sync_t v) {
+        value = v;
+        result = r;
+    });
+    RETURN_IF_ERROR(processReturn("getHwAvSync", ret, result));
+    return value;
+}
+
 status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
+    TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
     hidlHandle->data[0] = fd;
@@ -480,5 +577,4 @@
     return processReturn("dump", ret);
 }
 
-} // namespace CPP_VERSION
 } // namespace android
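
Each DeviceHalHidl method now opens with TIME_CHECK(), which expands to a scoped object returned by mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__). Conceptually it is an RAII guard whose lifetime brackets the HAL call. The stand-in below only times and logs; the real mediautils::TimeCheck also feeds per-method statistics and a watchdog, which is omitted here.

    #include <chrono>
    #include <cstdio>
    #include <string>

    class ScopedCallTimer {
      public:
        explicit ScopedCallTimer(std::string tag)
                : mTag(std::move(tag)), mStart(std::chrono::steady_clock::now()) {}
        ~ScopedCallTimer() {
            const auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                    std::chrono::steady_clock::now() - mStart).count();
            std::fprintf(stderr, "%s took %lld us\n", mTag.c_str(), static_cast<long long>(us));
        }
      private:
        const std::string mTag;
        const std::chrono::steady_clock::time_point mStart;
    };

    // Analogous to: #define TIME_CHECK() auto timeCheck = ...(getClassName(), __func__)
    #define TIME_CHECK_SKETCH(className) \
        ScopedCallTimer timeCheck(std::string(className) + "::" + __func__)

    int setMicMuteExample(bool /*state*/) {
        TIME_CHECK_SKETCH("DeviceHalHidl");  // destroyed (and logged) when the call returns
        return 0;
    }
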
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 2694ab3..f6519b6 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -20,17 +20,13 @@
 #include PATH(android/hardware/audio/FILE_VERSION/IDevice.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
 #include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/EffectHalInterface.h>
 
-#include "ConversionHelperHidl.h"
-
-using ::android::hardware::audio::CPP_VERSION::IDevice;
-using ::android::hardware::audio::CPP_VERSION::IPrimaryDevice;
-using ::android::hardware::Return;
+#include "CoreConversionHelperHidl.h"
 
 namespace android {
-namespace CPP_VERSION {
 
-class DeviceHalHidl : public DeviceHalInterface, public ConversionHelperHidl
+class DeviceHalHidl : public DeviceHalInterface, public CoreConversionHelperHidl
 {
   public:
     // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
@@ -119,15 +115,40 @@
     status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
     status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
 
+    status_t getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType __unused,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos __unused) override {
+        // TODO: Implement the HAL query when moving to AIDL HAL.
+        return INVALID_OPERATION;
+    }
+
+    int32_t getAAudioMixerBurstCount() override {
+        // TODO: Implement the HAL query when moving to AIDL HAL.
+        return INVALID_OPERATION;
+    }
+
+    int32_t getAAudioHardwareBurstMinUsec() override {
+        // TODO: Implement the HAL query when moving to AIDL HAL.
+        return INVALID_OPERATION;
+    }
+
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
+    error::Result<audio_hw_sync_t> getHwAvSync() override;
+
     status_t dump(int fd, const Vector<String16>& args) override;
 
   private:
     friend class DevicesFactoryHalHidl;
-    sp<IDevice> mDevice;
-    sp<IPrimaryDevice> mPrimaryDevice;  // Null if it's not a primary device.
+    sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
+    // Null if it's not a primary device.
+    sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice> mPrimaryDevice;
+    bool supportsSetConnectedState7_1 = true;
 
     // Can not be constructed directly by clients.
-    explicit DeviceHalHidl(const sp<IDevice>& device);
+    explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
+    explicit DeviceHalHidl(
+            const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device);
 
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
@@ -135,7 +156,6 @@
     template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICE_HAL_HIDL_H
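
setConnectedState keeps the supportsSetConnectedState7_1 flag declared above so a 7.1 HAL is tried via setConnectedState_7_1 first and, once it answers NOT_SUPPORTED, every later call goes straight to the legacy setConnectedState. The generic form of that "try the newer call once, then latch the fallback" pattern looks roughly like this; the types and names below are illustrative, not taken from the diff.

    #include <functional>

    enum class CallOutcome { Ok, NotSupported, Error };

    class LatchingFallback {
      public:
        CallOutcome invoke(const std::function<CallOutcome()>& newerApi,
                           const std::function<CallOutcome()>& legacyApi) {
            if (mNewerApiSupported) {
                const CallOutcome outcome = newerApi();
                if (outcome != CallOutcome::NotSupported) {
                    return outcome;              // success or a hard error: done
                }
                mNewerApiSupported = false;      // remember the rejection, analogous to
            }                                    // supportsSetConnectedState7_1 = false
            return legacyApi();
        }
      private:
        bool mNewerApiSupported = true;
    };
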
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
deleted file mode 100644
index e0304af..0000000
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DeviceHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "StreamHalLocal.h"
-
-namespace android {
-namespace CPP_VERSION {
-
-DeviceHalLocal::DeviceHalLocal(audio_hw_device_t *dev)
-        : mDev(dev) {
-}
-
-DeviceHalLocal::~DeviceHalLocal() {
-    int status = audio_hw_device_close(mDev);
-    ALOGW_IF(status, "Error closing audio hw device %p: %s", mDev, strerror(-status));
-    mDev = 0;
-}
-
-status_t DeviceHalLocal::getSupportedDevices(uint32_t *devices) {
-    if (mDev->get_supported_devices == NULL) return INVALID_OPERATION;
-    *devices = mDev->get_supported_devices(mDev);
-    return OK;
-}
-
-status_t DeviceHalLocal::initCheck() {
-    return mDev->init_check(mDev);
-}
-
-status_t DeviceHalLocal::setVoiceVolume(float volume) {
-    return mDev->set_voice_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::setMasterVolume(float volume) {
-    if (mDev->set_master_volume == NULL) return INVALID_OPERATION;
-    return mDev->set_master_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::getMasterVolume(float *volume) {
-    if (mDev->get_master_volume == NULL) return INVALID_OPERATION;
-    return mDev->get_master_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::setMode(audio_mode_t mode) {
-    return mDev->set_mode(mDev, mode);
-}
-
-status_t DeviceHalLocal::setMicMute(bool state) {
-    return mDev->set_mic_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::getMicMute(bool *state) {
-    return mDev->get_mic_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::setMasterMute(bool state) {
-    if (mDev->set_master_mute == NULL) return INVALID_OPERATION;
-    return mDev->set_master_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::getMasterMute(bool *state) {
-    if (mDev->get_master_mute == NULL) return INVALID_OPERATION;
-    return mDev->get_master_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::setParameters(const String8& kvPairs) {
-    return mDev->set_parameters(mDev, kvPairs.string());
-}
-
-status_t DeviceHalLocal::getParameters(const String8& keys, String8 *values) {
-    char *halValues = mDev->get_parameters(mDev, keys.string());
-    if (halValues != NULL) {
-        values->setTo(halValues);
-        free(halValues);
-    } else {
-        values->clear();
-    }
-    return OK;
-}
-
-status_t DeviceHalLocal::getInputBufferSize(
-        const struct audio_config *config, size_t *size) {
-    *size = mDev->get_input_buffer_size(mDev, config);
-    return OK;
-}
-
-status_t DeviceHalLocal::openOutputStream(
-        audio_io_handle_t handle,
-        audio_devices_t deviceType,
-        audio_output_flags_t flags,
-        struct audio_config *config,
-        const char *address,
-        sp<StreamOutHalInterface> *outStream) {
-    audio_stream_out_t *halStream;
-    ALOGV("open_output_stream handle: %d devices: %x flags: %#x"
-            "srate: %d format %#x channels %x address %s",
-            handle, deviceType, flags,
-            config->sample_rate, config->format, config->channel_mask,
-            address);
-    int openResut = mDev->open_output_stream(
-            mDev, handle, deviceType, flags, config, &halStream, address);
-    if (openResut == OK) {
-        *outStream = new StreamOutHalLocal(halStream, this);
-    }
-    ALOGV("open_output_stream status %d stream %p", openResut, halStream);
-    return openResut;
-}
-
-status_t DeviceHalLocal::openInputStream(
-        audio_io_handle_t handle,
-        audio_devices_t devices,
-        struct audio_config *config,
-        audio_input_flags_t flags,
-        const char *address,
-        audio_source_t source,
-        audio_devices_t /*outputDevice*/,
-        const char */*outputDeviceAddress*/,
-        sp<StreamInHalInterface> *inStream) {
-    audio_stream_in_t *halStream;
-    ALOGV("open_input_stream handle: %d devices: %x flags: %#x "
-            "srate: %d format %#x channels %x address %s source %d",
-            handle, devices, flags,
-            config->sample_rate, config->format, config->channel_mask,
-            address, source);
-    int openResult = mDev->open_input_stream(
-            mDev, handle, devices, config, &halStream, flags, address, source);
-    if (openResult == OK) {
-        *inStream = new StreamInHalLocal(halStream, this);
-    }
-    ALOGV("open_input_stream status %d stream %p", openResult, inStream);
-    return openResult;
-}
-
-status_t DeviceHalLocal::supportsAudioPatches(bool *supportsPatches) {
-    *supportsPatches = version() >= AUDIO_DEVICE_API_VERSION_3_0;
-    return OK;
-}
-
-status_t DeviceHalLocal::createAudioPatch(
-        unsigned int num_sources,
-        const struct audio_port_config *sources,
-        unsigned int num_sinks,
-        const struct audio_port_config *sinks,
-        audio_patch_handle_t *patch) {
-    if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
-        return mDev->create_audio_patch(
-                mDev, num_sources, sources, num_sinks, sinks, patch);
-    } else {
-        return INVALID_OPERATION;
-    }
-}
-
-status_t DeviceHalLocal::releaseAudioPatch(audio_patch_handle_t patch) {
-    if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
-        return mDev->release_audio_patch(mDev, patch);
-    } else {
-        return INVALID_OPERATION;
-    }
-}
-
-status_t DeviceHalLocal::getAudioPort(struct audio_port *port) {
-    return mDev->get_audio_port(mDev, port);
-}
-
-status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
-#if MAJOR_VERSION >= 7
-    if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
-        // get_audio_port_v7 is mandatory if legacy HAL support this API version.
-        return mDev->get_audio_port_v7(mDev, port);
-    }
-#endif
-    struct audio_port audioPort = {};
-    audio_populate_audio_port(port, &audioPort);
-    status_t status = getAudioPort(&audioPort);
-    if (status == NO_ERROR) {
-        audio_populate_audio_port_v7(&audioPort, port);
-    }
-    return status;
-}
-
-status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
-    if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
-        return mDev->set_audio_port_config(mDev, config);
-    else
-        return INVALID_OPERATION;
-}
-
-#if MAJOR_VERSION == 2
-status_t DeviceHalLocal::getMicrophones(
-        std::vector<media::MicrophoneInfo> *microphones __unused) {
-    return INVALID_OPERATION;
-}
-#elif MAJOR_VERSION >= 4
-status_t DeviceHalLocal::getMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
-    if (mDev->get_microphones == NULL) return INVALID_OPERATION;
-    size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
-    audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
-    status_t status = mDev->get_microphones(mDev, &mic_array[0], &actual_mics);
-    for (size_t i = 0; i < actual_mics; i++) {
-        media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
-        microphones->push_back(microphoneInfo);
-    }
-    return status;
-}
-#endif
-
-// Local HAL implementation does not support effects
-status_t DeviceHalLocal::addDeviceEffect(
-        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
-    return INVALID_OPERATION;
-}
-
-status_t DeviceHalLocal::removeDeviceEffect(
-        audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
-    return INVALID_OPERATION;
-}
-
-status_t DeviceHalLocal::dump(int fd, const Vector<String16>& /* args */) {
-    return mDev->dump(mDev, fd);
-}
-
-void DeviceHalLocal::closeOutputStream(struct audio_stream_out *stream_out) {
-    mDev->close_output_stream(mDev, stream_out);
-}
-
-void DeviceHalLocal::closeInputStream(struct audio_stream_in *stream_in) {
-    mDev->close_input_stream(mDev, stream_in);
-}
-
-} // namespace CPP_VERSION
-} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
deleted file mode 100644
index 2fde936..0000000
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
-#define ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
-
-#include <hardware/audio.h>
-#include <media/audiohal/DeviceHalInterface.h>
-
-namespace android {
-namespace CPP_VERSION {
-
-class DeviceHalLocal : public DeviceHalInterface
-{
-  public:
-    // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
-    virtual status_t getSupportedDevices(uint32_t *devices);
-
-    // Check to see if the audio hardware interface has been initialized.
-    virtual status_t initCheck();
-
-    // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
-    virtual status_t setVoiceVolume(float volume);
-
-    // Set the audio volume for all audio activities other than voice call.
-    virtual status_t setMasterVolume(float volume);
-
-    // Get the current master volume value for the HAL.
-    virtual status_t getMasterVolume(float *volume);
-
-    // Called when the audio mode changes.
-    virtual status_t setMode(audio_mode_t mode);
-
-    // Muting control.
-    virtual status_t setMicMute(bool state);
-    virtual status_t getMicMute(bool *state);
-    virtual status_t setMasterMute(bool state);
-    virtual status_t getMasterMute(bool *state);
-
-    // Set global audio parameters.
-    virtual status_t setParameters(const String8& kvPairs);
-
-    // Get global audio parameters.
-    virtual status_t getParameters(const String8& keys, String8 *values);
-
-    // Returns audio input buffer size according to parameters passed.
-    virtual status_t getInputBufferSize(const struct audio_config *config,
-            size_t *size);
-
-    // Creates and opens the audio hardware output stream. The stream is closed
-    // by releasing all references to the returned object.
-    virtual status_t openOutputStream(
-            audio_io_handle_t handle,
-            audio_devices_t devices,
-            audio_output_flags_t flags,
-            struct audio_config *config,
-            const char *address,
-            sp<StreamOutHalInterface> *outStream);
-
-    // Creates and opens the audio hardware input stream. The stream is closed
-    // by releasing all references to the returned object.
-    virtual status_t openInputStream(
-            audio_io_handle_t handle,
-            audio_devices_t devices,
-            struct audio_config *config,
-            audio_input_flags_t flags,
-            const char *address,
-            audio_source_t source,
-            audio_devices_t outputDevice,
-            const char *outputDeviceAddress,
-            sp<StreamInHalInterface> *inStream);
-
-    // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
-    virtual status_t supportsAudioPatches(bool *supportsPatches);
-
-    // Creates an audio patch between several source and sink ports.
-    virtual status_t createAudioPatch(
-            unsigned int num_sources,
-            const struct audio_port_config *sources,
-            unsigned int num_sinks,
-            const struct audio_port_config *sinks,
-            audio_patch_handle_t *patch);
-
-    // Releases an audio patch.
-    virtual status_t releaseAudioPatch(audio_patch_handle_t patch);
-
-    // Fills the list of supported attributes for a given audio port.
-    virtual status_t getAudioPort(struct audio_port *port);
-
-    // Fills the list of supported attributes for a given audio port.
-    virtual status_t getAudioPort(struct audio_port_v7 *port);
-
-    // Set audio port configuration.
-    virtual status_t setAudioPortConfig(const struct audio_port_config *config);
-
-    // List microphones
-    virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
-
-    status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
-    status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
-
-    status_t dump(int fd, const Vector<String16>& args) override;
-
-    void closeOutputStream(struct audio_stream_out *stream_out);
-    void closeInputStream(struct audio_stream_in *stream_in);
-
-    uint32_t version() const { return mDev->common.version; }
-
-  private:
-    audio_hw_device_t *mDev;
-
-    friend class DevicesFactoryHalLocal;
-
-    // Can not be constructed directly by clients.
-    explicit DeviceHalLocal(audio_hw_device_t *dev);
-
-    // The destructor automatically closes the device.
-    virtual ~DeviceHalLocal();
-};
-
-} // namespace CPP_VERSION
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 1c0eacb..4069a6b 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -26,19 +26,17 @@
 #include <media/audiohal/hidl/HalDeathHandler.h>
 #include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
 #include "DeviceHalHidl.h"
 #include "DevicesFactoryHalHidl.h"
 
 using ::android::hardware::audio::CPP_VERSION::IDevice;
-using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::hidl::manager::V1_0::IServiceManager;
 using ::android::hidl::manager::V1_0::IServiceNotification;
 
 namespace android {
-namespace CPP_VERSION {
 
 class ServiceNotificationListener : public IServiceNotification {
   public:
@@ -115,14 +113,37 @@
     if (status != OK) return status;
     Result retval = Result::NOT_INITIALIZED;
     for (const auto& factory : factories) {
-        Return<void> ret = factory->openDevice(
-                hidlId,
-                [&](Result r, const sp<IDevice>& result) {
-                    retval = r;
-                    if (retval == Result::OK) {
-                        *device = new DeviceHalHidl(result);
-                    }
-                });
+        Return<void> ret;
+        if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_PRIMARY) == 0) {
+            // In V7.1 it's not possible to cast IDevice back to IPrimaryDevice,
+            // thus openPrimaryDevice must be used.
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+            ret = factory->openPrimaryDevice_7_1(
+#else
+            ret = factory->openPrimaryDevice(
+#endif
+                    [&](Result r,
+                        const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& result) {
+                        retval = r;
+                        if (retval == Result::OK) {
+                            *device = new DeviceHalHidl(result);
+                        }
+                    });
+        } else {
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+            ret = factory->openDevice_7_1(
+#else
+            ret = factory->openDevice(
+#endif
+                    hidlId,
+                    [&](Result r,
+                        const sp<::android::hardware::audio::CPP_VERSION::IDevice>& result) {
+                        retval = r;
+                        if (retval == Result::OK) {
+                            *device = new DeviceHalHidl(result);
+                        }
+                    });
+        }
         if (!ret.isOk()) return FAILED_TRANSACTION;
         switch (retval) {
             // Device was found and was initialized successfully.
@@ -178,7 +199,8 @@
     return NO_ERROR;
 }
 
-void DevicesFactoryHalHidl::addDeviceFactory(sp<IDevicesFactory> factory, bool needToNotify) {
+void DevicesFactoryHalHidl::addDeviceFactory(
+        sp<::android::hardware::audio::CPP_VERSION::IDevicesFactory> factory, bool needToNotify) {
     // It is assumed that the DevicesFactoryHalInterface instance is owned
     // by AudioFlinger and thus have the same lifespan.
     factory->linkToDeath(HalDeathHandler::getInstance(), 0 /*cookie*/);
@@ -198,10 +220,16 @@
     }
 }
 
-std::vector<sp<IDevicesFactory>> DevicesFactoryHalHidl::copyDeviceFactories() {
+std::vector<sp<::android::hardware::audio::CPP_VERSION::IDevicesFactory>>
+        DevicesFactoryHalHidl::copyDeviceFactories() {
     std::lock_guard<std::mutex> lock(mLock);
     return mDeviceFactories;
 }
 
-} // namespace CPP_VERSION
+// Main entry-point to the shared library.
+extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
+    auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
+    return service ? new DevicesFactoryHalHidl(service) : nullptr;
+}
+
 } // namespace android
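
createIDevicesFactory replaces the entry point that previously lived in DevicesFactoryHalHybrid.cpp and now returns a DevicesFactoryHalHidl directly. The comment marks it as the shared library's main entry point, so presumably the framework side resolves it dynamically from the versioned libaudiohal library. A hedged sketch of such a loader follows; the library name and error handling are assumptions, not taken from this diff.

    #include <dlfcn.h>
    #include <cstdio>

    void* loadDevicesFactory(const char* libraryName) {
        void* handle = dlopen(libraryName, RTLD_NOW);
        if (handle == nullptr) {
            std::fprintf(stderr, "dlopen(%s) failed: %s\n", libraryName, dlerror());
            return nullptr;
        }
        using EntryPoint = void* (*)();
        auto entry = reinterpret_cast<EntryPoint>(dlsym(handle, "createIDevicesFactory"));
        if (entry == nullptr) {
            std::fprintf(stderr, "createIDevicesFactory not found: %s\n", dlerror());
            dlclose(handle);
            return nullptr;
        }
        return entry();  // factory instance on success, nullptr if getService() failed
    }
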
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 6f84efe..ffd229d 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -30,7 +30,6 @@
 using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
 
 namespace android {
-namespace CPP_VERSION {
 
 class DevicesFactoryHalHidl : public DevicesFactoryHalInterface
 {
@@ -46,6 +45,8 @@
 
     status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
 
+    float getHalVersion() const override { return MAJOR_VERSION + (float)MINOR_VERSION / 10; }
+
   private:
     friend class ServiceNotificationListener;
     void addDeviceFactory(sp<IDevicesFactory> factory, bool needToNotify);
@@ -59,7 +60,6 @@
     virtual ~DevicesFactoryHalHidl() = default;
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
deleted file mode 100644
index cde8d85..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DevicesFactoryHalHybrid"
-//#define LOG_NDEBUG 0
-
-#include "DevicesFactoryHalHidl.h"
-#include "DevicesFactoryHalHybrid.h"
-#include "DevicesFactoryHalLocal.h"
-
-namespace android {
-namespace CPP_VERSION {
-
-DevicesFactoryHalHybrid::DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory)
-        : mLocalFactory(new DevicesFactoryHalLocal()),
-          mHidlFactory(new DevicesFactoryHalHidl(hidlFactory)) {
-}
-
-status_t DevicesFactoryHalHybrid::openDevice(const char *name, sp<DeviceHalInterface> *device) {
-    if (mHidlFactory != 0 && strcmp(AUDIO_HARDWARE_MODULE_ID_A2DP, name) != 0 &&
-        strcmp(AUDIO_HARDWARE_MODULE_ID_HEARING_AID, name) != 0) {
-        return mHidlFactory->openDevice(name, device);
-    }
-    return mLocalFactory->openDevice(name, device);
-}
-
-status_t DevicesFactoryHalHybrid::getHalPids(std::vector<pid_t> *pids) {
-    if (mHidlFactory != 0) {
-        return mHidlFactory->getHalPids(pids);
-    }
-    return INVALID_OPERATION;
-}
-
-status_t DevicesFactoryHalHybrid::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
-    if (mHidlFactory) {
-        return mHidlFactory->setCallbackOnce(callback);
-    }
-    return INVALID_OPERATION;
-}
-
-} // namespace CPP_VERSION
-
-extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
-    auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
-    return service ? new CPP_VERSION::DevicesFactoryHalHybrid(service) : nullptr;
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
deleted file mode 100644
index 568a1fb..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
-
-#include PATH(android/hardware/audio/FILE_VERSION/IDevicesFactory.h)
-#include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
-
-namespace android {
-namespace CPP_VERSION {
-
-class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
-{
-  public:
-    DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory);
-
-    // Opens a device with the specified name. To close the device, it is
-    // necessary to release references to the returned object.
-    virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
-
-            status_t getHalPids(std::vector<pid_t> *pids) override;
-
-            status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
-
-  private:
-    sp<DevicesFactoryHalInterface> mLocalFactory;
-    sp<DevicesFactoryHalInterface> mHidlFactory;
-};
-
-} // namespace CPP_VERSION
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
deleted file mode 100644
index af67ff5..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DevicesFactoryHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <string.h>
-
-#include <hardware/audio.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "DevicesFactoryHalLocal.h"
-
-namespace android {
-namespace CPP_VERSION {
-
-static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
-{
-    const hw_module_t *mod;
-    int rc;
-
-    rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
-    if (rc) {
-        ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__,
-                AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
-        goto out;
-    }
-    rc = audio_hw_device_open(mod, dev);
-    if (rc) {
-        ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__,
-                AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
-        goto out;
-    }
-    if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
-        ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
-        rc = BAD_VALUE;
-        audio_hw_device_close(*dev);
-        goto out;
-    }
-    return OK;
-
-out:
-    *dev = NULL;
-    return rc;
-}
-
-status_t DevicesFactoryHalLocal::openDevice(const char *name, sp<DeviceHalInterface> *device) {
-    audio_hw_device_t *dev;
-    status_t rc = load_audio_interface(name, &dev);
-    if (rc == OK) {
-        *device = new DeviceHalLocal(dev);
-    }
-    return rc;
-}
-
-} // namespace CPP_VERSION
-} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.h b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
deleted file mode 100644
index 32bf362..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
-
-#include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-#include "DeviceHalLocal.h"
-
-namespace android {
-namespace CPP_VERSION {
-
-class DevicesFactoryHalLocal : public DevicesFactoryHalInterface
-{
-  public:
-    // Opens a device with the specified name. To close the device, it is
-    // necessary to release references to the returned object.
-    virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
-
-            status_t getHalPids(std::vector<pid_t> *pids __unused) override {
-                return INVALID_OPERATION;
-            }
-
-            status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback __unused) override {
-                return INVALID_OPERATION;
-            }
-
-  private:
-    friend class DevicesFactoryHalHybrid;
-
-    // Can not be constructed directly by clients.
-    DevicesFactoryHalLocal() {}
-
-    virtual ~DevicesFactoryHalLocal() {}
-};
-
-} // namespace CPP_VERSION
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.cpp b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
index 5367972..9d5f72e 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
@@ -23,7 +23,6 @@
 #include <hidlmemory/mapping.h>
 #include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
 #include "EffectBufferHalHidl.h"
 
 using ::android::hardware::Return;
@@ -31,7 +30,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 // static
 uint64_t EffectBufferHalHidl::makeUniqueId() {
@@ -144,5 +142,4 @@
 }
 
 } // namespace effect
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.h b/media/libaudiohal/impl/EffectBufferHalHidl.h
index 4826813..a9df68b 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.h
@@ -28,7 +28,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
@@ -74,7 +73,6 @@
     status_t init();
 };
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
 
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.cpp b/media/libaudiohal/impl/EffectConversionHelperHidl.cpp
new file mode 100644
index 0000000..9e4f79c
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HalHidl"
+#include <utils/Log.h>
+
+#include "EffectConversionHelperHidl.h"
+
+namespace android {
+
+EffectConversionHelperHidl::EffectConversionHelperHidl(std::string_view className)
+        : ConversionHelperHidl<EffectResult>(className, analyzeResult) {
+}
+
+// static
+status_t EffectConversionHelperHidl::analyzeResult(const EffectResult& result) {
+    switch (result) {
+        case EffectResult::OK: return OK;
+        case EffectResult::INVALID_ARGUMENTS: return BAD_VALUE;
+        case EffectResult::INVALID_STATE: return NOT_ENOUGH_DATA;
+        case EffectResult::NOT_INITIALIZED: return NO_INIT;
+        case EffectResult::NOT_SUPPORTED: return INVALID_OPERATION;
+        case EffectResult::RESULT_TOO_BIG: return NO_MEMORY;
+    }
+    return NO_INIT;
+}
+
+}  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.h b/media/libaudiohal/impl/EffectConversionHelperHidl.h
new file mode 100644
index 0000000..4371d12
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
+#define ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
+
+#include "ConversionHelperHidl.h"
+
+#include PATH(android/hardware/audio/effect/FILE_VERSION/types.h)
+
+using EffectResult = ::android::hardware::audio::effect::CPP_VERSION::Result;
+
+namespace android {
+
+class EffectConversionHelperHidl : public ConversionHelperHidl<EffectResult> {
+  protected:
+    static status_t analyzeResult(const EffectResult& result);
+
+    EffectConversionHelperHidl(std::string_view className);
+};
+
+}  // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 51ad146..fdfe225 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -21,6 +21,7 @@
 #include <cutils/native_handle.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/EffectsFactoryApi.h>
+#include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
 
 #include <util/EffectUtils.h>
@@ -36,13 +37,16 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
+#define TIME_CHECK() auto timeCheck = \
+        mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
 EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
-        : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
+        : EffectConversionHelperHidl("EffectHalHidl"),
+          mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
     effect_descriptor_t halDescriptor{};
     if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
         mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
@@ -60,20 +64,9 @@
     }
 }
 
-// static
-status_t EffectHalHidl::analyzeResult(const Result& result) {
-    switch (result) {
-        case Result::OK: return OK;
-        case Result::INVALID_ARGUMENTS: return BAD_VALUE;
-        case Result::INVALID_STATE: return NOT_ENOUGH_DATA;
-        case Result::NOT_INITIALIZED: return NO_INIT;
-        case Result::NOT_SUPPORTED: return INVALID_OPERATION;
-        case Result::RESULT_TOO_BIG: return NO_MEMORY;
-        default: return NO_INIT;
-    }
-}
-
 status_t EffectHalHidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    TIME_CHECK();
+
     if (!mBuffersChanged) {
         if (buffer.get() == nullptr || mInBuffer.get() == nullptr) {
             mBuffersChanged = buffer.get() != mInBuffer.get();
@@ -86,6 +79,8 @@
 }
 
 status_t EffectHalHidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+    TIME_CHECK();
+
     if (!mBuffersChanged) {
         if (buffer.get() == nullptr || mOutBuffer.get() == nullptr) {
             mBuffersChanged = buffer.get() != mOutBuffer.get();
@@ -98,10 +93,14 @@
 }
 
 status_t EffectHalHidl::process() {
+    TIME_CHECK();
+
     return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS));
 }
 
 status_t EffectHalHidl::processReverse() {
+    TIME_CHECK();
+
     return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE));
 }
 
@@ -184,6 +183,8 @@
 
 status_t EffectHalHidl::command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
         uint32_t *replySize, void *pReplyData) {
+    TIME_CHECK();
+
     if (mEffect == 0) return NO_INIT;
 
     // Special cases.
@@ -215,6 +216,8 @@
 }
 
 status_t EffectHalHidl::getDescriptor(effect_descriptor_t *pDescriptor) {
+    TIME_CHECK();
+
     if (mEffect == 0) return NO_INIT;
     Result retval = Result::NOT_INITIALIZED;
     Return<void> ret = mEffect->getDescriptor(
@@ -228,12 +231,16 @@
 }
 
 status_t EffectHalHidl::close() {
+    TIME_CHECK();
+
     if (mEffect == 0) return NO_INIT;
     Return<Result> ret = mEffect->close();
     return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
 }
 
 status_t EffectHalHidl::dump(int fd) {
+    TIME_CHECK();
+
     if (mEffect == 0) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
     hidlHandle->data[0] = fd;
@@ -310,6 +317,5 @@
     return result;
 }
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
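
effectId() becomes virtual in the EffectHalHidl.h hunk that follows, which is what allowed the earlier DeviceHalHidl hunks to call effect->effectId() without the old static_cast<EffectHalHidl*> downcast; presumably the pure-virtual declaration now lives on EffectHalInterface in a hunk outside this section. A simplified illustration of the change, with stand-in types:

    #include <cstdint>
    #include <memory>

    // Stand-ins; the real classes live in EffectHalInterface.h and EffectHalHidl.h.
    struct EffectHalInterfaceSketch {
        virtual ~EffectHalInterfaceSketch() = default;
        virtual uint64_t effectId() const = 0;   // assumed to be declared on the interface
    };

    struct EffectHalHidlSketch : EffectHalInterfaceSketch {
        explicit EffectHalHidlSketch(uint64_t id) : mId(id) {}
        uint64_t effectId() const override { return mId; }
      private:
        const uint64_t mId;
    };

    // Before: static_cast<EffectHalHidl*>(effect.get())->effectId();
    // After:  effect->effectId();  // no implementation-specific downcast
    uint64_t idOf(const std::shared_ptr<EffectHalInterfaceSketch>& effect) {
        return effect->effectId();
    }
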
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 8e46638..e139768 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -23,16 +23,17 @@
 #include <fmq/MessageQueue.h>
 #include <system/audio_effect.h>
 
+#include "EffectConversionHelperHidl.h"
+
 using ::android::hardware::EventFlag;
 using ::android::hardware::MessageQueue;
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-class EffectHalHidl : public EffectHalInterface
+class EffectHalHidl : public EffectHalInterface, public EffectConversionHelperHidl
 {
   public:
     // Set the input buffer.
@@ -63,7 +64,7 @@
 
     virtual status_t dump(int fd);
 
-    uint64_t effectId() const { return mEffectId; }
+    virtual uint64_t effectId() const { return mEffectId; }
 
   private:
     friend class EffectsFactoryHalHidl;
@@ -78,8 +79,6 @@
     EventFlag* mEfGroup;
     bool mIsInput = false;
 
-    static status_t analyzeResult(const Result& result);
-
     // Can not be constructed directly by clients.
     EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
 
@@ -96,7 +95,6 @@
     status_t setProcessBuffers();
 };
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
 
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index ffe0d72..d7217fc 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -21,8 +21,9 @@
 
 #include <UuidUtils.h>
 #include <util/EffectUtils.h>
+#include <utils/Log.h>
 
-#include "ConversionHelperHidl.h"
+#include "EffectConversionHelperHidl.h"
 #include "EffectBufferHalHidl.h"
 #include "EffectHalHidl.h"
 #include "EffectsFactoryHalHidl.h"
@@ -33,13 +34,12 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
 EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
-        : ConversionHelperHidl("EffectsFactory") {
+        : EffectConversionHelperHidl("EffectsFactory") {
     ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
     mEffectsFactory = effectsFactory;
 }
@@ -204,12 +204,14 @@
     return EffectBufferHalHidl::mirror(external, size, buffer);
 }
 
-} // namespace CPP_VERSION
 } // namespace effect
 
-extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
+// When a shared library is built from a static library, even explicitly
+// exported symbols from the static library are optimized out unless they are
+// actually referenced by the shared library. See EffectsFactoryHalHidlEntry.cpp.
+extern "C" void* createIEffectsFactoryImpl() {
     auto service = hardware::audio::effect::CPP_VERSION::IEffectsFactory::getService();
-    return service ? new effect::CPP_VERSION::EffectsFactoryHalHidl(service) : nullptr;
+    return service ? new effect::EffectsFactoryHalHidl(service) : nullptr;
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index ff26d9f..e1882e1 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -20,17 +20,15 @@
 #include PATH(android/hardware/audio/effect/FILE_VERSION/IEffectsFactory.h)
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 
-#include "ConversionHelperHidl.h"
+#include "EffectConversionHelperHidl.h"
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using ::android::hardware::hidl_vec;
-using ::android::CPP_VERSION::ConversionHelperHidl;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
-class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public ConversionHelperHidl
+class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public EffectConversionHelperHidl
 {
   public:
     EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory);
@@ -70,7 +68,6 @@
     status_t queryAllDescriptors();
 };
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
 
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
similarity index 71%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
copy to media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
index 9e04e82..2c6f2c6 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,14 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package android.media;
 
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioEncapsulationMode {
-     NONE = 0,
-     ELEMENTARY_STREAM = 1,
-     HANDLE = 2,
+extern "C" void* createIEffectsFactoryImpl();
+
+extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
+    return createIEffectsFactoryImpl();
 }
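
The change above works around a common linker pitfall: when a shared library links against a static library, archive members that nothing in the shared library references are never pulled in, so even symbols marked with default visibility vanish from the final .so. A minimal sketch of that pattern with hypothetical names (these are illustrative, not part of this change):

    // impl.cpp -- compiled into the static library; the real factory lives here.
    extern "C" void* createWidgetImpl() {
        return new int(42);  // stand-in for the real factory object
    }

    // entry.cpp -- compiled directly into the shared library. Referencing the
    // impl symbol forces the linker to pull in the archive member, and the
    // visibility attribute keeps the wrapper exported from the .so.
    extern "C" void* createWidgetImpl();

    extern "C" __attribute__((visibility("default"))) void* createWidget() {
        return createWidgetImpl();
    }
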
diff --git a/media/libaudiohal/impl/ParameterUtils.h b/media/libaudiohal/impl/ParameterUtils.h
index 9cab72e..b5dcb9d 100644
--- a/media/libaudiohal/impl/ParameterUtils.h
+++ b/media/libaudiohal/impl/ParameterUtils.h
@@ -16,17 +16,16 @@
 
 #pragma once
 
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
 #include <hidl/HidlSupport.h>
 
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
 using ::android::hardware::Return;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::hidl_string;
 
 namespace android {
-namespace CPP_VERSION {
 namespace utils {
 
 #if MAJOR_VERSION == 2
@@ -56,5 +55,4 @@
 #endif
 
 } // namespace utils
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index e63aded..021ec51 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -20,35 +20,37 @@
 #include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
+#include <mediautils/memory.h>
 #include <mediautils/SchedulingPolicyService.h>
+#include <mediautils/TimeCheck.h>
 #include <utils/Log.h>
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutCallback.h)
 #include <HidlUtils.h>
 #include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
-#include "EffectHalHidl.h"
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
-using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
+using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 
 namespace android {
-namespace CPP_VERSION {
 
-using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
-using ReadCommand = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadCommand;
+using ReadCommand = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadCommand;
 
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
-StreamHalHidl::StreamHalHidl(IStream *stream)
-        : ConversionHelperHidl("Stream"),
+#define TIME_CHECK() auto TimeCheck = \
+       mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
+StreamHalHidl::StreamHalHidl(std::string_view className, IStream *stream)
+        : CoreConversionHelperHidl(className),
           mStream(stream),
           mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT),
           mCachedBufferSize(0){
@@ -69,6 +71,7 @@
 }
 
 status_t StreamHalHidl::getBufferSize(size_t *size) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
     if (status == OK) {
@@ -78,6 +81,7 @@
 }
 
 status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+    TIME_CHECK();
     *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
     if (!mStream) return NO_INIT;
 #if MAJOR_VERSION <= 6
@@ -107,6 +111,7 @@
 }
 
 status_t StreamHalHidl::setParameters(const String8& kvPairs) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     hidl_vec<ParameterValue> hidlParams;
     status_t status = parametersFromHal(kvPairs, &hidlParams);
@@ -116,6 +121,7 @@
 }
 
 status_t StreamHalHidl::getParameters(const String8& keys, String8 *values) {
+    TIME_CHECK();
     values->clear();
     if (!mStream) return NO_INIT;
     hidl_vec<hidl_string> hidlKeys;
@@ -136,23 +142,25 @@
 }
 
 status_t StreamHalHidl::addEffect(sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return processReturn("addEffect", mStream->addEffect(
-                    static_cast<EffectHalHidl*>(effect.get())->effectId()));
+    return processReturn("addEffect", mStream->addEffect(effect->effectId()));
 }
 
 status_t StreamHalHidl::removeEffect(sp<EffectHalInterface> effect) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return processReturn("removeEffect", mStream->removeEffect(
-                    static_cast<EffectHalHidl*>(effect.get())->effectId()));
+    return processReturn("removeEffect", mStream->removeEffect(effect->effectId()));
 }
 
 status_t StreamHalHidl::standby() {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("standby", mStream->standby());
 }
 
 status_t StreamHalHidl::dump(int fd, const Vector<String16>& args) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
     hidlHandle->data[0] = fd;
@@ -177,17 +185,20 @@
 }
 
 status_t StreamHalHidl::start() {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("start", mStream->start());
 }
 
 status_t StreamHalHidl::stop() {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("stop", mStream->stop());
 }
 
 status_t StreamHalHidl::createMmapBuffer(int32_t minSizeFrames,
                                   struct audio_mmap_buffer_info *info) {
+    TIME_CHECK();
     Result retval;
     Return<void> ret = mStream->createMmapBuffer(
             minSizeFrames,
@@ -220,6 +231,7 @@
 }
 
 status_t StreamHalHidl::getMmapPosition(struct audio_mmap_position *position) {
+    TIME_CHECK();
     Result retval;
     Return<void> ret = mStream->getMmapPosition(
             [&](Result r, const MmapPosition& hidlPosition) {
@@ -248,7 +260,7 @@
 status_t StreamHalHidl::getHalPid(pid_t *pid) {
     using ::android::hidl::base::V1_0::DebugInfo;
     using ::android::hidl::manager::V1_0::IServiceManager;
-
+    TIME_CHECK();
     DebugInfo debugInfo;
     auto ret = mStream->getDebugInfo([&] (const auto &info) {
         debugInfo = info;
@@ -276,6 +288,34 @@
     return err == 0;
 }
 
+status_t StreamHalHidl::legacyCreateAudioPatch(const struct audio_port_config& port,
+                                               std::optional<audio_source_t> source,
+                                               audio_devices_t type) {
+    TIME_CHECK();
+    LOG_ALWAYS_FATAL_IF(port.type != AUDIO_PORT_TYPE_DEVICE, "port type must be device");
+    unique_malloced_ptr<char> address;
+    if (strcmp(port.ext.device.address, "") != 0) {
+        // FIXME: we only support address on first sink with HAL version < 3.0
+        address.reset(
+                audio_device_address_to_parameter(port.ext.device.type, port.ext.device.address));
+    } else {
+        address.reset((char*)calloc(1, 1));
+    }
+    AudioParameter param = AudioParameter(String8(address.get()));
+    param.addInt(String8(AudioParameter::keyRouting), (int)type);
+    if (source.has_value()) {
+        param.addInt(String8(AudioParameter::keyInputSource), (int)source.value());
+    }
+    return setParameters(param.toString());
+}
+
+status_t StreamHalHidl::legacyReleaseAudioPatch() {
+    TIME_CHECK();
+    AudioParameter param;
+    param.addInt(String8(AudioParameter::keyRouting), 0);
+    return setParameters(param.toString());
+}
+
 namespace {
 
 /* Notes on callback ownership.
@@ -328,8 +368,10 @@
 
 }  // namespace
 
-StreamOutHalHidl::StreamOutHalHidl(const sp<IStreamOut>& stream)
-        : StreamHalHidl(stream.get()), mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
+StreamOutHalHidl::StreamOutHalHidl(
+        const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream)
+        : StreamHalHidl("StreamOutHalHidl", stream.get())
+        , mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
 }
 
 StreamOutHalHidl::~StreamOutHalHidl() {
@@ -353,11 +395,13 @@
 }
 
 status_t StreamOutHalHidl::getFrameSize(size_t *size) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("getFrameSize", mStream->getFrameSize(), size);
 }
 
 status_t StreamOutHalHidl::getLatency(uint32_t *latency) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     if (mWriterClient == gettid() && mCommandMQ) {
         return callWriterThread(
@@ -371,12 +415,14 @@
 }
 
 status_t StreamOutHalHidl::setVolume(float left, float right) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("setVolume", mStream->setVolume(left, right));
 }
 
 #if MAJOR_VERSION == 2
 status_t StreamOutHalHidl::selectPresentation(int presentationId, int programId) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     std::vector<ParameterValue> parameters;
     String8 halParameters;
@@ -387,6 +433,7 @@
 }
 #elif MAJOR_VERSION >= 4
 status_t StreamOutHalHidl::selectPresentation(int presentationId, int programId) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("selectPresentation",
             mStream->selectPresentation(presentationId, programId));
@@ -394,6 +441,7 @@
 #endif
 
 status_t StreamOutHalHidl::write(const void *buffer, size_t bytes, size_t *written) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     *written = 0;
 
@@ -539,6 +587,7 @@
 }
 
 status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getRenderPosition(
@@ -552,6 +601,7 @@
 }
 
 status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getNextWriteTimestamp(
@@ -565,6 +615,7 @@
 }
 
 status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     status_t status = processReturn(
             "setCallback", mStream->setCallback(new StreamOutCallback(this)));
@@ -575,6 +626,7 @@
 }
 
 status_t StreamOutHalHidl::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Return<void> ret = mStream->supportsPauseAndResume(
             [&](bool p, bool r) {
@@ -585,32 +637,38 @@
 }
 
 status_t StreamOutHalHidl::pause() {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("pause", mStream->pause());
 }
 
 status_t StreamOutHalHidl::resume() {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("pause", mStream->resume());
 }
 
 status_t StreamOutHalHidl::supportsDrain(bool *supportsDrain) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("supportsDrain", mStream->supportsDrain(), supportsDrain);
 }
 
 status_t StreamOutHalHidl::drain(bool earlyNotify) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "drain", mStream->drain(earlyNotify ? AudioDrain::EARLY_NOTIFY : AudioDrain::ALL));
 }
 
 status_t StreamOutHalHidl::flush() {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("pause", mStream->flush());
 }
 
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     if (mWriterClient == gettid() && mCommandMQ) {
         return callWriterThread(
@@ -644,7 +702,12 @@
 #elif MAJOR_VERSION >= 4
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
-    CPP_VERSION::SourceMetadata hidlMetadata;
+    TIME_CHECK();
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
     if (status_t status = CoreUtils::sourceMetadataFromHalV7(
                     sourceMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
             status != OK) {
@@ -686,9 +749,11 @@
     // Codec format callback is supported starting from audio HAL V6.0
     return INVALID_OPERATION;
 }
+
 #else
 
 status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getDualMonoMode(
@@ -702,12 +767,14 @@
 }
 
 status_t StreamOutHalHidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setDualMonoMode", mStream->setDualMonoMode(static_cast<DualMonoMode>(mode)));
 }
 
 status_t StreamOutHalHidl::getAudioDescriptionMixLevel(float* leveldB) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getAudioDescriptionMixLevel(
@@ -721,12 +788,14 @@
 }
 
 status_t StreamOutHalHidl::setAudioDescriptionMixLevel(float leveldB) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setAudioDescriptionMixLevel", mStream->setAudioDescriptionMixLevel(leveldB));
 }
 
 status_t StreamOutHalHidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getPlaybackRateParameters(
@@ -747,6 +816,7 @@
 }
 
 status_t StreamOutHalHidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn(
             "setPlaybackRateParameters", mStream->setPlaybackRateParameters(
@@ -755,7 +825,7 @@
                     static_cast<TimestretchFallbackMode>(playbackRate.mFallbackMode)}));
 }
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutEventCallback.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutEventCallback.h)
 
 namespace {
 
@@ -781,6 +851,7 @@
 
 status_t StreamOutHalHidl::setEventCallback(
         const sp<StreamOutHalInterfaceEventCallback>& callback) {
+    TIME_CHECK();
     if (mStream == nullptr) return NO_INIT;
     mEventCallback = callback;
     status_t status = processReturn(
@@ -791,6 +862,87 @@
 }
 #endif
 
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+using hardware::audio::V7_1::LatencyMode;
+
+status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode) {
+    TIME_CHECK();
+    if (mStream == 0) return NO_INIT;
+    return processReturn(
+            "setLatencyMode", mStream->setLatencyMode(static_cast<LatencyMode>(mode)));
+};
+
+status_t StreamOutHalHidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
+    TIME_CHECK();
+    if (!mStream) return NO_INIT;
+    Result retval;
+    Return<void> ret = mStream->getRecommendedLatencyModes(
+            [&](Result r, hidl_vec<LatencyMode> hidlModes) {
+        retval = r;
+        for (size_t i = 0; i < hidlModes.size(); i++) {
+            modes->push_back(static_cast<audio_latency_mode_t>(hidlModes[i]));
+        }
+    });
+    return processReturn("getRecommendedLatencyModes", ret, retval);
+};
+
+#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutLatencyModeCallback.h)
+
+using hardware::audio::V7_1::IStreamOutLatencyModeCallback;
+
+namespace {
+struct StreamOutLatencyModeCallback : public IStreamOutLatencyModeCallback {
+    StreamOutLatencyModeCallback(const wp<StreamOutHalHidl>& stream) : mStream(stream) {}
+
+    // IStreamOutLatencyModeCallback implementation
+    Return<void> onRecommendedLatencyModeChanged(const hidl_vec<LatencyMode>& hidlModes) override {
+        sp<StreamOutHalHidl> stream = mStream.promote();
+        if (stream != nullptr) {
+            std::vector<audio_latency_mode_t> modes;
+            for (size_t i = 0; i < hidlModes.size(); i++) {
+                modes.push_back(static_cast<audio_latency_mode_t>(hidlModes[i]));
+            }
+            stream->onRecommendedLatencyModeChanged(modes);
+        }
+        return Void();
+    }
+
+  private:
+    wp<StreamOutHalHidl> mStream;
+};
+}  // namespace
+
+status_t StreamOutHalHidl::setLatencyModeCallback(
+        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
+    TIME_CHECK();
+
+    if (mStream == nullptr) return NO_INIT;
+    mLatencyModeCallback = callback;
+    status_t status = processReturn(
+            "setLatencyModeCallback",
+            mStream->setLatencyModeCallback(
+                    callback.get() == nullptr ? nullptr : new StreamOutLatencyModeCallback(this)));
+    return status;
+};
+
+#else
+
+status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode __unused) {
+    return INVALID_OPERATION;
+};
+
+status_t StreamOutHalHidl::getRecommendedLatencyModes(
+        std::vector<audio_latency_mode_t> *modes __unused) {
+    return INVALID_OPERATION;
+};
+
+status_t StreamOutHalHidl::setLatencyModeCallback(
+        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) {
+    return INVALID_OPERATION;
+};
+
+#endif
+
 void StreamOutHalHidl::onWriteReady() {
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
@@ -819,9 +971,23 @@
     callback->onCodecFormatChanged(metadataBs);
 }
 
+void StreamOutHalHidl::onRecommendedLatencyModeChanged(
+        const std::vector<audio_latency_mode_t>& modes) {
+    sp<StreamOutHalInterfaceLatencyModeCallback> callback = mLatencyModeCallback.load().promote();
+    if (callback == nullptr) return;
+    callback->onRecommendedLatencyModeChanged(modes);
+}
 
-StreamInHalHidl::StreamInHalHidl(const sp<IStreamIn>& stream)
-        : StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
+status_t StreamOutHalHidl::exit() {
+    // FIXME this is using hard-coded strings but in the future, this functionality will be
+    //       converted to use audio HAL extensions required to support tunneling
+    return setParameters(String8("exiting=1"));
+}
+
+StreamInHalHidl::StreamInHalHidl(
+        const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
+        : StreamHalHidl("StreamInHalHidl", stream.get())
+        , mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
 }
 
 StreamInHalHidl::~StreamInHalHidl() {
@@ -834,16 +1000,19 @@
 }
 
 status_t StreamInHalHidl::getFrameSize(size_t *size) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("getFrameSize", mStream->getFrameSize(), size);
 }
 
 status_t StreamInHalHidl::setGain(float gain) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("setGain", mStream->setGain(gain));
 }
 
 status_t StreamInHalHidl::read(void *buffer, size_t bytes, size_t *read) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     *read = 0;
 
@@ -971,11 +1140,13 @@
 }
 
 status_t StreamInHalHidl::getInputFramesLost(uint32_t *framesLost) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     return processReturn("getInputFramesLost", mStream->getInputFramesLost(), framesLost);
 }
 
 status_t StreamInHalHidl::getCapturePosition(int64_t *frames, int64_t *time) {
+    TIME_CHECK();
     if (mStream == 0) return NO_INIT;
     if (mReaderClient == gettid() && mCommandMQ) {
         ReadParameters params;
@@ -1015,6 +1186,7 @@
 #elif MAJOR_VERSION >= 4
 status_t StreamInHalHidl::getActiveMicrophones(
         std::vector<media::MicrophoneInfo> *microphonesInfo) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     Result retval;
     Return<void> ret = mStream->getActiveMicrophones(
@@ -1033,7 +1205,12 @@
 
 status_t StreamInHalHidl::updateSinkMetadata(const
         StreamInHalInterface::SinkMetadata& sinkMetadata) {
-    CPP_VERSION::SinkMetadata hidlMetadata;
+    TIME_CHECK();
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
+#endif
     if (status_t status = CoreUtils::sinkMetadataFromHalV7(
                     sinkMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
             status != OK) {
@@ -1056,17 +1233,18 @@
 }
 #else
 status_t StreamInHalHidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("setPreferredMicrophoneDirection",
         mStream->setMicrophoneDirection(static_cast<MicrophoneDirection>(direction)));
 }
 
 status_t StreamInHalHidl::setPreferredMicrophoneFieldDimension(float zoom) {
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
     return processReturn("setPreferredMicrophoneFieldDimension",
                 mStream->setMicrophoneFieldDimension(zoom));
 }
 #endif
 
-} // namespace CPP_VERSION
 } // namespace android
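
The TIME_CHECK() macro introduced above instruments each HAL call with a scoped mediautils object so that unusually long calls can be reported even when the function returns early. A rough RAII illustration of the idea, not the mediautils implementation (the class name and threshold are invented for this sketch):

    #include <chrono>
    #include <cstdio>
    #include <string>

    class ScopedTimeCheck {
      public:
        explicit ScopedTimeCheck(std::string tag)
                : mTag(std::move(tag)), mStart(std::chrono::steady_clock::now()) {}
        ~ScopedTimeCheck() {
            using namespace std::chrono;
            const auto elapsedMs =
                    duration_cast<milliseconds>(steady_clock::now() - mStart).count();
            if (elapsedMs > 50) {  // arbitrary threshold for the sketch
                std::fprintf(stderr, "%s took %lld ms\n", mTag.c_str(),
                             static_cast<long long>(elapsedMs));
            }
        }
      private:
        const std::string mTag;
        const std::chrono::steady_clock::time_point mStart;
    };

    // Dropped at the top of each instrumented method, mirroring the macro above.
    #define TIME_CHECK() ScopedTimeCheck timeCheck_(std::string(__func__))
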
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 6f5dd04..54fbefe 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -19,34 +19,33 @@
 
 #include <atomic>
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStream.h)
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamIn.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
+#include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <mediautils/Synchronization.h>
 
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
 #include "StreamPowerLog.h"
 
-using ::android::hardware::audio::CPP_VERSION::IStream;
-using ::android::hardware::audio::CPP_VERSION::IStreamIn;
-using ::android::hardware::audio::CPP_VERSION::IStreamOut;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStream;
 using ::android::hardware::EventFlag;
 using ::android::hardware::MessageQueue;
 using ::android::hardware::Return;
-using ReadParameters = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadParameters;
-using ReadStatus = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadStatus;
+using ReadParameters =
+        ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadParameters;
+using ReadStatus = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadStatus;
 using WriteCommand = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteCommand;
 using WriteStatus = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteStatus;
 
 namespace android {
-namespace CPP_VERSION {
 
 class DeviceHalHidl;
 
-class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
+class StreamHalHidl : public virtual StreamHalInterface, public CoreConversionHelperHidl
 {
   public:
     // Return size of input/output buffer in bytes for this stream - eg. 4800.
@@ -90,9 +89,15 @@
     // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
     virtual status_t setHalThreadPriority(int priority);
 
+    status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+                                            std::optional<audio_source_t> source,
+                                            audio_devices_t type) override;
+
+    status_t legacyReleaseAudioPatch() override;
+
   protected:
     // Subclasses can not be constructed directly by clients.
-    explicit StreamHalHidl(IStream *stream);
+    StreamHalHidl(std::string_view className, IStream *stream);
 
     ~StreamHalHidl() override;
 
@@ -191,6 +196,15 @@
     // Methods used by StreamCodecFormatCallback (HIDL).
     void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
 
+    status_t setLatencyMode(audio_latency_mode_t mode) override;
+    status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
+    status_t setLatencyModeCallback(
+            const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) override;
+
+    void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
+
+    status_t exit() override;
+
   private:
     friend class DeviceHalHidl;
     typedef MessageQueue<WriteCommand, hardware::kSynchronizedReadWrite> CommandMQ;
@@ -199,7 +213,9 @@
 
     mediautils::atomic_wp<StreamOutHalInterfaceCallback> mCallback;
     mediautils::atomic_wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-    const sp<IStreamOut> mStream;
+    mediautils::atomic_wp<StreamOutHalInterfaceLatencyModeCallback> mLatencyModeCallback;
+
+    const sp<::android::hardware::audio::CPP_VERSION::IStreamOut> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
@@ -207,7 +223,7 @@
     EventFlag* mEfGroup;
 
     // Can not be constructed directly by clients.
-    StreamOutHalHidl(const sp<IStreamOut>& stream);
+    StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
 
     virtual ~StreamOutHalHidl();
 
@@ -255,7 +271,7 @@
     typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
     typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
 
-    const sp<IStreamIn> mStream;
+    const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
@@ -263,7 +279,8 @@
     EventFlag* mEfGroup;
 
     // Can not be constructed directly by clients.
-    StreamInHalHidl(const sp<IStreamIn>& stream);
+    StreamInHalHidl(
+            const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream);
 
     virtual ~StreamInHalHidl();
 
@@ -273,7 +290,6 @@
     status_t prepareForReading(size_t bufferSize);
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_STREAM_HAL_HIDL_H
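
StreamHalHidl now exposes legacyCreateAudioPatch()/legacyReleaseAudioPatch(), which translate a device port into the pre-3.0 key/value routing parameters instead of issuing a real createAudioPatch call. A minimal usage sketch, assuming the methods are declared on StreamHalInterface as the overrides above suggest (the helper function names and the speaker device are illustrative only):

    #include <optional>
    #include <media/audiohal/StreamHalInterface.h>
    #include <system/audio.h>

    using ::android::sp;
    using ::android::status_t;
    using ::android::StreamOutHalInterface;

    // Route an output stream to the built-in speaker via the legacy parameter path.
    status_t routeToSpeaker(const sp<StreamOutHalInterface>& stream) {
        struct audio_port_config port = {};
        port.type = AUDIO_PORT_TYPE_DEVICE;
        port.ext.device.type = AUDIO_DEVICE_OUT_SPEAKER;  // address left empty
        // No input source is supplied for an output stream.
        return stream->legacyCreateAudioPatch(port, std::nullopt, AUDIO_DEVICE_OUT_SPEAKER);
    }

    // Undo the routing when the patch is torn down.
    status_t unroute(const sp<StreamOutHalInterface>& stream) {
        return stream->legacyReleaseAudioPatch();
    }
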
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
deleted file mode 100644
index 11fac61..0000000
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ /dev/null
@@ -1,524 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "StreamHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <audio_utils/Metadata.h>
-#include <hardware/audio.h>
-#include <media/AudioParameter.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "ParameterUtils.h"
-#include "StreamHalLocal.h"
-
-namespace android {
-namespace CPP_VERSION {
-
-StreamHalLocal::StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device)
-        : mDevice(device),
-          mStream(stream) {
-    // Instrument audio signal power logging.
-    // Note: This assumes channel mask, format, and sample rate do not change after creation.
-    if (mStream != nullptr /* && mStreamPowerLog.isUserDebugOrEngBuild() */) {
-        mStreamPowerLog.init(mStream->get_sample_rate(mStream),
-                mStream->get_channels(mStream),
-                mStream->get_format(mStream));
-    }
-}
-
-StreamHalLocal::~StreamHalLocal() {
-    mStream = 0;
-    mDevice.clear();
-}
-
-status_t StreamHalLocal::getBufferSize(size_t *size) {
-    *size = mStream->get_buffer_size(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
-    configBase->sample_rate = mStream->get_sample_rate(mStream);
-    configBase->channel_mask = mStream->get_channels(mStream);
-    configBase->format = mStream->get_format(mStream);
-    return OK;
-}
-
-status_t StreamHalLocal::setParameters(const String8& kvPairs) {
-    return mStream->set_parameters(mStream, kvPairs.string());
-}
-
-status_t StreamHalLocal::getParameters(const String8& keys, String8 *values) {
-    char *halValues = mStream->get_parameters(mStream, keys.string());
-    if (halValues != NULL) {
-        values->setTo(halValues);
-        free(halValues);
-    } else {
-        values->clear();
-    }
-    return OK;
-}
-
-status_t StreamHalLocal::addEffect(sp<EffectHalInterface>) {
-    LOG_ALWAYS_FATAL("Local streams can not have effects");
-    return INVALID_OPERATION;
-}
-
-status_t StreamHalLocal::removeEffect(sp<EffectHalInterface>) {
-    LOG_ALWAYS_FATAL("Local streams can not have effects");
-    return INVALID_OPERATION;
-}
-
-status_t StreamHalLocal::standby() {
-    return mStream->standby(mStream);
-}
-
-status_t StreamHalLocal::dump(int fd, const Vector<String16>& args) {
-    (void) args;
-    status_t status = mStream->dump(mStream, fd);
-    mStreamPowerLog.dump(fd);
-    return status;
-}
-
-status_t StreamHalLocal::setHalThreadPriority(int) {
-    // Don't need to do anything as local hal is executed by audioflinger directly
-    // on the same thread.
-    return OK;
-}
-
-StreamOutHalLocal::StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device)
-        : StreamHalLocal(&stream->common, device), mStream(stream) {
-}
-
-StreamOutHalLocal::~StreamOutHalLocal() {
-    mCallback.clear();
-    mDevice->closeOutputStream(mStream);
-    mStream = 0;
-}
-
-status_t StreamOutHalLocal::getFrameSize(size_t *size) {
-    *size = audio_stream_out_frame_size(mStream);
-    return OK;
-}
-
-status_t StreamOutHalLocal::getLatency(uint32_t *latency) {
-    *latency = mStream->get_latency(mStream);
-    return OK;
-}
-
-status_t StreamOutHalLocal::setVolume(float left, float right) {
-    if (mStream->set_volume == NULL) return INVALID_OPERATION;
-    return mStream->set_volume(mStream, left, right);
-}
-
-status_t StreamOutHalLocal::selectPresentation(int presentationId, int programId) {
-    AudioParameter param;
-    param.addInt(String8(AudioParameter::keyPresentationId), presentationId);
-    param.addInt(String8(AudioParameter::keyProgramId), programId);
-    return setParameters(param.toString());
-}
-
-status_t StreamOutHalLocal::write(const void *buffer, size_t bytes, size_t *written) {
-    ssize_t writeResult = mStream->write(mStream, buffer, bytes);
-    if (writeResult > 0) {
-        *written = writeResult;
-        mStreamPowerLog.log(buffer, *written);
-        return OK;
-    } else {
-        *written = 0;
-        return writeResult;
-    }
-}
-
-status_t StreamOutHalLocal::getRenderPosition(uint32_t *dspFrames) {
-    return mStream->get_render_position(mStream, dspFrames);
-}
-
-status_t StreamOutHalLocal::getNextWriteTimestamp(int64_t *timestamp) {
-    if (mStream->get_next_write_timestamp == NULL) return INVALID_OPERATION;
-    return mStream->get_next_write_timestamp(mStream, timestamp);
-}
-
-status_t StreamOutHalLocal::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
-    if (mStream->set_callback == NULL) return INVALID_OPERATION;
-    status_t result = mStream->set_callback(mStream, StreamOutHalLocal::asyncCallback, this);
-    if (result == OK) {
-        mCallback = callback;
-    }
-    return result;
-}
-
-// static
-int StreamOutHalLocal::asyncCallback(stream_callback_event_t event, void*, void *cookie) {
-    // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
-    // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
-    // already running, because the destructor is invoked after the refcount has been atomically
-    // decremented.
-    wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
-    sp<StreamOutHalLocal> self = weakSelf.promote();
-    if (self == 0) return 0;
-    sp<StreamOutHalInterfaceCallback> callback = self->mCallback.promote();
-    if (callback == 0) return 0;
-    ALOGV("asyncCallback() event %d", event);
-    switch (event) {
-        case STREAM_CBK_EVENT_WRITE_READY:
-            callback->onWriteReady();
-            break;
-        case STREAM_CBK_EVENT_DRAIN_READY:
-            callback->onDrainReady();
-            break;
-        case STREAM_CBK_EVENT_ERROR:
-            callback->onError();
-            break;
-        default:
-            ALOGW("asyncCallback() unknown event %d", event);
-            break;
-    }
-    return 0;
-}
-
-status_t StreamOutHalLocal::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
-    *supportsPause = mStream->pause != NULL;
-    *supportsResume = mStream->resume != NULL;
-    return OK;
-}
-
-status_t StreamOutHalLocal::pause() {
-    if (mStream->pause == NULL) return INVALID_OPERATION;
-    return mStream->pause(mStream);
-}
-
-status_t StreamOutHalLocal::resume() {
-    if (mStream->resume == NULL) return INVALID_OPERATION;
-    return mStream->resume(mStream);
-}
-
-status_t StreamOutHalLocal::supportsDrain(bool *supportsDrain) {
-    *supportsDrain = mStream->drain != NULL;
-    return OK;
-}
-
-status_t StreamOutHalLocal::drain(bool earlyNotify) {
-    if (mStream->drain == NULL) return INVALID_OPERATION;
-    return mStream->drain(mStream, earlyNotify ? AUDIO_DRAIN_EARLY_NOTIFY : AUDIO_DRAIN_ALL);
-}
-
-status_t StreamOutHalLocal::flush() {
-    if (mStream->flush == NULL) return INVALID_OPERATION;
-    return mStream->flush(mStream);
-}
-
-status_t StreamOutHalLocal::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
-    if (mStream->get_presentation_position == NULL) return INVALID_OPERATION;
-    return mStream->get_presentation_position(mStream, frames, timestamp);
-}
-
-void StreamOutHalLocal::doUpdateSourceMetadata(const SourceMetadata& sourceMetadata) {
-    std::vector<playback_track_metadata> halTracks;
-    halTracks.reserve(sourceMetadata.tracks.size());
-    for (auto& metadata : sourceMetadata.tracks) {
-        playback_track_metadata halTrackMetadata;
-        playback_track_metadata_from_v7(&halTrackMetadata, &metadata);
-        halTracks.push_back(halTrackMetadata);
-    }
-    const source_metadata_t halMetadata = {
-        .track_count = halTracks.size(),
-        .tracks = halTracks.data(),
-    };
-    mStream->update_source_metadata(mStream, &halMetadata);
-}
-
-#if MAJOR_VERSION >= 7
-void StreamOutHalLocal::doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata) {
-    const source_metadata_v7_t metadata {
-        .track_count = sourceMetadata.tracks.size(),
-        // const cast is fine as it is in a const structure
-        .tracks = const_cast<playback_track_metadata_v7*>(sourceMetadata.tracks.data()),
-    };
-    mStream->update_source_metadata_v7(mStream, &metadata);
-}
-#endif
-
-status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
-#if MAJOR_VERSION < 7
-    if (mStream->update_source_metadata == nullptr) {
-        return INVALID_OPERATION;
-    }
-    doUpdateSourceMetadata(sourceMetadata);
-#else
-    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
-        if (mStream->update_source_metadata == nullptr) {
-            return INVALID_OPERATION;
-        }
-        doUpdateSourceMetadata(sourceMetadata);
-    } else {
-        if (mStream->update_source_metadata_v7 == nullptr) {
-            return INVALID_OPERATION;
-        }
-        doUpdateSourceMetadataV7(sourceMetadata);
-    }
-#endif
-    return OK;
-}
-
-
-status_t StreamOutHalLocal::start() {
-    if (mStream->start == NULL) return INVALID_OPERATION;
-    return mStream->start(mStream);
-}
-
-status_t StreamOutHalLocal::stop() {
-    if (mStream->stop == NULL) return INVALID_OPERATION;
-    return mStream->stop(mStream);
-}
-
-status_t StreamOutHalLocal::createMmapBuffer(int32_t minSizeFrames,
-                                  struct audio_mmap_buffer_info *info) {
-    if (mStream->create_mmap_buffer == NULL) return INVALID_OPERATION;
-    return mStream->create_mmap_buffer(mStream, minSizeFrames, info);
-}
-
-status_t StreamOutHalLocal::getMmapPosition(struct audio_mmap_position *position) {
-    if (mStream->get_mmap_position == NULL) return INVALID_OPERATION;
-    return mStream->get_mmap_position(mStream, position);
-}
-
-status_t StreamOutHalLocal::getDualMonoMode(audio_dual_mono_mode_t* mode) {
-    if (mStream->get_dual_mono_mode == nullptr) return INVALID_OPERATION;
-    return mStream->get_dual_mono_mode(mStream, mode);
-}
-
-status_t StreamOutHalLocal::setDualMonoMode(audio_dual_mono_mode_t mode) {
-    if (mStream->set_dual_mono_mode == nullptr) return INVALID_OPERATION;
-    return mStream->set_dual_mono_mode(mStream, mode);
-}
-
-status_t StreamOutHalLocal::getAudioDescriptionMixLevel(float* leveldB) {
-    if (mStream->get_audio_description_mix_level == nullptr) return INVALID_OPERATION;
-    return mStream->get_audio_description_mix_level(mStream, leveldB);
-}
-
-status_t StreamOutHalLocal::setAudioDescriptionMixLevel(float leveldB) {
-    if (mStream->set_audio_description_mix_level == nullptr) return INVALID_OPERATION;
-    return mStream->set_audio_description_mix_level(mStream, leveldB);
-}
-
-status_t StreamOutHalLocal::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
-    if (mStream->get_playback_rate_parameters == nullptr) return INVALID_OPERATION;
-    return mStream->get_playback_rate_parameters(mStream, playbackRate);
-}
-
-status_t StreamOutHalLocal::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
-    if (mStream->set_playback_rate_parameters == nullptr) return INVALID_OPERATION;
-    return mStream->set_playback_rate_parameters(mStream, &playbackRate);
-}
-
-status_t StreamOutHalLocal::setEventCallback(
-        const sp<StreamOutHalInterfaceEventCallback>& callback) {
-    if (mStream->set_event_callback == nullptr) {
-        return INVALID_OPERATION;
-    }
-    stream_event_callback_t asyncCallback =
-            callback == nullptr ? nullptr : StreamOutHalLocal::asyncEventCallback;
-    status_t result = mStream->set_event_callback(mStream, asyncCallback, this);
-    if (result == OK) {
-        mEventCallback = callback;
-    }
-    return result;
-}
-
-// static
-int StreamOutHalLocal::asyncEventCallback(
-        stream_event_callback_type_t event, void *param, void *cookie) {
-    // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
-    // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
-    // already running, because the destructor is invoked after the refcount has been atomically
-    // decremented.
-    wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
-    sp<StreamOutHalLocal> self = weakSelf.promote();
-    if (self == nullptr) return 0;
-    sp<StreamOutHalInterfaceEventCallback> callback = self->mEventCallback.promote();
-    if (callback.get() == nullptr) return 0;
-    switch (event) {
-        case STREAM_EVENT_CBK_TYPE_CODEC_FORMAT_CHANGED:
-            // void* param is the byte string buffer from byte_string_from_audio_metadata().
-            // As the byte string buffer may have embedded zeroes, we cannot use strlen()
-            callback->onCodecFormatChanged(std::basic_string<uint8_t>(
-                    (const uint8_t*)param,
-                    audio_utils::metadata::dataByteStringLen((const uint8_t*)param)));
-            break;
-        default:
-            ALOGW("%s unknown event %d", __func__, event);
-            break;
-    }
-    return 0;
-}
-
-StreamInHalLocal::StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device)
-        : StreamHalLocal(&stream->common, device), mStream(stream) {
-}
-
-StreamInHalLocal::~StreamInHalLocal() {
-    mDevice->closeInputStream(mStream);
-    mStream = 0;
-}
-
-status_t StreamInHalLocal::getFrameSize(size_t *size) {
-    *size = audio_stream_in_frame_size(mStream);
-    return OK;
-}
-
-status_t StreamInHalLocal::setGain(float gain) {
-    return mStream->set_gain(mStream, gain);
-}
-
-status_t StreamInHalLocal::read(void *buffer, size_t bytes, size_t *read) {
-    ssize_t readResult = mStream->read(mStream, buffer, bytes);
-    if (readResult > 0) {
-        *read = readResult;
-        mStreamPowerLog.log( buffer, *read);
-        return OK;
-    } else {
-        *read = 0;
-        return readResult;
-    }
-}
-
-status_t StreamInHalLocal::getInputFramesLost(uint32_t *framesLost) {
-    *framesLost = mStream->get_input_frames_lost(mStream);
-    return OK;
-}
-
-status_t StreamInHalLocal::getCapturePosition(int64_t *frames, int64_t *time) {
-    if (mStream->get_capture_position == NULL) return INVALID_OPERATION;
-    return mStream->get_capture_position(mStream, frames, time);
-}
-
-void StreamInHalLocal::doUpdateSinkMetadata(const SinkMetadata& sinkMetadata) {
-    std::vector<record_track_metadata> halTracks;
-    halTracks.reserve(sinkMetadata.tracks.size());
-    for (auto& metadata : sinkMetadata.tracks) {
-        record_track_metadata halTrackMetadata;
-        record_track_metadata_from_v7(&halTrackMetadata, &metadata);
-        halTracks.push_back(halTrackMetadata);
-    }
-    const sink_metadata_t halMetadata = {
-        .track_count = halTracks.size(),
-        .tracks = halTracks.data(),
-    };
-    mStream->update_sink_metadata(mStream, &halMetadata);
-}
-
-#if MAJOR_VERSION >= 7
-void StreamInHalLocal::doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata) {
-    const sink_metadata_v7_t halMetadata {
-        .track_count = sinkMetadata.tracks.size(),
-        // const cast is fine as it is in a const structure
-        .tracks = const_cast<record_track_metadata_v7*>(sinkMetadata.tracks.data()),
-    };
-    mStream->update_sink_metadata_v7(mStream, &halMetadata);
-}
-#endif
-
-status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
-#if MAJOR_VERSION < 7
-    if (mStream->update_sink_metadata == nullptr) {
-        return INVALID_OPERATION;  // not supported by the HAL
-    }
-    doUpdateSinkMetadata(sinkMetadata);
-#else
-    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
-        if (mStream->update_sink_metadata == nullptr) {
-            return INVALID_OPERATION;  // not supported by the HAL
-        }
-        doUpdateSinkMetadata(sinkMetadata);
-    } else {
-        if (mStream->update_sink_metadata_v7 == nullptr) {
-            return INVALID_OPERATION;  // not supported by the HAL
-        }
-        doUpdateSinkMetadataV7(sinkMetadata);
-    }
-#endif
-    return OK;
-}
-
-status_t StreamInHalLocal::start() {
-    if (mStream->start == NULL) return INVALID_OPERATION;
-    return mStream->start(mStream);
-}
-
-status_t StreamInHalLocal::stop() {
-    if (mStream->stop == NULL) return INVALID_OPERATION;
-    return mStream->stop(mStream);
-}
-
-status_t StreamInHalLocal::createMmapBuffer(int32_t minSizeFrames,
-                                  struct audio_mmap_buffer_info *info) {
-    if (mStream->create_mmap_buffer == NULL) return INVALID_OPERATION;
-    return mStream->create_mmap_buffer(mStream, minSizeFrames, info);
-}
-
-status_t StreamInHalLocal::getMmapPosition(struct audio_mmap_position *position) {
-    if (mStream->get_mmap_position == NULL) return INVALID_OPERATION;
-    return mStream->get_mmap_position(mStream, position);
-}
-
-#if MAJOR_VERSION == 2
-status_t StreamInHalLocal::getActiveMicrophones(
-        std::vector<media::MicrophoneInfo> *microphones __unused) {
-    return INVALID_OPERATION;
-}
-#elif MAJOR_VERSION >= 4
-status_t StreamInHalLocal::getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
-    if (mStream->get_active_microphones == NULL) return INVALID_OPERATION;
-    size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
-    audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
-    status_t status = mStream->get_active_microphones(mStream, &mic_array[0], &actual_mics);
-    for (size_t i = 0; i < actual_mics; i++) {
-        media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
-        microphones->push_back(microphoneInfo);
-    }
-    return status;
-}
-#endif
-
-#if MAJOR_VERSION < 5
-status_t StreamInHalLocal::setPreferredMicrophoneDirection(
-            audio_microphone_direction_t direction __unused) {
-    return INVALID_OPERATION;
-}
-
-status_t StreamInHalLocal::setPreferredMicrophoneFieldDimension(float zoom __unused) {
-    return INVALID_OPERATION;
-}
-#else
-status_t StreamInHalLocal::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
-    if (mStream->set_microphone_direction == NULL) return INVALID_OPERATION;
-    return mStream->set_microphone_direction(mStream, direction);
-}
-
-status_t StreamInHalLocal::setPreferredMicrophoneFieldDimension(float zoom) {
-    if (mStream->set_microphone_field_dimension == NULL) return INVALID_OPERATION;
-    return mStream->set_microphone_field_dimension(mStream, zoom);
-
-}
-#endif
-
-} // namespace CPP_VERSION
-} // namespace android
-
-
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
deleted file mode 100644
index 493c521..0000000
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
-#define ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
-
-#include <media/audiohal/StreamHalInterface.h>
-#include "StreamPowerLog.h"
-
-namespace android {
-namespace CPP_VERSION {
-
-class DeviceHalLocal;
-
-class StreamHalLocal : public virtual StreamHalInterface
-{
-  public:
-    // Return size of input/output buffer in bytes for this stream - eg. 4800.
-    virtual status_t getBufferSize(size_t *size);
-
-    // Return the base configuration of the stream:
-    //   - channel mask;
-    //   - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
-    //   - sampling rate in Hz - eg. 44100.
-    virtual status_t getAudioProperties(audio_config_base_t *configBase);
-
-    // Set audio stream parameters.
-    virtual status_t setParameters(const String8& kvPairs);
-
-    // Get audio stream parameters.
-    virtual status_t getParameters(const String8& keys, String8 *values);
-
-    // Add or remove the effect on the stream.
-    virtual status_t addEffect(sp<EffectHalInterface> effect);
-    virtual status_t removeEffect(sp<EffectHalInterface> effect);
-
-    // Put the audio hardware input/output into standby mode.
-    virtual status_t standby();
-
-    virtual status_t dump(int fd, const Vector<String16>& args) override;
-
-    // Start a stream operating in mmap mode.
-    virtual status_t start() = 0;
-
-    // Stop a stream operating in mmap mode.
-    virtual status_t stop() = 0;
-
-    // Retrieve information on the data buffer in mmap mode.
-    virtual status_t createMmapBuffer(int32_t minSizeFrames,
-                                      struct audio_mmap_buffer_info *info) = 0;
-
-    // Get current read/write position in the mmap buffer
-    virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
-
-    // Set the priority of the thread that interacts with the HAL
-    // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
-    virtual status_t setHalThreadPriority(int priority);
-
-  protected:
-    // Subclasses can not be constructed directly by clients.
-    StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device);
-
-    // The destructor automatically closes the stream.
-    virtual ~StreamHalLocal();
-
-    sp<DeviceHalLocal> mDevice;
-
-    // mStreamPowerLog is used for audio signal power logging.
-    StreamPowerLog mStreamPowerLog;
-
-  private:
-    audio_stream_t *mStream;
-};
-
-class StreamOutHalLocal : public StreamOutHalInterface, public StreamHalLocal {
-  public:
-    // Return the frame size (number of bytes per sample) of a stream.
-    virtual status_t getFrameSize(size_t *size);
-
-    // Return the audio hardware driver estimated latency in milliseconds.
-    virtual status_t getLatency(uint32_t *latency);
-
-    // Use this method in situations where audio mixing is done in the hardware.
-    virtual status_t setVolume(float left, float right);
-
-    // Selects the audio presentation (if available).
-    virtual status_t selectPresentation(int presentationId, int programId);
-
-    // Write audio buffer to driver.
-    virtual status_t write(const void *buffer, size_t bytes, size_t *written);
-
-    // Return the number of audio frames written by the audio dsp to DAC since
-    // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames);
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp);
-
-    // Set the callback for notifying completion of non-blocking write and drain.
-    virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
-
-    // Returns whether pause and resume operations are supported.
-    virtual status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume);
-
-    // Notifies to the audio driver to resume playback following a pause.
-    virtual status_t pause();
-
-    // Notifies to the audio driver to resume playback following a pause.
-    virtual status_t resume();
-
-    // Returns whether drain operation is supported.
-    virtual status_t supportsDrain(bool *supportsDrain);
-
-    // Requests notification when data buffered by the driver/hardware has been played.
-    virtual status_t drain(bool earlyNotify);
-
-    // Notifies to the audio driver to flush the queued data.
-    virtual status_t flush();
-
-    // Return a recent count of the number of audio frames presented to an external observer.
-    virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
-
-    // Start a stream operating in mmap mode.
-    virtual status_t start();
-
-    // Stop a stream operating in mmap mode.
-    virtual status_t stop();
-
-    // Retrieve information on the data buffer in mmap mode.
-    virtual status_t createMmapBuffer(int32_t minSizeFrames,
-                                      struct audio_mmap_buffer_info *info);
-
-    // Get current read/write position in the mmap buffer
-    virtual status_t getMmapPosition(struct audio_mmap_position *position);
-
-    // Called when the metadata of the stream's source has been changed.
-    status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
-
-    // Returns the Dual Mono mode presentation setting.
-    status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
-
-    // Sets the Dual Mono mode presentation on the output device.
-    status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
-
-    // Returns the Audio Description Mix level in dB.
-    status_t getAudioDescriptionMixLevel(float* leveldB) override;
-
-    // Sets the Audio Description Mix level in dB.
-    status_t setAudioDescriptionMixLevel(float leveldB) override;
-
-    // Retrieves current playback rate parameters.
-    status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
-
-    // Sets the playback rate parameters that control playback behavior.
-    status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
-
-    status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
-
-  private:
-    audio_stream_out_t *mStream;
-    wp<StreamOutHalInterfaceCallback> mCallback;
-    wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-
-    friend class DeviceHalLocal;
-
-    // Can not be constructed directly by clients.
-    StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device);
-
-    virtual ~StreamOutHalLocal();
-
-    static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
-
-    static int asyncEventCallback(stream_event_callback_type_t event, void *param, void *cookie);
-
-    void doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata);
-    void doUpdateSourceMetadata(const SourceMetadata& sourceMetadata);
-};
-
-class StreamInHalLocal : public StreamInHalInterface, public StreamHalLocal {
-  public:
-    // Return the frame size (number of bytes per sample) of a stream.
-    virtual status_t getFrameSize(size_t *size);
-
-    // Set the input gain for the audio driver.
-    virtual status_t setGain(float gain);
-
-    // Read audio buffer in from driver.
-    virtual status_t read(void *buffer, size_t bytes, size_t *read);
-
-    // Return the amount of input frames lost in the audio driver.
-    virtual status_t getInputFramesLost(uint32_t *framesLost);
-
-    // Return a recent count of the number of audio frames received and
-    // the clock time associated with that frame count.
-    virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
-
-    // Start a stream operating in mmap mode.
-    virtual status_t start();
-
-    // Stop a stream operating in mmap mode.
-    virtual status_t stop();
-
-    // Retrieve information on the data buffer in mmap mode.
-    virtual status_t createMmapBuffer(int32_t minSizeFrames,
-                                      struct audio_mmap_buffer_info *info);
-
-    // Get current read/write position in the mmap buffer
-    virtual status_t getMmapPosition(struct audio_mmap_position *position);
-
-    // Get active microphones
-    virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
-
-    // Sets microphone direction (for processing)
-    virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
-
-    // Sets microphone zoom (for processing)
-    virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
-
-    // Called when the metadata of the stream's sink has been changed.
-    status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
-
-  private:
-    audio_stream_in_t *mStream;
-
-    friend class DeviceHalLocal;
-
-    // Can not be constructed directly by clients.
-    StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device);
-
-    virtual ~StreamInHalLocal();
-
-    void doUpdateSinkMetadata(const SinkMetadata& sinkMetadata);
-    void doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata);
-};
-
-} // namespace CPP_VERSION
-} // namespace android
-
-#endif // ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index f6a554b..c08ee47 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -24,7 +24,6 @@
 #include <system/audio.h>
 
 namespace android {
-namespace CPP_VERSION {
 
 class StreamPowerLog {
 public:
@@ -99,7 +98,6 @@
     size_t mFrameSize;
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_STREAM_POWER_LOG_H
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 69cbcec..d27ad4c 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -17,6 +17,9 @@
 #ifndef ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
 #define ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
 
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <error/Result.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/MicrophoneInfo.h>
 #include <system/audio.h>
@@ -120,6 +123,17 @@
     virtual status_t removeDeviceEffect(
             audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
 
+    virtual status_t getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
+    virtual int32_t getAAudioMixerBurstCount() = 0;
+    virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
+
+    // Update the connection status of an external device.
+    virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+
+    virtual error::Result<audio_hw_sync_t> getHwAvSync() = 0;
+
     virtual status_t dump(int fd, const Vector<String16>& args) = 0;
 
   protected:
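
The MMAP policy and AAudio queries added above are new pure virtuals on DeviceHalInterface, so every implementation must provide them. A minimal caller-side sketch, assuming code inside namespace android with an sp<DeviceHalInterface> dev obtained elsewhere (the handle name and the use of the DEFAULT policy type are illustrative assumptions, not part of this change):

    std::vector<media::audio::common::AudioMMapPolicyInfo> policyInfos;
    if (dev->getMmapPolicyInfos(
            media::audio::common::AudioMMapPolicyType::DEFAULT, &policyInfos) == OK) {
        // The burst queries return plain values rather than statuses.
        const int32_t mixerBursts = dev->getAAudioMixerBurstCount();
        const int32_t minBurstUs = dev->getAAudioHardwareBurstMinUsec();
        ALOGV("%zu MMAP policies, mixer bursts %d, min hardware burst %d us",
              policyInfos.size(), mixerBursts, minBurstUs);
    }
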
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index 5091558..17010e6 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -43,6 +43,8 @@
     // The callback can be only set once.
     virtual status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) = 0;
 
+    virtual float getHalVersion() const = 0;
+
     static sp<DevicesFactoryHalInterface> create();
 
   protected:
diff --git a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
index 03165bd..2969c92 100644
--- a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
@@ -57,6 +57,9 @@
 
     virtual status_t dump(int fd) = 0;
 
+    // Unique effect ID to use with the core HAL.
+    virtual uint64_t effectId() const = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     EffectHalInterface() {}
diff --git a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h b/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
index d353ed0..866dd3e 100644
--- a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
+++ b/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
@@ -18,21 +18,42 @@
 #define ANDROID_HARDWARE_FACTORY_HAL_HIDL_H
 
 #include <string>
+#include <utility>
 
 #include <utils/StrongPointer.h>
 
 namespace android {
 
+// The pair of the interface's package name and the interface name,
+// e.g. <"android.hardware.audio", "IDevicesFactory">.
+// Splitting is used for easier construction of versioned names (FQNs).
+using InterfaceName = std::pair<std::string, std::string>;
+
 namespace detail {
 
-void* createPreferredImpl(const std::string& package, const std::string& interface);
+void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface);
 
 }  // namespace detail
 
-/** @Return the preferred available implementation or nullptr if none are available. */
+/**
+ * Create a client for the "preferred" (most recent) implementation of an interface
+ * by loading the appropriate version of the shared library containing the implementation.
+ *
+ * In the audio HAL, there are two families of interfaces: core and effects. Both are
+ * packed into the same shared library for memory efficiency. Since the core and the effects
+ * interfaces can have different minor versions on the device, the loader function considers
+ * which of the two has the most recent version, in order to avoid loading multiple shared
+ * libraries. Thus, a pair of interface names must be passed in.
+ *
+ * @param iface the interface that needs to be created.
+ * @param siblingIface the interface which occupies the same shared library.
+ * @return the preferred available implementation or nullptr if none are available.
+ */
 template <class Interface>
-static sp<Interface> createPreferredImpl(const std::string& package, const std::string& interface) {
-    return sp<Interface>{static_cast<Interface*>(detail::createPreferredImpl(package, interface))};
+static sp<Interface> createPreferredImpl(
+        const InterfaceName& iface, const InterfaceName& siblingIface) {
+    return sp<Interface>{
+        static_cast<Interface*>(detail::createPreferredImpl(iface, siblingIface))};
 }
 
 } // namespace android
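
With the new signature, a caller passes both the target interface and the sibling interface that shares its library, using the package/interface split described above. A hedged sketch of a call site (the effects-factory pair is an assumption used only to illustrate the sibling argument):

    // Illustrative only: request the devices factory, telling the loader that the
    // effects factory lives in the same shared library.
    sp<DevicesFactoryHalInterface> factory =
            createPreferredImpl<DevicesFactoryHalInterface>(
                    {"android.hardware.audio", "IDevicesFactory"},
                    {"android.hardware.audio.effect", "IEffectsFactory"});
    if (factory == nullptr) {
        ALOGE("No preferred audio HAL implementation is available");
    }
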
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 2b5b2db..1d52b7d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -89,6 +89,12 @@
     // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
     virtual status_t setHalThreadPriority(int priority) = 0;
 
+    virtual status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+                                            std::optional<audio_source_t> source,
+                                            audio_devices_t type) = 0;
+
+    virtual status_t legacyReleaseAudioPatch() = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     StreamHalInterface() {}
@@ -117,6 +123,18 @@
     virtual ~StreamOutHalInterfaceEventCallback() {}
 };
 
+class StreamOutHalInterfaceLatencyModeCallback : public virtual RefBase {
+public:
+    /**
+     * Called with the new list of supported latency modes when a change occurs.
+     */
+    virtual void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) = 0;
+
+protected:
+    StreamOutHalInterfaceLatencyModeCallback() {}
+    virtual ~StreamOutHalInterfaceLatencyModeCallback() {}
+};
+
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -194,6 +212,47 @@
 
     virtual status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) = 0;
 
+    /**
+     * Indicates the requested latency mode for this output stream.
+     *
+     * The requested mode can be one of the modes returned by
+     * the getRecommendedLatencyModes() API.
+     *
+     * @param mode the requested latency mode.
+     * @return operation completion status.
+     */
+    virtual status_t setLatencyMode(audio_latency_mode_t mode) = 0;
+
+    /**
+     * Indicates which latency modes are currently supported on this output stream.
+     * If the transport protocol (e.g. Bluetooth A2DP) used by this output stream to reach
+     * the output device supports variable latency modes, the HAL indicates which
+     * modes are currently supported.
+     * The framework can then call setLatencyMode() with one of the supported modes to select
+     * the desired operation mode.
+     *
+     * @param modes currently supported latency modes.
+     * @return operation completion status.
+     */
+    virtual status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) = 0;
+
+    /**
+     * Set the callback interface for notifying changes in supported latency modes.
+     *
+     * Calling this method with a null pointer will result in releasing
+     * the callback.
+     *
+     * @param callback the registered callback or null to unregister.
+     * @return operation completion status.
+     */
+    virtual status_t setLatencyModeCallback(
+            const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) = 0;
+
+    /**
+     * Signal the end of audio output, interrupting an ongoing 'write' operation.
+     */
+    virtual status_t exit() = 0;
+
   protected:
     virtual ~StreamOutHalInterface() {}
 };
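
The latency-mode additions follow the same callback pattern as StreamOutHalInterfaceEventCallback above. A minimal sketch of registering for updates and applying one of the recommended modes, assuming code inside namespace android with an sp<StreamOutHalInterface> streamOut obtained elsewhere (the observer class name is hypothetical):

    class LatencyModeObserver : public StreamOutHalInterfaceLatencyModeCallback {
      public:
        void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) override {
            // Remember the latest recommendation; a real client would re-apply a mode here.
            mModes = std::move(modes);
        }
      private:
        std::vector<audio_latency_mode_t> mModes;
    };

    sp<LatencyModeObserver> observer = new LatencyModeObserver();
    streamOut->setLatencyModeCallback(observer);
    std::vector<audio_latency_mode_t> modes;
    if (streamOut->getRecommendedLatencyModes(&modes) == OK && !modes.empty()) {
        streamOut->setLatencyMode(modes[0]);  // Must be one of the supported modes.
    }
    streamOut->setLatencyModeCallback(nullptr);  // Passing null releases the callback.
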
diff --git a/media/libaudioprocessing/TEST_MAPPING b/media/libaudioprocessing/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9fa
--- /dev/null
+++ b/media/libaudioprocessing/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index b26d028..abe622d 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -24,6 +24,10 @@
     vendor: true,
     srcs: ["EffectDownmix.cpp"],
 
+    export_include_dirs: [
+        ".",
+    ],
+
     shared_libs: [
         "libaudioutils",
         "libcutils",
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 4940117..392a6fa 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -18,9 +18,6 @@
     gtest: true,
     host_supported: true,
     vendor: true,
-    include_dirs: [
-        "frameworks/av/media/libeffects/downmix",
-    ],
     header_libs: [
         "libaudioeffects",
     ],
@@ -51,9 +48,6 @@
     name:"downmixtest",
     host_supported: false,
     proprietary: true,
-    include_dirs: [
-        "frameworks/av/media/libeffects/downmix",
-    ],
 
     header_libs: [
         "libaudioeffects",
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index 8a25b85..c21c5f2 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -29,9 +29,6 @@
     name: "reverb_benchmark",
     vendor: true,
     host_supported: true,
-    include_dirs: [
-        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
-    ],
     srcs: ["reverb_benchmark.cpp"],
     static_libs: [
         "libreverb",
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 1eadd27..ccef5ab 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -135,7 +135,6 @@
     LVM_UINT32 fs =
             (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
     LVM_UINT32 fc;     /* Filter centre frequency */
-    LVM_INT16 QFactor; /* Filter Q factor */
 
     pInstance->NBands = pParams->NBands;
 
@@ -144,7 +143,6 @@
          * Get the filter settings
          */
         fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
-        QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
 
         pInstance->pBiquadType[i] = LVEQNB_SinglePrecision_Float; /* Default to single precision */
 
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
index 8e63502..ffed6d4 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
@@ -421,7 +421,6 @@
      * Intermediate variables and temporary values
      */
     LVM_FLOAT T0;
-    LVM_FLOAT D;
     LVM_FLOAT A0;
     LVM_FLOAT B1;
     LVM_FLOAT B2;
@@ -444,9 +443,6 @@
      * Calculating the intermediate values
      */
     T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
-    /* Force D = 1 : the function was originally used for a peaking filter.
-       The D parameter do not exist for a BandPass filter coefficients */
 
     /*
      * Calculate the B2 coefficient
@@ -535,7 +531,6 @@
      * Intermediate variables and temporary values
      */
     LVM_FLOAT T0;
-    LVM_FLOAT D;
     LVM_FLOAT A0;
     LVM_FLOAT B1;
     LVM_FLOAT B2;
@@ -558,9 +553,6 @@
      * Calculating the intermediate values
      */
     T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
-    /* Force D = 1 : the function was originally used for a peaking filter.
-       The D parameter do not exist for a BandPass filter coefficients */
 
     /*
      * Calculate the B2 coefficient
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 9939ed1..7d7f8b9 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -18,10 +18,6 @@
         "EffectReverbTest.cpp",
         "EffectTestHelper.cpp",
     ],
-    include_dirs: [
-        "frameworks/av/media/libeffects/lvm/lib/Common/lib",
-        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
-    ],
     static_libs: [
         "libaudioutils",
         "libreverb",
@@ -108,10 +104,6 @@
     proprietary: true,
     gtest: false,
 
-    include_dirs: [
-        "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
-    ],
-
     header_libs: [
         "libaudioeffects",
     ],
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index e169e3c..1287514 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -89,6 +89,8 @@
 
     local_include_dirs: ["Reverb"],
 
+    export_include_dirs: ["Reverb"],
+
     header_libs: [
         "libhardware_headers",
         "libaudioeffects",
diff --git a/media/libeffects/preprocessing/.clang-format b/media/libeffects/preprocessing/.clang-format
deleted file mode 120000
index f1b4f69..0000000
--- a/media/libeffects/preprocessing/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 19a8b2f..61a2bf5 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -150,6 +150,7 @@
 
 bool sHasAuxChannels[PREPROC_NUM_EFFECTS] = {
         false,  // PREPROC_AGC
+        false,  // PREPROC_AGC2
         true,   // PREPROC_AEC
         true,   // PREPROC_NS
 };
diff --git a/media/libeffects/preprocessing/tests/correlation.cpp b/media/libeffects/preprocessing/tests/correlation.cpp
index eb56fc3..0853673 100644
--- a/media/libeffects/preprocessing/tests/correlation.cpp
+++ b/media/libeffects/preprocessing/tests/correlation.cpp
@@ -36,7 +36,7 @@
                                                                    const int16_t* sigY, int len,
                                                                    int16_t enableCrossCorr) {
     float maxCorrVal = 0.f, prevCorrVal = 0.f;
-    int delay = 0, peakIndex = 0, flag = 0;
+    int peakIndex = 0, flag = 0;
     int loopLim = (1 == enableCrossCorr) ? len : kMinLoopLimitValue;
     std::vector<int> peakIndexVect(kNumPeaks, 0);
     std::vector<float> peakValueVect(kNumPeaks, 0.f);
@@ -47,7 +47,6 @@
         }
         corrVal /= len - i;
         if (corrVal > maxCorrVal) {
-            delay = i;
             maxCorrVal = corrVal;
         }
         // Correlation peaks are expected to be observed at equal intervals. The interval length is
diff --git a/media/libeffects/testlibs/Android.bp b/media/libeffects/testlibs/Android.bp
new file mode 100644
index 0000000..5ba56bb
--- /dev/null
+++ b/media/libeffects/testlibs/Android.bp
@@ -0,0 +1,77 @@
+// Test Reverb library
+package {
+    default_applicable_licenses: [
+        "frameworks_av_media_libeffects_testlibs_license",
+    ],
+}
+
+license {
+    name: "frameworks_av_media_libeffects_testlibs_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_library {
+    name: "libreverbtest",
+    host_supported: true,
+    vendor: true,
+    srcs: [
+        "EffectReverb.c",
+        "EffectsMath.c",
+    ],
+
+    shared_libs: [
+        "libcutils",
+        "liblog",
+    ],
+
+    relative_install_path: "soundfx",
+
+    cflags: [
+        "-fvisibility=hidden",
+        "-Wall",
+        "-Werror",
+        "-Wno-address-of-packed-member",
+    ],
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+}
+
+cc_library {
+    name: "libequalizertest",
+    host_supported: true,
+    vendor: true,
+    srcs: [
+        "AudioBiquadFilter.cpp",
+        "AudioCoefInterpolator.cpp",
+        "AudioEqualizer.cpp",
+        "AudioPeakingFilter.cpp",
+        "AudioShelvingFilter.cpp",
+        "EffectEqualizer.cpp",
+        "EffectsMath.c",
+    ],
+
+    shared_libs: [
+        "libcutils",
+        "liblog",
+    ],
+
+    relative_install_path: "soundfx",
+
+    cflags: [
+        "-fvisibility=hidden",
+        "-Wall",
+        "-Werror",
+    ],
+
+    header_libs: [
+        "libaudioeffects",
+    ],
+}
diff --git a/media/libeffects/testlibs/Android.mk_ b/media/libeffects/testlibs/Android.mk_
deleted file mode 100644
index 14c373f..0000000
--- a/media/libeffects/testlibs/Android.mk_
+++ /dev/null
@@ -1,55 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-# Test Reverb library
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
-	EffectReverb.c.arm \
-	EffectsMath.c.arm
-
-LOCAL_CFLAGS := -O2
-
-LOCAL_SHARED_LIBRARIES := \
-	libcutils \
-	libdl
-
-LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE := libreverbtest
-
-LOCAL_C_INCLUDES := \
-	$(call include-path-for, audio-effects) \
-	$(call include-path-for, graphics corecg)
-
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-
-# Test Equalizer library
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
-	EffectsMath.c.arm \
-	EffectEqualizer.cpp \
-	AudioBiquadFilter.cpp.arm \
-	AudioCoefInterpolator.cpp.arm \
-	AudioPeakingFilter.cpp.arm \
-	AudioShelvingFilter.cpp.arm \
-	AudioEqualizer.cpp.arm
-
-LOCAL_CFLAGS := -O2
-
-LOCAL_SHARED_LIBRARIES := \
-	libcutils \
-	libdl
-
-LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE := libequalizertest
-
-LOCAL_C_INCLUDES := \
-	$(call include-path-for, graphics corecg) \
-	$(call include-path-for, audio-effects)
-
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/media/libeffects/testlibs/AudioEqualizer.cpp b/media/libeffects/testlibs/AudioEqualizer.cpp
index 4f3a308..141750b 100644
--- a/media/libeffects/testlibs/AudioEqualizer.cpp
+++ b/media/libeffects/testlibs/AudioEqualizer.cpp
@@ -19,7 +19,7 @@
 #include <assert.h>
 #include <stdlib.h>
 #include <new>
-#include <utils/Log.h>
+#include <log/log.h>
 
 #include "AudioEqualizer.h"
 #include "AudioPeakingFilter.h"
diff --git a/media/libeffects/testlibs/AudioPeakingFilter.cpp b/media/libeffects/testlibs/AudioPeakingFilter.cpp
index 99323ac..4257eca 100644
--- a/media/libeffects/testlibs/AudioPeakingFilter.cpp
+++ b/media/libeffects/testlibs/AudioPeakingFilter.cpp
@@ -87,9 +87,9 @@
 void AudioPeakingFilter::commit(bool immediate) {
     audio_coef_t coefs[5];
     int intCoord[3] = {
-        mFrequency >> FREQ_PRECISION_BITS,
+        (int)(mFrequency >> FREQ_PRECISION_BITS),
         mGain >> GAIN_PRECISION_BITS,
-        mBandwidth >> BANDWIDTH_PRECISION_BITS
+        (int)(mBandwidth >> BANDWIDTH_PRECISION_BITS)
     };
     uint32_t fracCoord[3] = {
         mFrequency << (32 - FREQ_PRECISION_BITS),
diff --git a/media/libeffects/testlibs/AudioShelvingFilter.cpp b/media/libeffects/testlibs/AudioShelvingFilter.cpp
index e031287..ad43c5a 100644
--- a/media/libeffects/testlibs/AudioShelvingFilter.cpp
+++ b/media/libeffects/testlibs/AudioShelvingFilter.cpp
@@ -89,8 +89,8 @@
 void AudioShelvingFilter::commit(bool immediate) {
     audio_coef_t coefs[5];
     int intCoord[2] = {
-        mFrequency >> FREQ_PRECISION_BITS,
-        mGain >> GAIN_PRECISION_BITS
+        (int)(mFrequency >> FREQ_PRECISION_BITS),
+        (int)(mGain >> GAIN_PRECISION_BITS)
     };
     uint32_t fracCoord[2] = {
         mFrequency << (32 - FREQ_PRECISION_BITS),
diff --git a/media/libeffects/testlibs/EffectEqualizer.cpp b/media/libeffects/testlibs/EffectEqualizer.cpp
index db4d009..72b530d 100644
--- a/media/libeffects/testlibs/EffectEqualizer.cpp
+++ b/media/libeffects/testlibs/EffectEqualizer.cpp
@@ -131,7 +131,8 @@
                             int32_t ioId,
                             effect_handle_t *pHandle) {
     int ret;
-    int i;
+    (void)sessionId;
+    (void)ioId;
 
     ALOGV("EffectLibCreateEffect start");
 
@@ -160,7 +161,7 @@
     pContext->state = EQUALIZER_STATE_INITIALIZED;
 
     ALOGV("EffectLibCreateEffect %p, size %d",
-         pContext, AudioEqualizer::GetInstanceSize(kNumBands)+sizeof(EqualizerContext));
+         pContext, (int)(AudioEqualizer::GetInstanceSize(kNumBands)+sizeof(EqualizerContext)));
 
     return 0;
 
@@ -294,7 +295,6 @@
 
 int Equalizer_init(EqualizerContext *pContext)
 {
-    int status;
 
     ALOGV("Equalizer_init start");
 
@@ -630,7 +630,6 @@
         void *pCmdData, uint32_t *replySize, void *pReplyData) {
 
     android::EqualizerContext * pContext = (android::EqualizerContext *) self;
-    int retsize;
 
     if (pContext == NULL || pContext->state == EQUALIZER_STATE_UNINITIALIZED) {
         return -EINVAL;
@@ -750,13 +749,13 @@
         NULL
 };
 
-
+__attribute__ ((visibility ("default")))
 audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
-    tag : AUDIO_EFFECT_LIBRARY_TAG,
-    version : EFFECT_LIBRARY_API_VERSION,
-    name : "Test Equalizer Library",
-    implementor : "The Android Open Source Project",
-    create_effect : android::EffectCreate,
-    release_effect : android::EffectRelease,
-    get_descriptor : android::EffectGetDescriptor,
+    .tag = AUDIO_EFFECT_LIBRARY_TAG,
+    .version = EFFECT_LIBRARY_API_VERSION,
+    .name = "Test Equalizer Library",
+    .implementor = "The Android Open Source Project",
+    .create_effect = android::EffectCreate,
+    .release_effect = android::EffectRelease,
+    .get_descriptor = android::EffectGetDescriptor,
 };
diff --git a/media/libeffects/testlibs/EffectReverb.c b/media/libeffects/testlibs/EffectReverb.c
index fce9bed..efba4f4 100644
--- a/media/libeffects/testlibs/EffectReverb.c
+++ b/media/libeffects/testlibs/EffectReverb.c
@@ -107,6 +107,8 @@
     const effect_descriptor_t *desc;
     int aux = 0;
     int preset = 0;
+    (void)sessionId;
+    (void)ioId;
 
     ALOGV("EffectLibCreateEffect start");
 
@@ -149,7 +151,7 @@
 
     module->context.mState = REVERB_STATE_INITIALIZED;
 
-    ALOGV("EffectLibCreateEffect %p ,size %d", module, sizeof(reverb_module_t));
+    ALOGV("EffectLibCreateEffect %p ,size %zu", module, sizeof(reverb_module_t));
 
     return 0;
 }
@@ -283,7 +285,6 @@
         void *pCmdData, uint32_t *replySize, void *pReplyData) {
     reverb_module_t *pRvbModule = (reverb_module_t *) self;
     reverb_object_t *pReverb;
-    int retsize;
 
     if (pRvbModule == NULL ||
             pRvbModule->context.mState == REVERB_STATE_UNINITIALIZED) {
@@ -758,7 +759,6 @@
     int32_t *pValue32;
     int16_t *pValue16;
     t_reverb_settings *pProperties;
-    int32_t i;
     int32_t temp;
     int32_t temp2;
     uint32_t size;
@@ -1654,7 +1654,6 @@
     int32_t nApOut;
 
     int32_t j;
-    int32_t nEarlyOut;
 
     int32_t tempValue;
 
@@ -2203,6 +2202,7 @@
     return 0;
 }
 
+__attribute__ ((visibility ("default")))
 audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
     .tag = AUDIO_EFFECT_LIBRARY_TAG,
     .version = EFFECT_LIBRARY_API_VERSION,
diff --git a/media/libeffects/testlibs/EffectReverb.h b/media/libeffects/testlibs/EffectReverb.h
index 756c5ea..8f405d4 100644
--- a/media/libeffects/testlibs/EffectReverb.h
+++ b/media/libeffects/testlibs/EffectReverb.h
@@ -443,7 +443,4 @@
 */
 static int ReverbUpdateRoom(reverb_object_t* pReverbData, bool fullUpdate);
 
-
-static int ReverbComputeConstants(reverb_object_t *pReverbData, uint32_t samplingRate);
-
 #endif /*ANDROID_EFFECTREVERB_H_*/
diff --git a/media/liberror/Android.bp b/media/liberror/Android.bp
new file mode 100644
index 0000000..f54d354
--- /dev/null
+++ b/media/liberror/Android.bp
@@ -0,0 +1,67 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library_headers {
+    name: "libexpectedutils_headers",
+    host_supported: true,
+    vendor_available: true,
+    min_sdk_version: "29",
+    export_include_dirs: [
+        "include",
+    ],
+    header_libs: [
+        "libbase_headers",
+        "libutils_headers",
+    ],
+    export_header_lib_headers: [
+        "libbase_headers",
+        "libutils_headers",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.bluetooth",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+}
+
+cc_test_host {
+    name: "libexpectedutils_test",
+    srcs: [
+        "expected_utils_test.cpp",
+    ],
+    shared_libs: [
+        "liblog",
+    ],
+    header_libs: [
+        "libexpectedutils_headers",
+    ],
+}
+
+cc_library_headers {
+    name: "liberror_headers",
+    host_supported: true,
+    vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.bluetooth",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+    header_libs: [
+        "libexpectedutils_headers",
+    ],
+    export_header_lib_headers: [
+        "libexpectedutils_headers",
+    ],
+}
diff --git a/media/liberror/expected_utils_test.cpp b/media/liberror/expected_utils_test.cpp
new file mode 100644
index 0000000..252210a
--- /dev/null
+++ b/media/liberror/expected_utils_test.cpp
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <error/expected_utils.h>
+#include <gtest/gtest.h>
+
+#define LOG_TAG "Result-test"
+
+namespace android {
+namespace foo {
+
+class Value {
+  public:
+    explicit Value(int i) : mInt(i) {}
+    Value(const Value&) = delete;
+    Value(Value&&) = default;
+
+    operator int() const { return mInt; }
+
+  private:
+    const int mInt;
+};
+
+class Status {
+  public:
+    explicit Status(int i) : mInt(i) {}
+    Status(const Status&) = delete;
+    Status(Status&&) = default;
+
+    operator int() const { return mInt; }
+
+  private:
+    const int mInt;
+};
+
+bool errorIsOk(const Status& e) {
+    return e == 0;
+}
+
+std::string errorToString(const Status& e) {
+    std::ostringstream str;
+    str << e;
+    return str.str();
+}
+
+using Result = base::expected<Value, Status>;
+
+}  // namespace foo
+
+namespace {
+
+using foo::Result;
+using foo::Status;
+using foo::Value;
+
+TEST(Result, ValueOrReturnSuccess) {
+    Result result = []() -> Result {
+        Value intermediate = VALUE_OR_RETURN(Result(Value(3)));
+        return Value(intermediate + 1);
+    }();
+    ASSERT_TRUE(result.ok());
+    EXPECT_EQ(4, result.value());
+}
+
+TEST(Result, ValueOrReturnFailure) {
+    Result result = []() -> Result {
+        Value intermediate = VALUE_OR_RETURN(Result(base::unexpected(Status(2))));
+        return Value(intermediate + 1);
+    }();
+    ASSERT_FALSE(result.ok());
+    EXPECT_EQ(2, result.error());
+}
+
+TEST(Result, ValueOrReturnStatusSuccess) {
+    Status status = []() -> Status {
+        Value intermediate = VALUE_OR_RETURN_STATUS(Result(Value(3)));
+        (void) intermediate;
+        return Status(0);
+    }();
+    EXPECT_EQ(0, status);
+}
+
+TEST(Result, ValueOrReturnStatusFailure) {
+    Status status = []() -> Status {
+        Value intermediate = VALUE_OR_RETURN_STATUS(Result(base::unexpected(Status(1))));
+        (void) intermediate;
+        return Status(0);
+    }();
+    EXPECT_EQ(1, status);
+}
+
+TEST(Result, ReturnIfErrorSuccess) {
+    Result result = []() -> Result {
+        RETURN_IF_ERROR(Status(0));
+        return Value(5);
+    }();
+    ASSERT_TRUE(result.ok());
+    EXPECT_EQ(5, result.value());
+}
+
+TEST(Result, ReturnIfErrorFailure) {
+    Result result = []() -> Result {
+        RETURN_IF_ERROR(Status(4));
+        return Value(5);
+    }();
+    ASSERT_FALSE(result.ok());
+    EXPECT_EQ(4, result.error());
+}
+
+TEST(Result, ReturnStatusIfErrorSuccess) {
+    Status status = []() -> Status {
+        RETURN_STATUS_IF_ERROR(Status(0));
+        return Status(7);
+    }();
+    EXPECT_EQ(7, status);
+}
+
+TEST(Result, ReturnStatusIfErrorFailure) {
+    Status status = []() -> Status {
+        RETURN_STATUS_IF_ERROR(Status(3));
+        return Status(0);
+    }();
+    EXPECT_EQ(3, status);
+}
+
+TEST(Result, ValueOrFatalSuccess) {
+    Value value = VALUE_OR_FATAL(Result(Value(7)));
+    EXPECT_EQ(7, value);
+}
+
+TEST(Result, ValueOrFatalFailure) {
+    EXPECT_DEATH(VALUE_OR_FATAL(Result(base::unexpected(Status(3)))), "");
+}
+
+TEST(Result, FatalIfErrorSuccess) {
+    FATAL_IF_ERROR(Status(0));
+}
+
+TEST(Result, FatalIfErrorFailure) {
+    EXPECT_DEATH(FATAL_IF_ERROR(Status(3)), "");
+}
+
+}  // namespace
+}  // namespace android
diff --git a/media/liberror/include/error/Result.h b/media/liberror/include/error/Result.h
new file mode 100644
index 0000000..620e6d0
--- /dev/null
+++ b/media/liberror/include/error/Result.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <error/expected_utils.h>
+#include <utils/Errors.h>
+
+namespace android {
+namespace error {
+
+/**
+ * A convenience short-hand for base::expected, where the error type is a status_t.
+ */
+template <typename T>
+using Result = base::expected<T, status_t>;
+
+}  // namespace error
+}  // namespace android
+
+// Below are the implementations of errorIsOk and errorToString for status_t.
+// This allows status_t to be used in conjunction with the expected_utils.h macros.
+// Unfortunately, since status_t is merely a typedef for int rather than a unique type, we have to
+// overload these methods for any int, and do so in the global namespace for ADL to work.
+
+inline bool errorIsOk(int status) {
+    return status == android::OK;
+}
+
+inline std::string errorToString(int status) {
+    return android::statusToString(status);
+}
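
Since error::Result is base::expected over status_t, and the overloads above plug status_t into the expected_utils.h macros, a value-returning helper and a status-returning caller compose as in this hedged sketch (queryHalFrameCount is a hypothetical status_t-returning function; the code is assumed to live inside namespace android):

    error::Result<size_t> getBufferBytes() {
        size_t frames = 0;
        RETURN_IF_ERROR(queryHalFrameCount(&frames));  // hypothetical status_t call
        return frames * sizeof(int16_t);
    }

    status_t configureBuffer() {
        const size_t bytes = VALUE_OR_RETURN_STATUS(getBufferBytes());
        ALOGV("buffer size: %zu bytes", bytes);
        return OK;
    }
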
diff --git a/media/liberror/include/error/expected_utils.h b/media/liberror/include/error/expected_utils.h
new file mode 100644
index 0000000..ddc8517
--- /dev/null
+++ b/media/liberror/include/error/expected_utils.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <sstream>
+
+#include <android-base/expected.h>
+#include <log/log_main.h>
+
+/**
+ * Useful macros for working with status codes and base::expected.
+ *
+ * These macros facilitate various kinds of strategies for reduction of error-handling-related
+ * boilerplate. They can be classified by the following criteria:
+ * - Whether the argument is a standalone status code vs. base::expected (status or value). In the
+ *   latter case, the macro will evaluate to the contained value in the case of success.
+ * - Whether to FATAL or return in response to an error.
+ *   - In the latter case, whether the enclosing function returns a status code or a base::expected.
+ *
+ * The table below summarizes which macro serves which case, based on those criteria:
+ * +--------------------+------------------+------------------------------------------------------+
+ * |     Error response | FATAL            | Early return                                         |
+ * |                    |                  +---------------------------+--------------------------+
+ * | Expression type    |                  | Function returns expected | Function returns status  |
+ * +--------------------+------------------+---------------------------+--------------------------+
+ * | status code        | FATAL_IF_ERROR() | RETURN_IF_ERROR()         | RETURN_STATUS_IF_ERROR() |
+ * +--------------------+------------------+---------------------------+--------------------------+
+ * | expected           | VALUE_OR_FATAL() | VALUE_OR_RETURN()         | VALUE_OR_RETURN_STATUS() |
+ * +--------------------+------------------+---------------------------+--------------------------+
+ *
+ * All macros expect that:
+ * - The error type and value type are movable.
+ * - The macro argument can be assigned to a variable using `auto x = (exp)`.
+ * - The expression errorIsOk(e) for the error type evaluates to a bool which is true iff the
+ *   status is considered success.
+ * - The expression errorToString(e) for a given error type evaluates to a std::string containing a
+ *   human-readable version of the status.
+ */
+
+#define VALUE_OR_RETURN(exp)                                                         \
+    ({                                                                               \
+        auto _tmp = (exp);                                                           \
+        if (!_tmp.ok()) return ::android::base::unexpected(std::move(_tmp.error())); \
+        std::move(_tmp.value());                                                     \
+    })
+
+#define VALUE_OR_RETURN_STATUS(exp)                     \
+    ({                                                  \
+        auto _tmp = (exp);                              \
+        if (!_tmp.ok()) return std::move(_tmp.error()); \
+        std::move(_tmp.value());                        \
+    })
+
+#define VALUE_OR_FATAL(exp)                                                                       \
+    ({                                                                                            \
+        auto _tmp = (exp);                                                                        \
+        LOG_ALWAYS_FATAL_IF(!_tmp.ok(), "Function: %s Line: %d Failed result (%s)", __FUNCTION__, \
+                            __LINE__, errorToString(_tmp.error()).c_str());                       \
+        std::move(_tmp.value());                                                                  \
+    })
+
+#define RETURN_IF_ERROR(exp) \
+    if (auto _tmp = (exp); !errorIsOk(_tmp)) return ::android::base::unexpected(std::move(_tmp));
+
+#define RETURN_STATUS_IF_ERROR(exp) \
+    if (auto _tmp = (exp); !errorIsOk(_tmp)) return _tmp;
+
+#define FATAL_IF_ERROR(exp)                                                                \
+    {                                                                                      \
+        auto _tmp = (exp);                                                                 \
+        LOG_ALWAYS_FATAL_IF(!errorIsOk(_tmp), "Function: %s Line: %d Failed result: (%s)", \
+                            __FUNCTION__, __LINE__, errorToString(_tmp).c_str());         \
+    }
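
For the FATAL column of the table above, the typical home is startup-style code where any failure should abort the process. A short hedged sketch (loadAudioConfig is hypothetical; getBufferBytes refers to the sketch shown after Result.h above):

    void initOrDie() {
        FATAL_IF_ERROR(loadAudioConfig());  // hypothetical status_t call
        const size_t bytes = VALUE_OR_FATAL(getBufferBytes());
        ALOGV("initialized with a %zu-byte buffer", bytes);
    }
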
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 63b769e..9d63f9b 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -14,10 +14,12 @@
       "HeadTrackingProcessor.cpp",
       "ModeSelector.cpp",
       "Pose.cpp",
+      "PoseBias.cpp",
       "PoseDriftCompensator.cpp",
       "PoseRateLimiter.cpp",
       "QuaternionUtil.cpp",
       "ScreenHeadFusion.cpp",
+      "StillnessDetector.cpp",
       "Twist.cpp",
     ],
     export_include_dirs: [
@@ -44,6 +46,7 @@
     ],
     export_shared_lib_headers: [
         "libheadtracking",
+        "libsensor",
     ],
 }
 
@@ -66,10 +69,12 @@
         "HeadTrackingProcessor-test.cpp",
         "ModeSelector-test.cpp",
         "Pose-test.cpp",
+        "PoseBias-test.cpp",
         "PoseDriftCompensator-test.cpp",
         "PoseRateLimiter-test.cpp",
         "QuaternionUtil-test.cpp",
         "ScreenHeadFusion-test.cpp",
+        "StillnessDetector-test.cpp",
         "Twist-test.cpp",
     ],
     shared_libs: [
diff --git a/media/libheadtracking/HeadTrackingProcessor.cpp b/media/libheadtracking/HeadTrackingProcessor.cpp
index 47f7cf0..71fae8a 100644
--- a/media/libheadtracking/HeadTrackingProcessor.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor.cpp
@@ -17,9 +17,10 @@
 #include "media/HeadTrackingProcessor.h"
 
 #include "ModeSelector.h"
-#include "PoseDriftCompensator.h"
+#include "PoseBias.h"
 #include "QuaternionUtil.h"
 #include "ScreenHeadFusion.h"
+#include "StillnessDetector.h"
 
 namespace android {
 namespace media {
@@ -32,13 +33,17 @@
   public:
     HeadTrackingProcessorImpl(const Options& options, HeadTrackingMode initialMode)
         : mOptions(options),
-          mHeadPoseDriftCompensator(PoseDriftCompensator::Options{
-                  .translationalDriftTimeConstant = options.translationalDriftTimeConstant,
-                  .rotationalDriftTimeConstant = options.rotationalDriftTimeConstant,
+          mHeadStillnessDetector(StillnessDetector::Options{
+                  .defaultValue = false,
+                  .windowDuration = options.autoRecenterWindowDuration,
+                  .translationalThreshold = options.autoRecenterTranslationalThreshold,
+                  .rotationalThreshold = options.autoRecenterRotationalThreshold,
           }),
-          mScreenPoseDriftCompensator(PoseDriftCompensator::Options{
-                  .translationalDriftTimeConstant = options.translationalDriftTimeConstant,
-                  .rotationalDriftTimeConstant = options.rotationalDriftTimeConstant,
+          mScreenStillnessDetector(StillnessDetector::Options{
+                  .defaultValue = true,
+                  .windowDuration = options.screenStillnessWindowDuration,
+                  .translationalThreshold = options.screenStillnessTranslationalThreshold,
+                  .rotationalThreshold = options.screenStillnessRotationalThreshold,
           }),
           mModeSelector(ModeSelector::Options{.freshnessTimeout = options.freshnessTimeout},
                         initialMode),
@@ -52,7 +57,8 @@
                             const Twist3f& headTwist) override {
         Pose3f predictedWorldToHead =
                 worldToHead * integrate(headTwist, mOptions.predictionDuration);
-        mHeadPoseDriftCompensator.setInput(timestamp, predictedWorldToHead);
+        mHeadPoseBias.setInput(predictedWorldToHead);
+        mHeadStillnessDetector.setInput(timestamp, predictedWorldToHead);
         mWorldToHeadTimestamp = timestamp;
     }
 
@@ -63,8 +69,9 @@
             mPhysicalToLogicalAngle = mPendingPhysicalToLogicalAngle;
         }
 
-        mScreenPoseDriftCompensator.setInput(
-                timestamp, worldToScreen * Pose3f(rotateY(-mPhysicalToLogicalAngle)));
+        Pose3f worldToLogicalScreen = worldToScreen * Pose3f(rotateY(-mPhysicalToLogicalAngle));
+        mScreenPoseBias.setInput(worldToLogicalScreen);
+        mScreenStillnessDetector.setInput(timestamp, worldToLogicalScreen);
         mWorldToScreenTimestamp = timestamp;
     }
 
@@ -77,18 +84,32 @@
     }
 
     void calculate(int64_t timestamp) override {
-        if (mWorldToHeadTimestamp.has_value()) {
-            const Pose3f worldToHead = mHeadPoseDriftCompensator.getOutput();
-            mScreenHeadFusion.setWorldToHeadPose(mWorldToHeadTimestamp.value(), worldToHead);
-            mModeSelector.setWorldToHeadPose(mWorldToHeadTimestamp.value(), worldToHead);
-        }
-
+        // Handle the screen first, since it might trigger a recentering of the head.
         if (mWorldToScreenTimestamp.has_value()) {
-            const Pose3f worldToLogicalScreen = mScreenPoseDriftCompensator.getOutput();
+            const Pose3f worldToLogicalScreen = mScreenPoseBias.getOutput();
+            bool screenStable = mScreenStillnessDetector.calculate(timestamp);
+            mModeSelector.setScreenStable(mWorldToScreenTimestamp.value(), screenStable);
+            // Whenever the screen is unstable, recenter the head pose.
+            if (!screenStable) {
+                recenter(true, false);
+            }
             mScreenHeadFusion.setWorldToScreenPose(mWorldToScreenTimestamp.value(),
                                                    worldToLogicalScreen);
         }
 
+        // Handle head.
+        if (mWorldToHeadTimestamp.has_value()) {
+            Pose3f worldToHead = mHeadPoseBias.getOutput();
+            // Auto-recenter.
+            if (mHeadStillnessDetector.calculate(timestamp)) {
+                recenter(true, false);
+                worldToHead = mHeadPoseBias.getOutput();
+            }
+
+            mScreenHeadFusion.setWorldToHeadPose(mWorldToHeadTimestamp.value(), worldToHead);
+            mModeSelector.setWorldToHeadPose(mWorldToHeadTimestamp.value(), worldToHead);
+        }
+
         auto maybeScreenToHead = mScreenHeadFusion.calculate();
         if (maybeScreenToHead.has_value()) {
             mModeSelector.setScreenToHeadPose(maybeScreenToHead->timestamp,
@@ -113,10 +134,12 @@
 
     void recenter(bool recenterHead, bool recenterScreen) override {
         if (recenterHead) {
-            mHeadPoseDriftCompensator.recenter();
+            mHeadPoseBias.recenter();
+            mHeadStillnessDetector.reset();
         }
         if (recenterScreen) {
-            mScreenPoseDriftCompensator.recenter();
+            mScreenPoseBias.recenter();
+            mScreenStillnessDetector.reset();
         }
 
         // If a sensor being recentered is included in the current mode, apply rate limiting to
@@ -138,8 +161,10 @@
     std::optional<int64_t> mWorldToHeadTimestamp;
     std::optional<int64_t> mWorldToScreenTimestamp;
     Pose3f mHeadToStagePose;
-    PoseDriftCompensator mHeadPoseDriftCompensator;
-    PoseDriftCompensator mScreenPoseDriftCompensator;
+    PoseBias mHeadPoseBias;
+    PoseBias mScreenPoseBias;
+    StillnessDetector mHeadStillnessDetector;
+    StillnessDetector mScreenStillnessDetector;
     ScreenHeadFusion mScreenHeadFusion;
     ModeSelector mModeSelector;
     PoseRateLimiter mRateLimiter;
diff --git a/media/libheadtracking/ModeSelector-test.cpp b/media/libheadtracking/ModeSelector-test.cpp
index 6247d84..a136e6b 100644
--- a/media/libheadtracking/ModeSelector-test.cpp
+++ b/media/libheadtracking/ModeSelector-test.cpp
@@ -44,6 +44,7 @@
     ModeSelector selector(options, HeadTrackingMode::WORLD_RELATIVE);
 
     selector.setWorldToHeadPose(0, worldToHead);
+    selector.setScreenStable(0, true);
     selector.calculate(0);
     EXPECT_EQ(HeadTrackingMode::WORLD_RELATIVE, selector.getActualMode());
     EXPECT_EQ(selector.getHeadToStagePose(), worldToHead.inverse());
@@ -69,14 +70,46 @@
     ModeSelector selector(options);
 
     selector.setScreenToStagePose(screenToStage);
-
     selector.setDesiredMode(HeadTrackingMode::WORLD_RELATIVE);
     selector.setWorldToHeadPose(0, worldToHead);
+    selector.setScreenStable(0, true);
     selector.calculate(0);
     EXPECT_EQ(HeadTrackingMode::WORLD_RELATIVE, selector.getActualMode());
     EXPECT_EQ(selector.getHeadToStagePose(), worldToHead.inverse() * screenToStage);
 }
 
+TEST(ModeSelector, WorldRelativeUnstable) {
+    const Pose3f worldToHead({1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f screenToStage({4, 5, 6}, Quaternionf::UnitRandom());
+
+    ModeSelector::Options options{.freshnessTimeout = 100};
+    ModeSelector selector(options);
+
+    selector.setScreenToStagePose(screenToStage);
+    selector.setDesiredMode(HeadTrackingMode::WORLD_RELATIVE);
+    selector.setWorldToHeadPose(0, worldToHead);
+    selector.setScreenStable(0, false);
+    selector.calculate(10);
+    EXPECT_EQ(HeadTrackingMode::STATIC, selector.getActualMode());
+    EXPECT_EQ(selector.getHeadToStagePose(), screenToStage);
+}
+
+TEST(ModeSelector, WorldRelativeStableStale) {
+    const Pose3f worldToHead({1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f screenToStage({4, 5, 6}, Quaternionf::UnitRandom());
+
+    ModeSelector::Options options{.freshnessTimeout = 100};
+    ModeSelector selector(options);
+
+    selector.setScreenToStagePose(screenToStage);
+    selector.setDesiredMode(HeadTrackingMode::WORLD_RELATIVE);
+    selector.setWorldToHeadPose(100, worldToHead);
+    selector.setScreenStable(0, true);
+    selector.calculate(101);
+    EXPECT_EQ(HeadTrackingMode::STATIC, selector.getActualMode());
+    EXPECT_EQ(selector.getHeadToStagePose(), screenToStage);
+}
+
 TEST(ModeSelector, WorldRelativeStale) {
     const Pose3f worldToHead({1, 2, 3}, Quaternionf::UnitRandom());
     const Pose3f screenToStage({4, 5, 6}, Quaternionf::UnitRandom());
@@ -85,7 +118,6 @@
     ModeSelector selector(options);
 
     selector.setScreenToStagePose(screenToStage);
-
     selector.setDesiredMode(HeadTrackingMode::WORLD_RELATIVE);
     selector.setWorldToHeadPose(0, worldToHead);
     selector.calculate(101);
@@ -101,7 +133,6 @@
     ModeSelector selector(options);
 
     selector.setScreenToStagePose(screenToStage);
-
     selector.setDesiredMode(HeadTrackingMode::SCREEN_RELATIVE);
     selector.setScreenToHeadPose(0, screenToHead);
     selector.calculate(0);
@@ -118,10 +149,10 @@
     ModeSelector selector(options);
 
     selector.setScreenToStagePose(screenToStage);
-
     selector.setDesiredMode(HeadTrackingMode::SCREEN_RELATIVE);
     selector.setScreenToHeadPose(0, screenToHead);
     selector.setWorldToHeadPose(50, worldToHead);
+    selector.setScreenStable(50, true);
     selector.calculate(101);
     EXPECT_EQ(HeadTrackingMode::WORLD_RELATIVE, selector.getActualMode());
     EXPECT_EQ(selector.getHeadToStagePose(), worldToHead.inverse() * screenToStage);
@@ -139,6 +170,7 @@
     selector.setDesiredMode(HeadTrackingMode::SCREEN_RELATIVE);
     selector.setScreenToHeadPose(50, std::nullopt);
     selector.setWorldToHeadPose(50, worldToHead);
+    selector.setScreenStable(50, true);
     selector.calculate(101);
     EXPECT_EQ(HeadTrackingMode::WORLD_RELATIVE, selector.getActualMode());
     EXPECT_EQ(selector.getHeadToStagePose(), worldToHead.inverse() * screenToStage);
diff --git a/media/libheadtracking/ModeSelector.cpp b/media/libheadtracking/ModeSelector.cpp
index 16e1712..cb3a27f 100644
--- a/media/libheadtracking/ModeSelector.cpp
+++ b/media/libheadtracking/ModeSelector.cpp
@@ -41,11 +41,18 @@
     mWorldToHeadTimestamp = timestamp;
 }
 
+void ModeSelector::setScreenStable(int64_t timestamp, bool stable) {
+    mScreenStable = stable;
+    mScreenStableTimestamp = timestamp;
+}
+
 void ModeSelector::calculateActualMode(int64_t timestamp) {
     bool isValidScreenToHead = mScreenToHead.has_value() &&
                                timestamp - mScreenToHeadTimestamp < mOptions.freshnessTimeout;
     bool isValidWorldToHead = mWorldToHead.has_value() &&
                               timestamp - mWorldToHeadTimestamp < mOptions.freshnessTimeout;
+    bool isValidScreenStable = mScreenStable.has_value() &&
+                              timestamp - mScreenStableTimestamp < mOptions.freshnessTimeout;
 
     HeadTrackingMode mode = mDesiredMode;
 
@@ -58,7 +65,7 @@
 
     // Optional downgrade from world-relative to static.
     if (mode == HeadTrackingMode::WORLD_RELATIVE) {
-        if (!isValidWorldToHead) {
+        if (!isValidWorldToHead || !isValidScreenStable || !mScreenStable.value()) {
             mode = HeadTrackingMode::STATIC;
         }
     }
diff --git a/media/libheadtracking/ModeSelector.h b/media/libheadtracking/ModeSelector.h
index 17a5142..e537040 100644
--- a/media/libheadtracking/ModeSelector.h
+++ b/media/libheadtracking/ModeSelector.h
@@ -56,6 +56,7 @@
  *   from screen-relative to world-relative.
  * - When we cannot get a fresh estimate of the world-to-head pose, we will fall back from
  *   world-relative to static.
+ * - In world-relative mode, if the screen is unstable, we will fall back to static.
  *
  * All the timestamps used here are of arbitrary units and origin. They just need to be consistent
  * between all the calls and with the Options provided for determining freshness and rate limiting.
@@ -92,6 +93,12 @@
     void setWorldToHeadPose(int64_t timestamp, const Pose3f& worldToHead);
 
     /**
+     * Set whether the screen is considered stable.
+     * The timestamp needs to reflect how fresh the sample is.
+     */
+    void setScreenStable(int64_t timestamp, bool stable);
+
+    /**
      * Process all the previous inputs and update the outputs.
      */
     void calculate(int64_t timestamp);
@@ -116,6 +123,8 @@
     int64_t mScreenToHeadTimestamp;
     std::optional<Pose3f> mWorldToHead;
     int64_t mWorldToHeadTimestamp;
+    std::optional<bool> mScreenStable;
+    int64_t mScreenStableTimestamp;
 
     HeadTrackingMode mActualMode;
     Pose3f mHeadToStage;
diff --git a/media/libheadtracking/Pose.cpp b/media/libheadtracking/Pose.cpp
index 47241ce..ae39512 100644
--- a/media/libheadtracking/Pose.cpp
+++ b/media/libheadtracking/Pose.cpp
@@ -43,7 +43,7 @@
         return {to, false};
     }
     // Always rate limit if t is 0 (required to avoid division by 0).
-    if (t == 0) {
+    if (t == 0 || maxTranslationalVelocity == 0 || maxRotationalVelocity == 0) {
         return {from, true};
     }
 
diff --git a/media/libheadtracking/PoseBias-test.cpp b/media/libheadtracking/PoseBias-test.cpp
new file mode 100644
index 0000000..9f42a2c
--- /dev/null
+++ b/media/libheadtracking/PoseBias-test.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "PoseBias.h"
+#include "QuaternionUtil.h"
+#include "TestUtil.h"
+
+namespace android {
+namespace media {
+namespace {
+
+using Eigen::Quaternionf;
+using Eigen::Vector3f;
+
+TEST(PoseBias, Initial) {
+    PoseBias bias;
+    EXPECT_EQ(bias.getOutput(), Pose3f());
+}
+
+TEST(PoseBias, Basic) {
+    Pose3f pose1({1, 2, 3}, Quaternionf::UnitRandom());
+    Pose3f pose2({4, 5, 6}, Quaternionf::UnitRandom());
+
+    PoseBias bias;
+    bias.setInput(pose1);
+    EXPECT_EQ(pose1, bias.getOutput());
+    bias.recenter();
+    EXPECT_EQ(bias.getOutput(), Pose3f());
+    bias.setInput(pose2);
+    EXPECT_EQ(bias.getOutput(), pose1.inverse() * pose2);
+    bias.recenter();
+    EXPECT_EQ(bias.getOutput(), Pose3f());
+}
+
+}  // namespace
+}  // namespace media
+}  // namespace android
diff --git a/media/libheadtracking/PoseBias.cpp b/media/libheadtracking/PoseBias.cpp
new file mode 100644
index 0000000..33afca6
--- /dev/null
+++ b/media/libheadtracking/PoseBias.cpp
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PoseBias.h"
+
+namespace android {
+namespace media {
+
+void PoseBias::setInput(const Pose3f& input) {
+    mLastWorldToInput = input;
+}
+
+void PoseBias::recenter() {
+    mBiasToWorld = mLastWorldToInput.inverse();
+}
+
+Pose3f PoseBias::getOutput() const {
+    return mBiasToWorld * mLastWorldToInput;
+}
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libheadtracking/PoseBias.h b/media/libheadtracking/PoseBias.h
new file mode 100644
index 0000000..9acb49d
--- /dev/null
+++ b/media/libheadtracking/PoseBias.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include "media/Pose.h"
+
+namespace android {
+namespace media {
+
+/**
+ * Biasing for a stream of poses.
+ *
+ * This filter takes a stream of poses and at any time during the stream, can change the frame of
+ * reference for the stream to be that of the last pose received, via the recenter() operation.
+ *
+ * Typical usage:
+ * PoseBias bias;
+ *
+ * bias.setInput(...);
+ * output = bias.getOutput();
+ * bias.setInput(...);
+ * output = bias.getOutput();
+ * bias.setInput(...);
+ * output = bias.getOutput();
+ * bias.recenter();  // Reference frame is now equal to the last input.
+ * output = bias.getOutput();  // This is now the identity pose.
+ *
+ * There doesn't need to be a 1:1 correspondence between setInput() and getOutput() calls.
+ * The initial bias point is identity.
+ *
+ * This implementation is thread-compatible, but not thread-safe.
+ */
+class PoseBias {
+  public:
+    void setInput(const Pose3f& input);
+
+    void recenter();
+
+    Pose3f getOutput() const;
+
+  private:
+    Pose3f mLastWorldToInput;
+    Pose3f mBiasToWorld;
+};
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libheadtracking/PoseProcessingGraph.png b/media/libheadtracking/PoseProcessingGraph.png
index 0363068..325b667 100644
--- a/media/libheadtracking/PoseProcessingGraph.png
+++ b/media/libheadtracking/PoseProcessingGraph.png
Binary files differ
diff --git a/media/libheadtracking/README.md b/media/libheadtracking/README.md
index 3d5b71a..44f7bb2 100644
--- a/media/libheadtracking/README.md
+++ b/media/libheadtracking/README.md
@@ -115,11 +115,9 @@
 #### World
 
 It is sometimes convenient to use an intermediate frame when dealing with
-head-to-screen transforms. The “world” frame is an arbitrary frame of reference
-in the physical world, relative to which we can measure the head pose and screen
-pose. In (very common) cases when we can’t establish such an absolute frame, we
-can take each measurement relative to a separate, arbitrary frame and high-pass
-the result.
+head-to-screen transforms. The “world” frame is a frame of reference in the
+physical world, relative to which we can measure the head pose and screen pose.
+It is arbitrary, but expected to be stable (fixed).
 
 ## Processing Description
 
@@ -133,15 +131,10 @@
 The Predictor block gets pose + twist (pose derivative) and extrapolates to
 obtain a predicted head pose (w/ given latency).
 
-### Drift / Bias Compensator
+### Bias
 
-The Drift / Bias Compensator blocks serve two purposes:
-
-- Compensate for floating reference axes by applying a high-pass filter, which
-  slowly pulls the pose toward identity.
-- Establish the reference frame for the poses by having the ability to set the
-  current pose as the reference for future poses (recentering). Effectively,
-  this is resetting the filter state to identity.
+The Bias blocks establish the reference frame for the poses by allowing the
+current pose to be set as the reference for future poses (recentering).
 
 ### Orientation Compensation
 
@@ -157,6 +150,14 @@
 module may indicate that the user is likely not in front of the screen via the
 “valid” output.
 
+### Stillness Detector
+
+The stillness detector blocks detect when their incoming pose stream has been
+stable for a given amount of time (allowing for a configurable amount of error).
+When the head is considered still, a recenter operation is triggered
+(“auto-recentering”); when the screen is considered not still, the mode selector
+uses this information to force static mode.
+
 ### Mode Selector
 
 The Mode Selector block aggregates the various sources of pose information into
@@ -168,7 +169,8 @@
 
 - If the desired mode is static, the actual mode is static.
 - If the desired mode is world-relative:
-    - If head poses are fresh, the actual mode is world-relative.
+    - If head and screen poses are fresh and the screen is stable (stillness
+      detector output is true), the actual mode is world-relative.
     - Otherwise the actual mode is static.
 - If the desired mode is screen-relative:
     - If head and screen poses are fresh and the ‘valid’ signal is asserted, the
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index ec5e1ec..8ebaf6e 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -26,7 +26,6 @@
 
 #include <android-base/thread_annotations.h>
 #include <log/log_main.h>
-#include <sensor/Sensor.h>
 #include <sensor/SensorEventQueue.h>
 #include <sensor/SensorManager.h>
 #include <utils/Looper.h>
@@ -133,14 +132,14 @@
 
         {
             std::lock_guard lock(mMutex);
-            mEnabledSensorFormats.emplace(sensor, format);
+            mEnabledSensorsExtra.emplace(sensor, SensorExtra{ .format = format });
         }
 
         // Enable the sensor.
         if (mQueue->enableSensor(sensor, samplingPeriod.count(), 0, 0)) {
             ALOGE("Failed to enable sensor");
             std::lock_guard lock(mMutex);
-            mEnabledSensorFormats.erase(sensor);
+            mEnabledSensorsExtra.erase(sensor);
             return false;
         }
 
@@ -151,14 +150,14 @@
     void stopSensor(int handle) override {
         mEnabledSensors.erase(handle);
         std::lock_guard lock(mMutex);
-        mEnabledSensorFormats.erase(handle);
+        mEnabledSensorsExtra.erase(handle);
     }
 
   private:
     enum DataFormat {
         kUnknown,
         kQuaternion,
-        kRotationVectorsAndFlags,
+        kRotationVectorsAndDiscontinuityCount,
     };
 
     struct PoseEvent {
@@ -167,13 +166,18 @@
         bool isNewReference;
     };
 
+    struct SensorExtra {
+        DataFormat format;
+        std::optional<int32_t> discontinuityCount;
+    };
+
     sp<Looper> mLooper;
     Listener* const mListener;
     SensorManager* const mSensorManager;
     std::thread mThread;
     std::mutex mMutex;
     std::map<int32_t, SensorEnableGuard> mEnabledSensors;
-    std::map<int32_t, DataFormat> mEnabledSensorFormats GUARDED_BY(mMutex);
+    std::map<int32_t, SensorExtra> mEnabledSensorsExtra GUARDED_BY(mMutex);
     sp<SensorEventQueue> mQueue;
 
     // We must do some of the initialization operations on the worker thread, because the API relies
@@ -248,17 +252,16 @@
     }
 
     void handleEvent(const ASensorEvent& event) {
-        DataFormat format;
+        PoseEvent value;
         {
             std::lock_guard lock(mMutex);
-            auto iter = mEnabledSensorFormats.find(event.sensor);
-            if (iter == mEnabledSensorFormats.end()) {
+            auto iter = mEnabledSensorsExtra.find(event.sensor);
+            if (iter == mEnabledSensorsExtra.end()) {
                 // This can happen if we have any pending events shortly after stopping.
                 return;
             }
-            format = iter->second;
+            value = parseEvent(event, iter->second.format, &iter->second.discontinuityCount);
         }
-        auto value = parseEvent(event, format);
         mListener->onPose(event.timestamp, event.sensor, value.pose, value.twist,
                           value.isNewReference);
     }
@@ -274,14 +277,14 @@
             return DataFormat::kQuaternion;
         }
 
-        if (sensor->getStringType() == "com.google.hardware.sensor.hid_dynamic.headtracker") {
-            return DataFormat::kRotationVectorsAndFlags;
+        if (sensor->getType() == ASENSOR_TYPE_HEAD_TRACKER) {
+            return DataFormat::kRotationVectorsAndDiscontinuityCount;
         }
 
         return DataFormat::kUnknown;
     }
 
-    std::optional<const Sensor> getSensorByHandle(int32_t handle) {
+    std::optional<const Sensor> getSensorByHandle(int32_t handle) override {
         const Sensor* const* list;
         ssize_t size;
 
@@ -313,8 +316,8 @@
         return std::nullopt;
     }
 
-    static PoseEvent parseEvent(const ASensorEvent& event, DataFormat format) {
-        // TODO(ytai): Add more types.
+    static PoseEvent parseEvent(const ASensorEvent& event, DataFormat format,
+                                std::optional<int32_t>* discontinuityCount) {
         switch (format) {
             case DataFormat::kQuaternion: {
                 Eigen::Quaternionf quat(event.data[3], event.data[0], event.data[1], event.data[2]);
@@ -323,19 +326,19 @@
                 return PoseEvent{Pose3f(quat), std::optional<Twist3f>(), false};
             }
 
-            case DataFormat::kRotationVectorsAndFlags: {
-                // Custom sensor, assumed to contain:
-                // 3 floats representing orientation as a rotation vector (in rad).
-                // 3 floats representing angular velocity as a rotation vector (in rad/s).
-                // 1 uint32_t of flags, where:
-                // - LSb is '1' iff the given sample is the first one in a new frame of reference.
-                // - The rest of the bits are reserved for future use.
-                Eigen::Vector3f rotation = {event.data[0], event.data[1], event.data[2]};
-                Eigen::Vector3f twist = {event.data[3], event.data[4], event.data[5]};
+            case DataFormat::kRotationVectorsAndDiscontinuityCount: {
+                Eigen::Vector3f rotation = {event.head_tracker.rx, event.head_tracker.ry,
+                                            event.head_tracker.rz};
+                Eigen::Vector3f twist = {event.head_tracker.vx, event.head_tracker.vy,
+                                         event.head_tracker.vz};
                 Eigen::Quaternionf quat = rotationVectorToQuaternion(rotation);
-                uint32_t flags = *reinterpret_cast<const uint32_t*>(&event.data[6]);
+                bool isNewReference =
+                        !discontinuityCount->has_value() ||
+                        discontinuityCount->value() != event.head_tracker.discontinuity_count;
+                *discontinuityCount = event.head_tracker.discontinuity_count;
+
                 return PoseEvent{Pose3f(quat), Twist3f(Eigen::Vector3f::Zero(), twist),
-                                 (flags & (1 << 0)) != 0};
+                                 isNewReference};
             }
 
             default:
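A small standalone illustration (not part of the patch) of the discontinuity-count handling introduced above: a new frame of reference is flagged for the first sample and whenever the counter changes. The function name is hypothetical.

#include <cstdint>
#include <optional>

// Mirrors the isNewReference computation in parseEvent() for the head-tracker
// format: the first sample, or any change in the counter, starts a new reference.
bool updateAndCheckNewReference(std::optional<int32_t>* lastCount, int32_t discontinuityCount) {
    const bool isNewReference =
            !lastCount->has_value() || lastCount->value() != discontinuityCount;
    *lastCount = discontinuityCount;
    return isNewReference;
}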
diff --git a/media/libheadtracking/StillnessDetector-test.cpp b/media/libheadtracking/StillnessDetector-test.cpp
new file mode 100644
index 0000000..b6cd479
--- /dev/null
+++ b/media/libheadtracking/StillnessDetector-test.cpp
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "QuaternionUtil.h"
+#include "StillnessDetector.h"
+#include "TestUtil.h"
+
+namespace android {
+namespace media {
+namespace {
+
+using Eigen::Quaternionf;
+using Eigen::Vector3f;
+using Options = StillnessDetector::Options;
+
+class StillnessDetectorTest : public testing::TestWithParam<bool> {
+  public:
+    void SetUp() override { mDefaultValue = GetParam(); }
+
+  protected:
+    bool mDefaultValue;
+};
+
+TEST_P(StillnessDetectorTest, Still) {
+    StillnessDetector detector(Options{.defaultValue = mDefaultValue,
+                                       .windowDuration = 1000,
+                                       .translationalThreshold = 1,
+                                       .rotationalThreshold = 0.05});
+
+    const Pose3f baseline(Vector3f{1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f withinThreshold =
+            baseline * Pose3f(Vector3f(0.3, -0.3, 0), rotateX(0.01) * rotateY(-0.01));
+
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(0, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(300, withinThreshold);
+    EXPECT_EQ(mDefaultValue, detector.calculate(300));
+    detector.setInput(600, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(600));
+    detector.setInput(999, withinThreshold);
+    EXPECT_EQ(mDefaultValue, detector.calculate(999));
+    detector.setInput(1000, baseline);
+    EXPECT_TRUE(detector.calculate(1000));
+}
+
+TEST_P(StillnessDetectorTest, ZeroDuration) {
+    StillnessDetector detector(Options{.defaultValue = mDefaultValue, .windowDuration = 0});
+    EXPECT_TRUE(detector.calculate(0));
+    EXPECT_TRUE(detector.calculate(1000));
+}
+
+TEST_P(StillnessDetectorTest, NotStillTranslation) {
+    StillnessDetector detector(Options{.defaultValue = mDefaultValue,
+                                       .windowDuration = 1000,
+                                       .translationalThreshold = 1,
+                                       .rotationalThreshold = 0.05});
+
+    const Pose3f baseline(Vector3f{1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f withinThreshold =
+            baseline * Pose3f(Vector3f(0.3, -0.3, 0), rotateX(0.01) * rotateY(-0.01));
+    const Pose3f outsideThreshold = baseline * Pose3f(Vector3f(1, 1, 0));
+
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(0, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(300, outsideThreshold);
+    EXPECT_EQ(mDefaultValue, detector.calculate(300));
+    detector.setInput(600, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(600));
+    detector.setInput(1299, withinThreshold);
+    EXPECT_FALSE(detector.calculate(1299));
+    detector.setInput(1300, baseline);
+    EXPECT_TRUE(detector.calculate(1300));
+}
+
+TEST_P(StillnessDetectorTest, NotStillRotation) {
+    StillnessDetector detector(Options{.defaultValue = mDefaultValue,
+                                       .windowDuration = 1000,
+                                       .translationalThreshold = 1,
+                                       .rotationalThreshold = 0.05});
+
+    const Pose3f baseline(Vector3f{1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f withinThreshold =
+            baseline * Pose3f(Vector3f(0.3, -0.3, 0), rotateX(0.03) * rotateY(-0.03));
+    const Pose3f outsideThreshold = baseline * Pose3f(rotateZ(0.06));
+
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(0, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(300, outsideThreshold);
+    EXPECT_EQ(mDefaultValue, detector.calculate(300));
+    detector.setInput(600, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(600));
+    detector.setInput(1299, withinThreshold);
+    EXPECT_FALSE(detector.calculate(1299));
+    detector.setInput(1300, baseline);
+    EXPECT_TRUE(detector.calculate(1300));
+}
+
+TEST_P(StillnessDetectorTest, Suppression) {
+    StillnessDetector detector(Options{.defaultValue = mDefaultValue,
+                                       .windowDuration = 1000,
+                                       .translationalThreshold = 1,
+                                       .rotationalThreshold = 0.05});
+
+    const Pose3f baseline(Vector3f{1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f outsideThreshold = baseline * Pose3f(Vector3f(1.1, 0, 0));
+    const Pose3f middlePoint = baseline * Pose3f(Vector3f(0.55, 0, 0));
+
+    detector.setInput(0, baseline);
+    detector.setInput(1000, baseline);
+    EXPECT_TRUE(detector.calculate(1000));
+    detector.setInput(1100, outsideThreshold);
+    EXPECT_FALSE(detector.calculate(1100));
+    detector.setInput(1500, middlePoint);
+    EXPECT_FALSE(detector.calculate(1500));
+    EXPECT_FALSE(detector.calculate(1999));
+    EXPECT_TRUE(detector.calculate(2000));
+}
+
+TEST_P(StillnessDetectorTest, Reset) {
+    StillnessDetector detector(Options{.defaultValue = mDefaultValue,
+                                       .windowDuration = 1000,
+                                       .translationalThreshold = 1,
+                                       .rotationalThreshold = 0.05});
+
+    const Pose3f baseline(Vector3f{1, 2, 3}, Quaternionf::UnitRandom());
+    const Pose3f withinThreshold =
+            baseline * Pose3f(Vector3f(0.3, -0.3, 0), rotateX(0.01) * rotateY(-0.01));
+    EXPECT_EQ(mDefaultValue, detector.calculate(0));
+    detector.setInput(300, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(300));
+    detector.reset();
+    detector.setInput(600, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(600));
+    detector.setInput(900, withinThreshold);
+    EXPECT_EQ(mDefaultValue, detector.calculate(900));
+    detector.setInput(1200, baseline);
+    EXPECT_EQ(mDefaultValue, detector.calculate(1200));
+    detector.setInput(1599, withinThreshold);
+    EXPECT_EQ(mDefaultValue, detector.calculate(1599));
+    detector.setInput(1600, baseline);
+    EXPECT_TRUE(detector.calculate(1600));
+}
+
+INSTANTIATE_TEST_SUITE_P(StillnessDetectorTestParametrized, StillnessDetectorTest,
+                         testing::Values(false, true));
+
+}  // namespace
+}  // namespace media
+}  // namespace android
diff --git a/media/libheadtracking/StillnessDetector.cpp b/media/libheadtracking/StillnessDetector.cpp
new file mode 100644
index 0000000..be7c893
--- /dev/null
+++ b/media/libheadtracking/StillnessDetector.cpp
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "StillnessDetector.h"
+
+namespace android {
+namespace media {
+
+StillnessDetector::StillnessDetector(const Options& options)
+    : mOptions(options), mCosHalfRotationalThreshold(cos(mOptions.rotationalThreshold / 2)) {}
+
+void StillnessDetector::reset() {
+    mFifo.clear();
+    mWindowFull = false;
+    mSuppressionDeadline.reset();
+}
+
+void StillnessDetector::setInput(int64_t timestamp, const Pose3f& input) {
+    mFifo.push_back(TimestampedPose{timestamp, input});
+    discardOld(timestamp);
+}
+
+bool StillnessDetector::calculate(int64_t timestamp) {
+    discardOld(timestamp);
+
+    // Check whether all the poses in the queue are in the proximity of the new one. We want to do
+    // this before checking the overriding conditions below, in order to update the suppression
+    // deadline correctly. We always go from end to start, to find the most recent pose that
+    // violated stillness and update the suppression deadline if it has not been set or if the new
+    // one ends after the current one.
+    bool moved = false;
+
+    if (!mFifo.empty()) {
+        for (auto iter = mFifo.rbegin() + 1; iter != mFifo.rend(); ++iter) {
+            const auto& event = *iter;
+            if (!areNear(event.pose, mFifo.back().pose)) {
+                // Enable suppression for the duration of the window.
+                int64_t deadline = event.timestamp + mOptions.windowDuration;
+                if (!mSuppressionDeadline.has_value() || mSuppressionDeadline.value() < deadline) {
+                    mSuppressionDeadline = deadline;
+                }
+                moved = true;
+                break;
+            }
+        }
+    }
+
+    // If the window has not yet filled up, return the default value.
+    if (!mWindowFull) {
+        return mOptions.defaultValue;
+    }
+
+    // Force "in motion" while the suppression deadline is active.
+    if (mSuppressionDeadline.has_value()) {
+        return false;
+    }
+
+    return !moved;
+}
+
+void StillnessDetector::discardOld(int64_t timestamp) {
+    // Handle the special case of the window duration being zero (always considered full).
+    if (mOptions.windowDuration == 0) {
+        mFifo.clear();
+        mWindowFull = true;
+    }
+
+    // Remove any events from the queue that are older than the window. If there were any such
+    // events we consider the window full.
+    const int64_t windowStart = timestamp - mOptions.windowDuration;
+    while (!mFifo.empty() && mFifo.front().timestamp <= windowStart) {
+        mWindowFull = true;
+        mFifo.pop_front();
+    }
+
+    // Expire the suppression deadline.
+    if (mSuppressionDeadline.has_value() && mSuppressionDeadline <= timestamp) {
+        mSuppressionDeadline.reset();
+    }
+}
+
+bool StillnessDetector::areNear(const Pose3f& pose1, const Pose3f& pose2) const {
+    // Check translation. We use the L1 norm to reduce computational load at the expense of
+    // accuracy. The L1 norm is an upper bound for the actual (L2) norm, so this approach will err
+    // on the side of "not near".
+    if ((pose1.translation() - pose2.translation()).lpNorm<1>() > mOptions.translationalThreshold) {
+        return false;
+    }
+
+    // Check orientation.
+    // The angle x between the quaternions is greater than that threshold iff
+    // cos(x/2) < cos(threshold/2).
+    // cos(x/2) can be efficiently calculated as the dot product of both quaternions.
+    if (pose1.rotation().dot(pose2.rotation()) < mCosHalfRotationalThreshold) {
+        return false;
+    }
+
+    return true;
+}
+
+}  // namespace media
+}  // namespace android
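The rotational check in areNear() relies on the identity that, for unit quaternions q1 and q2 = q1 * delta, the coefficient dot product q1.dot(q2) equals the cosine of half the rotation angle of delta. A small self-contained check (not part of the patch; Eigen assumed):

#include <cassert>
#include <cmath>

#include <Eigen/Geometry>

// For q2 = q1 * delta, where delta rotates by angle x, q1.dot(q2) == cos(x / 2),
// so "angle > threshold" is equivalent to "dot < cos(threshold / 2)".
void checkHalfAngleIdentity() {
    const float x = 0.3f;  // relative rotation angle, in radians
    const Eigen::Quaternionf q1 = Eigen::Quaternionf::UnitRandom();
    const Eigen::Quaternionf q2 =
            q1 * Eigen::Quaternionf(Eigen::AngleAxisf(x, Eigen::Vector3f::UnitZ()));
    assert(std::abs(q1.dot(q2) - std::cos(x / 2)) < 1e-5f);
}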
diff --git a/media/libheadtracking/StillnessDetector.h b/media/libheadtracking/StillnessDetector.h
new file mode 100644
index 0000000..ee4b2d8
--- /dev/null
+++ b/media/libheadtracking/StillnessDetector.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <deque>
+
+#include <media/Pose.h>
+
+namespace android {
+namespace media {
+
+/**
+ * Given a stream of poses, determines if the pose is stable ("still").
+ * Stillness is defined as all poses in the recent history ("window") being near the most recent
+ * sample.
+ *
+ * Typical usage:
+ *
+ * StillnessDetector detector(StillnessDetector::Options{...});
+ *
+ * while (...) {
+ *    detector.setInput(timestamp, pose);
+ *    bool still = detector.calculate(timestamp);
+ * }
+ *
+ * The detection is not considered reliable until a sufficient number of samples has been provided
+ * for an initial fill-up of the window. During that time, the detector will return whatever default
+ * value has been configured.
+ * The reset() method can be used to empty the window again and get back to this initial state.
+ * In the special case of the window size being 0, the state will always be considered "still".
+ */
+class StillnessDetector {
+  public:
+    /**
+     * Configuration options for the detector.
+     */
+    struct Options {
+        /**
+         * During the initial fill of the window, should we consider the state still?
+         */
+        bool defaultValue;
+        /**
+         * How long is the window, in ticks. The special value of 0 indicates that the stream is
+         * always considered still.
+         */
+        int64_t windowDuration;
+        /**
+         * How much of a translational deviation from the target (in meters) is considered motion.
+         * This is an approximate quantity - the actual threshold might be a little different as we
+         * trade off accuracy for computational efficiency.
+         */
+        float translationalThreshold;
+        /**
+         * How much of a rotational deviation from the target (in radians) is considered motion.
+         * This is an approximate quantity - the actual threshold might be a little different as we
+         * trade off accuracy for computational efficiency.
+         */
+        float rotationalThreshold;
+    };
+
+    /** Ctor. */
+    explicit StillnessDetector(const Options& options);
+
+    /** Clear the window. */
+    void reset();
+    /** Push a new sample. */
+    void setInput(int64_t timestamp, const Pose3f& input);
+    /** Calculate whether the stream is still at the given timestamp. */
+    bool calculate(int64_t timestamp);
+
+  private:
+    struct TimestampedPose {
+        int64_t timestamp;
+        Pose3f pose;
+    };
+
+    const Options mOptions;
+    // Precalculated cos(mOptions.rotationalThreshold / 2)
+    const float mCosHalfRotationalThreshold;
+    std::deque<TimestampedPose> mFifo;
+    bool mWindowFull = false;
+    // As soon as motion is detected, this will be set to the time of detection + window duration,
+    // and during this time we will always consider ourselves in motion without checking. This is
+    // used for hysteresis purposes: because of the approximate method we use for determining
+    // stillness, we may otherwise toggle back and forth at a rate faster than the window size.
+    std::optional<int64_t> mSuppressionDeadline;
+
+    bool areNear(const Pose3f& pose1, const Pose3f& pose2) const;
+    void discardOld(int64_t timestamp);
+};
+
+}  // namespace media
+}  // namespace android
diff --git a/media/libheadtracking/include/media/HeadTrackingProcessor.h b/media/libheadtracking/include/media/HeadTrackingProcessor.h
index 9fea273..1744be3 100644
--- a/media/libheadtracking/include/media/HeadTrackingProcessor.h
+++ b/media/libheadtracking/include/media/HeadTrackingProcessor.h
@@ -38,10 +38,14 @@
     struct Options {
         float maxTranslationalVelocity = std::numeric_limits<float>::infinity();
         float maxRotationalVelocity = std::numeric_limits<float>::infinity();
-        float translationalDriftTimeConstant = std::numeric_limits<float>::infinity();
-        float rotationalDriftTimeConstant = std::numeric_limits<float>::infinity();
         int64_t freshnessTimeout = std::numeric_limits<int64_t>::max();
         float predictionDuration = 0;
+        int64_t autoRecenterWindowDuration = std::numeric_limits<int64_t>::max();
+        float autoRecenterTranslationalThreshold = std::numeric_limits<float>::infinity();
+        float autoRecenterRotationalThreshold = std::numeric_limits<float>::infinity();
+        int64_t screenStillnessWindowDuration = 0;
+        float screenStillnessTranslationalThreshold = std::numeric_limits<float>::infinity();
+        float screenStillnessRotationalThreshold = std::numeric_limits<float>::infinity();
     };
 
     /** Sets the desired head-tracking mode. */
diff --git a/media/libheadtracking/include/media/SensorPoseProvider.h b/media/libheadtracking/include/media/SensorPoseProvider.h
index d2a6b77..0f42074 100644
--- a/media/libheadtracking/include/media/SensorPoseProvider.h
+++ b/media/libheadtracking/include/media/SensorPoseProvider.h
@@ -20,6 +20,7 @@
 #include <optional>
 
 #include <android/sensor.h>
+#include <sensor/Sensor.h>
 
 #include "Pose.h"
 #include "Twist.h"
@@ -91,6 +92,14 @@
      * @param handle The sensor handle, as provided to startSensor().
      */
     virtual void stopSensor(int32_t handle) = 0;
+
+    /**
+     * Returns the sensor or nullopt if it does not exist.
+     *
+     * The Sensor object has const methods that can be used to
+     * discover properties of the sensor.
+     */
+    virtual std::optional<const Sensor> getSensorByHandle(int32_t handle) = 0;
 };
 
 }  // namespace media
diff --git a/media/libheadtracking/include/media/Twist.h b/media/libheadtracking/include/media/Twist.h
index e2fc203..291cea3 100644
--- a/media/libheadtracking/include/media/Twist.h
+++ b/media/libheadtracking/include/media/Twist.h
@@ -56,6 +56,16 @@
                mRotationalVelocity.isApprox(other.mRotationalVelocity, prec);
     }
 
+    template<typename T>
+    Twist3f operator*(const T& s) const {
+        return Twist3f(mTranslationalVelocity * s, mRotationalVelocity * s);
+    }
+
+    template<typename T>
+    Twist3f operator/(const T& s) const {
+        return Twist3f(mTranslationalVelocity / s, mRotationalVelocity / s);
+    }
+
   private:
     Eigen::Vector3f mTranslationalVelocity;
     Eigen::Vector3f mRotationalVelocity;
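A short sketch (not part of the patch) of what the new scalar operators enable, e.g. scaling a velocity twist by a time step; the helper names are hypothetical:

#include "media/Twist.h"

using android::media::Twist3f;

// Scale a velocity twist by a time delta, e.g. as a building block for simple
// extrapolation over dt seconds.
Twist3f displacementOverDt(const Twist3f& velocity, float dt) {
    return velocity * dt;
}

// Divide an accumulated twist by a sample count to get an average.
Twist3f averageOfSum(const Twist3f& sum, int count) {
    return sum / static_cast<float>(count);
}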
diff --git a/media/libheif/Android.bp b/media/libheif/Android.bp
index 6a3427e..55ba61a 100644
--- a/media/libheif/Android.bp
+++ b/media/libheif/Android.bp
@@ -26,7 +26,5 @@
         "-Wall",
     ],
 
-    include_dirs: [],
-
     export_include_dirs: ["include"],
 }
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 4a96e7b..1b8656d 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -15,6 +15,7 @@
  */
 
 //#define LOG_NDEBUG 0
+#include "include/HeifDecoderAPI.h"
 #define LOG_TAG "HeifDecoderImpl"
 
 #include "HeifDecoderImpl.h"
@@ -46,6 +47,7 @@
     info->mRotationAngle = videoFrame->mRotationAngle;
     info->mBytesPerPixel = videoFrame->mBytesPerPixel;
     info->mDurationUs = videoFrame->mDurationUs;
+    info->mBitDepth = videoFrame->mBitDepth;
     if (videoFrame->mIccSize > 0) {
         info->mIccData.assign(
                 videoFrame->getFlattenedIccData(),
@@ -376,13 +378,14 @@
         //       issue (e.g. by copying).
         VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->unsecurePointer());
 
-        ALOGV("Image dimension %dx%d, display %dx%d, angle %d, iccSize %d",
+        ALOGV("Image dimension %dx%d, display %dx%d, angle %d, iccSize %d, bitDepth %d",
                 videoFrame->mWidth,
                 videoFrame->mHeight,
                 videoFrame->mDisplayWidth,
                 videoFrame->mDisplayHeight,
                 videoFrame->mRotationAngle,
-                videoFrame->mIccSize);
+                videoFrame->mIccSize,
+                videoFrame->mBitDepth);
 
         initFrameInfo(&mImageInfo, videoFrame);
 
@@ -471,7 +474,7 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == mOutputColor) {
+    if (heifColor == (HeifColorFormat)mOutputColor) {
         return true;
     }
 
@@ -491,6 +494,11 @@
             mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
             break;
         }
+        case kHeifColorFormat_RGBA_1010102:
+        {
+            mOutputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
+            break;
+        }
         default:
             ALOGE("Unsupported output color format %d", heifColor);
             return false;
@@ -723,4 +731,13 @@
     return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
 }
 
+uint32_t HeifDecoderImpl::getColorDepth() {
+    // mImageInfo is a member, so taking its address can never yield null; return its bit depth
+    // directly.
+    return mImageInfo.mBitDepth;
+}
+
 } // namespace android
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index 2b9c710..86a8628 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -54,6 +54,8 @@
 
     size_t skipScanlines(size_t count) override;
 
+    uint32_t getColorDepth() override;
+
 private:
     struct DecodeThread;
 
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
index 9073672..dc12486 100644
--- a/media/libheif/include/HeifDecoderAPI.h
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -23,9 +23,10 @@
  * The output color pixel format of heif decoder.
  */
 typedef enum {
-    kHeifColorFormat_RGB565     = 0,
-    kHeifColorFormat_RGBA_8888  = 1,
-    kHeifColorFormat_BGRA_8888  = 2,
+    kHeifColorFormat_RGB565       = 0,
+    kHeifColorFormat_RGBA_8888    = 1,
+    kHeifColorFormat_BGRA_8888    = 2,
+    kHeifColorFormat_RGBA_1010102 = 3,
 } HeifColorFormat;
 
 /*
@@ -45,7 +46,8 @@
     uint32_t mHeight;
     int32_t  mRotationAngle;           // Rotation angle, clockwise, should be multiple of 90
     uint32_t mBytesPerPixel;           // Number of bytes for one pixel
-    int64_t mDurationUs;               // Duration of the frame in us
+    int64_t  mDurationUs;              // Duration of the frame in us
+    uint32_t mBitDepth;                // Number of bits for each of the R/G/B channels
     std::vector<uint8_t> mIccData;     // ICC data array
 };
 
@@ -161,6 +163,11 @@
      */
     virtual size_t skipScanlines(size_t count) = 0;
 
+    /*
+     * Returns color depth in bits for each of the R/G/B channels.
+     */
+    virtual uint32_t getColorDepth() = 0;
+
 private:
     HeifDecoder(const HeifFrameInfo&) = delete;
     HeifDecoder& operator=(const HeifFrameInfo&) = delete;
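A hedged sketch (not part of the patch) of how a client might pair the new getColorDepth() with the new 10-bit output format; it assumes setOutputColor() from the existing HeifDecoder interface and a decoder whose init() has already succeeded:

#include <cstdint>

#include "HeifDecoderAPI.h"

// Pick a 10-bit output buffer for >8-bit sources, otherwise fall back to RGBA_8888.
bool chooseOutputFormat(HeifDecoder* decoder) {
    const uint32_t depth = decoder->getColorDepth();
    if (depth > 8) {
        return decoder->setOutputColor(kHeifColorFormat_RGBA_1010102);
    }
    return decoder->setOutputColor(kHeifColorFormat_RGBA_8888);
}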
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index e98d7d8..2dd5784 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -19,6 +19,10 @@
     name: "libmedia_headers",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     export_include_dirs: ["include"],
     header_libs: [
@@ -214,6 +218,11 @@
     name: "libmedia_midiiowrapper",
 
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+
 
     srcs: ["MidiIoWrapper.cpp"],
 
@@ -347,6 +356,7 @@
 
     shared_libs: [
         "android.hidl.token@1.0-utils",
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
         "liblog",
@@ -378,12 +388,12 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
         "framework-permission-aidl-cpp",
     ],
 
     export_static_lib_headers: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
         "framework-permission-aidl-cpp",
     ],
 
@@ -440,6 +450,6 @@
 
     apex_available: [
         "//apex_available:platform",
-        "com.android.media"
+        "com.android.media",
     ],
 }
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index c89c023..c9f361e 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -17,7 +17,6 @@
 
 #include <arpa/inet.h>
 #include <stdint.h>
-#include <sys/types.h>
 
 #include <android/IDataSource.h>
 #include <binder/IPCThreadState.h>
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 67d33fa..85768bd 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -20,12 +20,14 @@
 #define LOG_TAG "MediaProfiles"
 
 #include <stdlib.h>
+#include <utils/misc.h>
 #include <utils/Log.h>
 #include <utils/Vector.h>
 #include <cutils/properties.h>
 #include <expat.h>
 #include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <OMX_Video.h>
 #include <sys/stat.h>
 
@@ -86,7 +88,24 @@
     {"h263", VIDEO_ENCODER_H263},
     {"h264", VIDEO_ENCODER_H264},
     {"m4v",  VIDEO_ENCODER_MPEG_4_SP},
-    {"hevc", VIDEO_ENCODER_HEVC}
+    {"vp8",  VIDEO_ENCODER_VP8},
+    {"hevc", VIDEO_ENCODER_HEVC},
+    {"vp9",  VIDEO_ENCODER_VP9},
+    {"dolbyvision", VIDEO_ENCODER_DOLBY_VISION},
+};
+
+const MediaProfiles::NameToTagMap MediaProfiles::sChromaSubsamplingNameMap[] = {
+    {"yuv 4:2:0", CHROMA_SUBSAMPLING_YUV_420},
+    {"yuv 4:2:2", CHROMA_SUBSAMPLING_YUV_422},
+    {"yuv 4:4:4", CHROMA_SUBSAMPLING_YUV_444},
+};
+
+const MediaProfiles::NameToTagMap MediaProfiles::sHdrFormatNameMap[] = {
+    {"sdr", HDR_FORMAT_NONE},
+    {"hlg", HDR_FORMAT_HLG},
+    {"hdr10", HDR_FORMAT_HDR10},
+    {"hdr10+", HDR_FORMAT_HDR10PLUS},
+    {"dolbyvision", HDR_FORMAT_DOLBY_VISION},
 };
 
 const MediaProfiles::NameToTagMap MediaProfiles::sAudioEncoderNameMap[] = {
@@ -164,12 +183,18 @@
 MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec UNUSED)
 {
     ALOGV("video codec:");
-    ALOGV("codec = %d", codec.mCodec);
+    ALOGV("codec = %d (%s)", codec.mCodec,
+            findNameForTag(sVideoEncoderNameMap, NELEM(sVideoEncoderNameMap), codec.mCodec));
     ALOGV("bit rate: %d", codec.mBitRate);
     ALOGV("frame width: %d", codec.mFrameWidth);
     ALOGV("frame height: %d", codec.mFrameHeight);
     ALOGV("frame rate: %d", codec.mFrameRate);
     ALOGV("profile: %d", codec.mProfile);
+    ALOGV("chroma: %s", findNameForTag(sChromaSubsamplingNameMap, NELEM(sChromaSubsamplingNameMap),
+                                       codec.mChromaSubsampling));
+    ALOGV("bit depth: %d", codec.mBitDepth);
+    ALOGV("hdr format: %s", findNameForTag(sHdrFormatNameMap, NELEM(sHdrFormatNameMap),
+                                           codec.mHdrFormat));
 }
 
 /*static*/ void
@@ -232,6 +257,155 @@
     return tag;
 }
 
+/*static*/ const char *
+MediaProfiles::findNameForTag(
+        const MediaProfiles::NameToTagMap *map, size_t nMappings, int tag, const char *def_)
+{
+    for (size_t i = 0; i < nMappings; ++i) {
+        if (map[i].tag == tag) {
+            return map[i].name;
+        }
+    }
+    return def_;
+}
+
+/*static*/ bool
+MediaProfiles::detectAdvancedVideoProfile(
+        video_encoder codec, int profile,
+        chroma_subsampling *chroma, int *bitDepth, hdr_format *hdr)
+{
+    // default values
+    *chroma = CHROMA_SUBSAMPLING_YUV_420;
+    *bitDepth = 8;
+    *hdr = HDR_FORMAT_NONE;
+
+    switch (codec) {
+    case VIDEO_ENCODER_H263:
+    case VIDEO_ENCODER_MPEG_4_SP:
+    case VIDEO_ENCODER_VP8:
+        // these are always 4:2:0 SDR 8-bit
+        return true;
+
+    case VIDEO_ENCODER_H264:
+        switch (profile) {
+        case AVCProfileBaseline:
+        case AVCProfileConstrainedBaseline:
+        case AVCProfileMain:
+        case AVCProfileExtended:
+        case AVCProfileHigh:
+        case AVCProfileConstrainedHigh:
+            return true;
+        case AVCProfileHigh10:
+            // returning false here as this could be an HLG stream
+            *bitDepth = 10;
+            return false;
+        case AVCProfileHigh422:
+            *chroma = CHROMA_SUBSAMPLING_YUV_422;
+            // returning false here as bit-depth could be 8 or 10
+            return false;
+        case AVCProfileHigh444:
+            *chroma = CHROMA_SUBSAMPLING_YUV_444;
+            // returning false here as bit-depth could be 8 or 10
+            return false;
+        default:
+            return false;
+        }
+        // flow does not get here
+
+    case VIDEO_ENCODER_HEVC:
+        switch (profile) {
+        case HEVCProfileMain:
+            return true;
+        case HEVCProfileMain10:
+            *bitDepth = 10;
+            // returning false here as this could be an HLG stream
+            return false;
+        case HEVCProfileMain10HDR10:
+            *bitDepth = 10;
+            *hdr = HDR_FORMAT_HDR10;
+            return true;
+        case HEVCProfileMain10HDR10Plus:
+            *bitDepth = 10;
+            *hdr = HDR_FORMAT_HDR10PLUS;
+            return true;
+        default:
+            return false;
+        }
+        // flow does not get here
+
+    case VIDEO_ENCODER_VP9:
+        switch (profile) {
+        case VP9Profile0:
+            return true;
+        case VP9Profile2:
+            // this is always 10-bit on Android
+            *bitDepth = 10;
+            // returning false here as this could be an HLG stream
+            return false;
+        case VP9Profile2HDR:
+            // this is always 10-bit on Android
+            *bitDepth = 10;
+            *hdr = HDR_FORMAT_HDR10;
+            return true;
+        case VP9Profile2HDR10Plus:
+            *bitDepth = 10;
+            *hdr = HDR_FORMAT_HDR10PLUS;
+            return true;
+        default:
+            return false;
+        }
+        // flow does not get here
+
+    case VIDEO_ENCODER_DOLBY_VISION:
+    {
+        // for Dolby Vision codec we always assume 10-bit DV
+        *bitDepth = 10;
+        *hdr = HDR_FORMAT_DOLBY_VISION;
+
+        switch (profile) {
+        case DolbyVisionProfileDvheDer /* profile 2 deprecated */:
+        case DolbyVisionProfileDvheDen /* profile 3 deprecated */:
+        case DolbyVisionProfileDvavPer /* profile 0 deprecated */:
+        case DolbyVisionProfileDvavPen /* profile 1 deprecated */:
+        case DolbyVisionProfileDvheDtr /* dvhe.04 */:
+        case DolbyVisionProfileDvheStn /* dvhe.05 */:
+        case DolbyVisionProfileDvheDth /* profile 6 deprecated */:
+        case DolbyVisionProfileDvheDtb /* dvhe.07 */:
+        case DolbyVisionProfileDvheSt  /* dvhe.08 */:
+        case DolbyVisionProfileDvavSe  /* dvav.09 */:
+        case DolbyVisionProfileDvav110 /* dvav1.10 */:
+            return true;
+        default:
+            return false;
+        }
+        // flow does not get here
+    }
+
+    case VIDEO_ENCODER_AV1:
+        switch (profile) {
+        case AV1ProfileMain10:
+            *bitDepth = 10;
+            // returning false here as this could be an HLG stream
+            return false;
+        case AV1ProfileMain10HDR10:
+            *bitDepth = 10;
+            *hdr = HDR_FORMAT_HDR10;
+            return true;
+        case AV1ProfileMain10HDR10Plus:
+            *bitDepth = 10;
+            *hdr = HDR_FORMAT_HDR10PLUS;
+            return true;
+        default:
+            return false;
+        }
+        // flow does not get here
+
+    default:
+        return false;
+    }
+    // flow does not get here
+}
+
 /*static*/ void
 MediaProfiles::createVideoCodec(const char **atts, size_t natts, MediaProfiles *profiles)
 {
@@ -250,13 +424,56 @@
     }
 
     int profile = -1;
-    if (natts >= 12 && !strcmp("profile", atts[10])) {
-        profile = atoi(atts[11]);
+    chroma_subsampling chroma = CHROMA_SUBSAMPLING_YUV_420;
+    int bitDepth = 8;
+    hdr_format hdr = HDR_FORMAT_NONE;
+    if (codec == VIDEO_ENCODER_DOLBY_VISION) {
+        bitDepth = 10;
+        hdr = HDR_FORMAT_DOLBY_VISION;
     }
 
-    VideoCodec videoCodec {
+    if (natts >= 12 && !strcmp("profile", atts[10])) {
+        profile = atoi(atts[11]);
+        if (!detectAdvancedVideoProfile(
+                (video_encoder)codec, profile, &chroma, &bitDepth, &hdr)) {
+            // if not detected read values from the attributes
+            for (size_t ix = 12; natts >= ix + 2; ix += 2) {
+                if (!strcmp("chroma", atts[ix])) {
+                    int chromaTag = findTagForName(sChromaSubsamplingNameMap,
+                                         NELEM(sChromaSubsamplingNameMap), atts[ix + 1]);
+                    if (chromaTag == -1) {
+                        ALOGE("MediaProfiles::createVideoCodec invalid chroma %s", atts[ix + 1]);
+                        return;
+                    } else {
+                        chroma = (chroma_subsampling)chromaTag;
+                    }
+                } else if (!strcmp("bitDepth", atts[ix])) {
+                    bitDepth = atoi(atts[ix + 1]);
+                    if (bitDepth < 8 || bitDepth > 16) {
+                        ALOGE("MediaProfiles::createVideoCodec invalid bitDepth %s", atts[ix + 1]);
+                        return;
+                    }
+                } else if (!strcmp("hdr", atts[ix])) {
+                    int hdrTag = findTagForName(sHdrFormatNameMap,
+                                                NELEM(sHdrFormatNameMap), atts[ix + 1]);
+                    if (hdrTag == -1) {
+                        ALOGE("MediaProfiles::createVideoCodec invalid hdr %s", atts[ix + 1]);
+                        return;
+                    } else {
+                        hdr = (hdr_format)hdrTag;
+                    }
+                } else {
+                    // ignoring here. TODO: rewrite this whole file to ignore invalid attrs
+                    ALOGD("MediaProfiles::createVideoCodec ignoring invalid attr %s", atts[ix]);
+                }
+            }
+        }
+    }
+
+    VideoCodec videoCodec{
             static_cast<video_encoder>(codec),
-            atoi(atts[3]), atoi(atts[5]), atoi(atts[7]), atoi(atts[9]), profile };
+            atoi(atts[3]) /* bitRate */, atoi(atts[5]) /* width */, atoi(atts[7]) /* height */,
+            atoi(atts[9]) /* frameRate */, profile, chroma, bitDepth, hdr };
     logVideoCodec(videoCodec);
 
     size_t nCamcorderProfiles;
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index ec52a49..a6f0b60 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -43,10 +43,10 @@
 }
 
 //static
-MediaResource MediaResource::CodecResource(bool secure, bool video, int64_t instanceCount) {
+MediaResource MediaResource::CodecResource(bool secure, SubType subType, int64_t instanceCount) {
     return MediaResource(
             secure ? Type::kSecureCodec : Type::kNonSecureCodec,
-            video ? SubType::kVideoCodec : SubType::kAudioCodec,
+            subType,
             instanceCount);
 }
 
diff --git a/media/libmedia/include/media/MediaProfiles.h b/media/libmedia/include/media/MediaProfiles.h
index 4a898e2..e75b694 100644
--- a/media/libmedia/include/media/MediaProfiles.h
+++ b/media/libmedia/include/media/MediaProfiles.h
@@ -81,6 +81,19 @@
     AUDIO_DECODER_WMA,
 };
 
+enum chroma_subsampling {
+    CHROMA_SUBSAMPLING_YUV_420,
+    CHROMA_SUBSAMPLING_YUV_422,
+    CHROMA_SUBSAMPLING_YUV_444,
+};
+
+enum hdr_format {
+    HDR_FORMAT_NONE,
+    HDR_FORMAT_HLG,
+    HDR_FORMAT_HDR10,
+    HDR_FORMAT_HDR10PLUS,
+    HDR_FORMAT_DOLBY_VISION,
+};
 
 class MediaProfiles
 {
@@ -117,13 +130,19 @@
          * @param profile codec profile (for MediaCodec) or -1 for none
          */
         VideoCodec(video_encoder codec, int bitrate, int frameWidth, int frameHeight, int frameRate,
-                   int profile = -1)
+                   int profile = -1,
+                   chroma_subsampling chroma = CHROMA_SUBSAMPLING_YUV_420,
+                   int bitDepth = 8,
+                   hdr_format hdr = HDR_FORMAT_NONE)
             : mCodec(codec),
               mBitRate(bitrate),
               mFrameWidth(frameWidth),
               mFrameHeight(frameHeight),
               mFrameRate(frameRate),
-              mProfile(profile) {
+              mProfile(profile),
+              mChromaSubsampling(chroma),
+              mBitDepth(bitDepth),
+              mHdrFormat(hdr) {
         }
 
         VideoCodec(const VideoCodec&) = default;
@@ -160,6 +179,21 @@
             return mProfile;
         }
 
+        /** Returns the chroma subsampling. */
+        chroma_subsampling getChromaSubsampling() const {
+            return mChromaSubsampling;
+        }
+
+        /** Returns the bit depth. */
+        int getBitDepth() const {
+            return mBitDepth;
+        }
+
+        /** Returns the HDR format. */
+        hdr_format getHdrFormat() const {
+            return mHdrFormat;
+        }
+
     private:
         video_encoder mCodec;
         int mBitRate;
@@ -167,6 +201,9 @@
         int mFrameHeight;
         int mFrameRate;
         int mProfile;
+        chroma_subsampling mChromaSubsampling;
+        int mBitDepth;
+        hdr_format mHdrFormat;
         friend class MediaProfiles;
     };
 
@@ -533,6 +570,39 @@
     static int findTagForName(const NameToTagMap *map, size_t nMappings, const char *name);
 
     /**
+     * Finds the string representation for an integer enum tag.
+     *
+     * This is the reverse for findTagForName
+     *
+     * @param map       the name-to-tag map to search
+     * @param nMappings the number of mappings in |map|
+     * @param tag       the enum value to find
+     * @param def_      the return value if the enum is not found
+     *
+     * @return the string name corresponding to |tag| or |def_| if not found.
+     */
+    static const char *findNameForTag(
+            const NameToTagMap *map, size_t nMappings,
+            int tag, const char *def_ = "(unknown)");
+
+    /**
+     * Updates the chroma subsampling, bit-depth and hdr-format for
+     * advanced codec profiles.
+     *
+     * @param codec    the video codec type
+     * @param profile  the MediaCodec profile
+     * @param chroma   pointer to the chroma subsampling output
+     * @param bitDepth pointer to the bit depth output
+     * @param hdr      pointer to the hdr format output
+     *
+     * @return true, if the profile fully determined chroma, bit-depth and hdr-format, false
+     *         otherwise.
+     */
+    static bool detectAdvancedVideoProfile(
+            video_encoder codec, int profile,
+            chroma_subsampling *chroma, int *bitDepth, hdr_format *hdr);
+
+    /**
      * Check on existing profiles with the following criteria:
      * 1. Low quality profile must have the lowest video
      *    resolution product (width x height)
@@ -549,6 +619,8 @@
 
     // Mappings from name (for instance, codec name) to enum value
     static const NameToTagMap sVideoEncoderNameMap[];
+    static const NameToTagMap sChromaSubsamplingNameMap[];
+    static const NameToTagMap sHdrFormatNameMap[];
     static const NameToTagMap sAudioEncoderNameMap[];
     static const NameToTagMap sFileFormatMap[];
     static const NameToTagMap sVideoDecoderNameMap[];
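A brief sketch (not part of the patch) of constructing a VideoCodec entry with the new chroma, bit-depth and HDR fields and reading them back; the HEVC profile constant comes from MediaCodecConstants.h, and public access to the nested VideoCodec class is assumed:

#include <media/MediaProfiles.h>
#include <media/stagefright/MediaCodecConstants.h>

using namespace android;

void describeHevcMain10Hdr10() {
    MediaProfiles::VideoCodec codec(
            VIDEO_ENCODER_HEVC, 20000000 /* bitRate */, 3840, 2160, 30 /* frameRate */,
            HEVCProfileMain10HDR10, CHROMA_SUBSAMPLING_YUV_420, 10 /* bitDepth */,
            HDR_FORMAT_HDR10);
    // The new getters expose the extra attributes alongside the existing ones.
    const int bitDepth = codec.getBitDepth();                        // 10
    const hdr_format hdr = codec.getHdrFormat();                     // HDR_FORMAT_HDR10
    const chroma_subsampling chroma = codec.getChromaSubsampling();  // YUV 4:2:0
    (void)bitDepth; (void)hdr; (void)chroma;
}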
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index 4712528..3b69d4f 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -37,7 +37,8 @@
     MediaResource(Type type, SubType subType, int64_t value);
     MediaResource(Type type, const std::vector<uint8_t> &id, int64_t value);
 
-    static MediaResource CodecResource(bool secure, bool video, int64_t instanceCount = 1);
+    static MediaResource CodecResource(bool secure, MediaResourceSubType subType,
+            int64_t instanceCount = 1);
     static MediaResource GraphicMemoryResource(int64_t value);
     static MediaResource CpuBoostResource();
     static MediaResource VideoBatteryResource();
@@ -62,6 +63,7 @@
         case MediaResource::SubType::kUnspecifiedSubType: return "unspecified";
         case MediaResource::SubType::kAudioCodec:         return "audio-codec";
         case MediaResource::SubType::kVideoCodec:         return "video-codec";
+        case MediaResource::SubType::kImageCodec:         return "image-codec";
         default:                                 return def;
     }
 }
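A one-line sketch (not part of the patch) of the updated CodecResource() factory with the new image-codec subtype; the values are illustrative only:

#include <media/MediaResource.h>

// One non-secure image-codec instance, declared via the SubType-based factory.
const android::MediaResource kImageCodecResource =
        android::MediaResource::CodecResource(false /* secure */,
                                              android::MediaResource::SubType::kImageCodec);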
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index d54ff32..dd18144 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -108,7 +108,9 @@
     VIDEO_ENCODER_MPEG_4_SP = 3,
     VIDEO_ENCODER_VP8 = 4,
     VIDEO_ENCODER_HEVC = 5,
-
+    VIDEO_ENCODER_VP9 = 6,
+    VIDEO_ENCODER_DOLBY_VISION = 7,
+    VIDEO_ENCODER_AV1 = 8,
     VIDEO_ENCODER_LIST_END // must be the last - used to validate the video encoder type
 };
 
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1c9b9e4..5215c1b 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -949,6 +949,9 @@
         mVideoWidth = ext1;
         mVideoHeight = ext2;
         break;
+    case MEDIA_STARTED:
+        ALOGV("Received media started message");
+        break;
     case MEDIA_NOTIFY_TIME:
         ALOGV("Received notify time message");
         break;
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
index 7dd0caa..2ed3126 100644
--- a/media/libmedia/tests/codeclist/Android.bp
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -25,9 +25,25 @@
 
 cc_test {
     name: "CodecListTest",
-    test_suites: ["device-tests"],
+    test_suites: ["device-tests", "mts"],
     gtest: true,
 
+    // Support multilib variants (using different suffix per sub-architecture), which is needed on
+    // build targets with secondary architectures, as the MTS test suite packaging logic flattens
+    // all test artifacts into a single `testcases` directory.
+    compile_multilib: "both",
+    multilib: {
+        lib32: {
+            suffix: "32",
+        },
+        lib64: {
+            suffix: "64",
+        },
+    },
+
+    // Used within mainline MTS, but only back to R, not to Q.
+    min_sdk_version: "30",
+
     srcs: [
         "CodecListTest.cpp",
     ],
@@ -35,7 +51,7 @@
     shared_libs: [
         "libbinder",
         "liblog",
-        "libmedia_codeclist",
+        "libmedia_codeclist", // available >= R
         "libstagefright",
         "libstagefright_foundation",
         "libstagefright_xmlparser",
diff --git a/media/libmedia/tests/codeclist/AndroidTest.xml b/media/libmedia/tests/codeclist/AndroidTest.xml
new file mode 100644
index 0000000..eeaab8e
--- /dev/null
+++ b/media/libmedia/tests/codeclist/AndroidTest.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for CodecList unit tests">
+    <option name="test-suite-tag" value="CodecListTest" />
+    <object type="module_controller" class="com.android.tradefed.testtype.suite.module.Sdk30ModuleController" />
+
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
+        <option name="cleanup" value="true" />
+        <option name="append-bitness" value="true" />
+        <option name="push" value="CodecListTest->/data/local/tmp/CodecListTest" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="CodecListTest" />
+    </test>
+
+
+</configuration>
diff --git a/media/libmedia/xsd/vts/OWNERS b/media/libmedia/xsd/vts/OWNERS
new file mode 100644
index 0000000..9af2eba
--- /dev/null
+++ b/media/libmedia/xsd/vts/OWNERS
@@ -0,0 +1,2 @@
+# Bug component: 151862
+sundongahn@google.com
diff --git a/media/libmediaformatshaper/Android.bp b/media/libmediaformatshaper/Android.bp
index bdd1465..7e8f351 100644
--- a/media/libmediaformatshaper/Android.bp
+++ b/media/libmediaformatshaper/Android.bp
@@ -95,10 +95,10 @@
 
     min_sdk_version: "29",
 
-    apex_available: [
-        "//apex_available:platform",
-        "com.android.media",
-    ],
+    // The library lives only in the module. The framework accesses it with
+    // dlopen() and uses "libmediaformatshaper_headers", so both sides track
+    // the same interface.
+    apex_available: ["com.android.media"],
 
     version_script: "exports.lds",
 
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 9b54199..165a8ad 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -20,7 +20,7 @@
     },
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth.updatable",
+        "com.android.bluetooth",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -29,6 +29,7 @@
 cc_library {
     name: "libmedia_helper",
     vendor_available: true,
+    min_sdk_version: "29",
     vndk: {
         enabled: true,
     },
@@ -43,7 +44,10 @@
         "-Wextra",
         "-Wall",
     ],
-    shared_libs: ["libutils", "liblog"],
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
     header_libs: [
         "libmedia_helper_headers",
         "libaudio_system_headers",
@@ -51,11 +55,16 @@
     export_header_lib_headers: [
         "libmedia_helper_headers",
     ],
-    clang: true,
+
     host_supported: true,
     target: {
         darwin: {
             enabled: false,
         },
     },
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "test_com.android.media",
+    ],
 }
diff --git a/media/libmediahelper/AudioValidator.cpp b/media/libmediahelper/AudioValidator.cpp
index 7eddbe1..5a0d517 100644
--- a/media/libmediahelper/AudioValidator.cpp
+++ b/media/libmediahelper/AudioValidator.cpp
@@ -47,8 +47,7 @@
         const effect_descriptor_t& desc, std::string_view bugNumber)
 {
     status_t status = NO_ERROR;
-    if (checkStringOverflow(desc.name)
-        | /* always */ checkStringOverflow(desc.implementor)) {
+    if (checkStringOverflow(desc.name) || checkStringOverflow(desc.implementor)) {
         status = BAD_VALUE;
     }
     return safetyNetLog(status, bugNumber);
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index 97b5b95..e29364c 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -30,6 +30,8 @@
     MAKE_STRING_FROM_ENUM(AUDIO_MODE_IN_CALL),
     MAKE_STRING_FROM_ENUM(AUDIO_MODE_IN_COMMUNICATION),
     MAKE_STRING_FROM_ENUM(AUDIO_MODE_CALL_SCREEN),
+    MAKE_STRING_FROM_ENUM(AUDIO_MODE_CALL_REDIRECT),
+    MAKE_STRING_FROM_ENUM(AUDIO_MODE_COMMUNICATION_REDIRECT),
     TERMINATOR
 };
 
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index d758391..8a38dd7 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -65,6 +65,7 @@
         "//frameworks/base/apex/media/framework",
         "//frameworks/base/core/jni",
         "//frameworks/base/media/jni",
+        "//packages/modules/Media/apex/framework",
     ],
 }
 
diff --git a/media/libmediametrics/IMediaMetricsService.cpp b/media/libmediametrics/IMediaMetricsService.cpp
deleted file mode 100644
index b5675e6..0000000
--- a/media/libmediametrics/IMediaMetricsService.cpp
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "MediaMetrics"
-
-#include <stdint.h>
-#include <inttypes.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-#include <binder/IPCThreadState.h>
-
-#include <utils/Errors.h>  // for status_t
-#include <utils/List.h>
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-#include <media/MediaMetricsItem.h>
-#include <media/IMediaMetricsService.h>
-
-namespace android {
-
-// TODO: Currently ONE_WAY transactions, make both ONE_WAY and synchronous options.
-
-enum {
-    SUBMIT_ITEM = IBinder::FIRST_CALL_TRANSACTION,
-    SUBMIT_BUFFER,
-};
-
-class BpMediaMetricsService: public BpInterface<IMediaMetricsService>
-{
-public:
-    explicit BpMediaMetricsService(const sp<IBinder>& impl)
-        : BpInterface<IMediaMetricsService>(impl)
-    {
-    }
-
-    status_t submit(mediametrics::Item *item) override
-    {
-        if (item == nullptr) {
-            return BAD_VALUE;
-        }
-        ALOGV("%s: (ONEWAY) item=%s", __func__, item->toString().c_str());
-
-        Parcel data;
-        data.writeInterfaceToken(IMediaMetricsService::getInterfaceDescriptor());
-
-        status_t status = item->writeToParcel(&data);
-        if (status != NO_ERROR) { // assume failure logged in item
-            return status;
-        }
-
-        status = remote()->transact(
-                SUBMIT_ITEM, data, nullptr /* reply */, IBinder::FLAG_ONEWAY);
-        ALOGW_IF(status != NO_ERROR, "%s: bad response from service for submit, status=%d",
-                __func__, status);
-        return status;
-    }
-
-    status_t submitBuffer(const char *buffer, size_t length) override
-    {
-        if (buffer == nullptr || length > INT32_MAX) {
-            return BAD_VALUE;
-        }
-        ALOGV("%s: (ONEWAY) length:%zu", __func__, length);
-
-        Parcel data;
-        data.writeInterfaceToken(IMediaMetricsService::getInterfaceDescriptor());
-
-        status_t status = data.writeInt32(length)
-                ?: data.write((uint8_t*)buffer, length);
-        if (status != NO_ERROR) {
-            return status;
-        }
-
-        status = remote()->transact(
-                SUBMIT_BUFFER, data, nullptr /* reply */, IBinder::FLAG_ONEWAY);
-        ALOGW_IF(status != NO_ERROR, "%s: bad response from service for submit, status=%d",
-                __func__, status);
-        return status;
-    }
-};
-
-IMPLEMENT_META_INTERFACE(MediaMetricsService, "android.media.IMediaMetricsService");
-
-// ----------------------------------------------------------------------
-
-status_t BnMediaMetricsService::onTransact(
-    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
-    switch (code) {
-    case SUBMIT_ITEM: {
-        CHECK_INTERFACE(IMediaMetricsService, data, reply);
-
-        mediametrics::Item * const item = mediametrics::Item::create();
-        status_t status = item->readFromParcel(data);
-        if (status != NO_ERROR) { // assume failure logged in item
-            return status;
-        }
-        status = submitInternal(item, true /* release */);
-        // assume failure logged by submitInternal
-        return NO_ERROR;
-    }
-    case SUBMIT_BUFFER: {
-        CHECK_INTERFACE(IMediaMetricsService, data, reply);
-        int32_t length;
-        status_t status = data.readInt32(&length);
-        if (status != NO_ERROR || length <= 0) {
-            return BAD_VALUE;
-        }
-        const void *ptr = data.readInplace(length);
-        if (ptr == nullptr) {
-            return BAD_VALUE;
-        }
-        status = submitBuffer(static_cast<const char *>(ptr), length);
-        // assume failure logged by submitBuffer
-        return NO_ERROR;
-    }
-
-    default:
-        return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 57fc49d..ecb248d 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -26,6 +26,7 @@
 #include <unordered_map>
 
 #include <binder/Parcel.h>
+#include <cutils/multiuser.h>
 #include <cutils/properties.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -343,7 +344,8 @@
         // now.
         // TODO(b/190151205): Either allow the HotwordDetectionService to access MediaMetrics or
         // make this disabling specific to that process.
-        if (uid >= AID_ISOLATED_START && uid <= AID_ISOLATED_END) {
+        uid_t appid = multiuser_get_app_id(uid);
+        if (appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END) {
             return false;
         }
         break;
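A short sketch of why the hunk above compares the app id rather than the raw uid; the constants are the standard Android values (AID_USER_OFFSET is 100000, and the isolated range is 99000-99999).

```cpp
#include <cutils/multiuser.h>
#include <private/android_filesystem_config.h>

// For a secondary user, an isolated-process uid is userId * AID_USER_OFFSET + appId
// (e.g. 1099123). Comparing the raw uid against AID_ISOLATED_START..AID_ISOLATED_END
// misses it; the app id extracted below is user-independent.
static bool isIsolatedUid(uid_t uid) {
    const uid_t appid = multiuser_get_app_id(uid);  // strips the user offset
    return appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END;
}
```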
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 2bf72a7..1c30510 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -25,6 +25,9 @@
  * 2) Consistent behavior and documentation.
  */
 
+#define AMEDIAMETRICS_INITIAL_MAX_VOLUME (0.f)
+#define AMEDIAMETRICS_INITIAL_MIN_VOLUME (1.f)
+
 /*
  * Taxonomy of audio keys
  *
@@ -48,6 +51,12 @@
 // The AudioRecord key appends the "trackId" to the prefix.
 #define AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD AMEDIAMETRICS_KEY_PREFIX_AUDIO "record."
 
+// The Audio Sensor key appends the sensor handle integer.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR AMEDIAMETRICS_KEY_PREFIX_AUDIO "sensor."
+
+// The Audio Spatializer key appends the spatializerId (currently 0)
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER AMEDIAMETRICS_KEY_PREFIX_AUDIO "spatializer."
+
 // The AudioStream key appends the "streamId" to the prefix.
 #define AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM  AMEDIAMETRICS_KEY_PREFIX_AUDIO "stream."
 
@@ -63,6 +72,7 @@
 
 // Error keys
 #define AMEDIAMETRICS_KEY_AUDIO_TRACK_ERROR   AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "error"
+#define AMEDIAMETRICS_KEY_AUDIO_RECORD_ERROR  AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "error"
 
 /*
  * MediaMetrics Properties are unified space for consistency and readability.
@@ -115,6 +125,11 @@
 #define AMEDIAMETRICS_PROP_DEVICETIMENS   "deviceTimeNs"   // int64_t playback/record time
 #define AMEDIAMETRICS_PROP_DEVICEVOLUME   "deviceVolume"   // double - average device volume
 
+#define AMEDIAMETRICS_PROP_DEVICEMAXVOLUMEDURATIONNS "deviceMaxVolumeDurationNs" // int64_t
+#define AMEDIAMETRICS_PROP_DEVICEMAXVOLUME "deviceMaxVolume" // double - maximum device volume
+#define AMEDIAMETRICS_PROP_DEVICEMINVOLUMEDURATIONNS "deviceMinVolumeDurationNs" // int64_t
+#define AMEDIAMETRICS_PROP_DEVICEMINVOLUME "deviceMinVolume" // double - minimum device volume
+
 #define AMEDIAMETRICS_PROP_DIRECTION      "direction"      // string AAudio input or output
 #define AMEDIAMETRICS_PROP_DURATIONNS     "durationNs"     // int64 duration time span
 #define AMEDIAMETRICS_PROP_ENCODING       "encoding"       // string value of format
@@ -126,11 +141,17 @@
 #define AMEDIAMETRICS_PROP_FLAGS          "flags"
 
 #define AMEDIAMETRICS_PROP_FRAMECOUNT     "frameCount"     // int32
+#define AMEDIAMETRICS_PROP_HEADTRACKINGMODES "headTrackingModes" // string, |-separated (same format as "modes")
 #define AMEDIAMETRICS_PROP_INPUTDEVICES   "inputDevices"   // string value
 #define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
 #define AMEDIAMETRICS_PROP_INTERVALCOUNT  "intervalCount"  // int32
 #define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
+#define AMEDIAMETRICS_PROP_LEVELS         "levels"          // string, |-separated levels
 #define AMEDIAMETRICS_PROP_LOGSESSIONID   "logSessionId"   // hex string, "" none
+#define AMEDIAMETRICS_PROP_METHODCODE     "methodCode"     // int64_t code identifying the method
+#define AMEDIAMETRICS_PROP_METHODNAME     "methodName"     // string method name
+#define AMEDIAMETRICS_PROP_MODE           "mode"           // string
+#define AMEDIAMETRICS_PROP_MODES          "modes"          // string, |-separated modes
 #define AMEDIAMETRICS_PROP_NAME           "name"           // string value
 #define AMEDIAMETRICS_PROP_ORIGINALFLAGS  "originalFlags"  // int32
 #define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
@@ -215,6 +236,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_START      "start"  // AudioTrack, AudioRecord
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_STOP       "stop"   // AudioTrack, AudioRecord
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT    "timeout"  // AudioFlinger, AudioPolicy
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN   "underrun" // from Thread
 
 // Possible values for AMEDIAMETRICS_PROP_CALLERNAME
@@ -229,6 +251,11 @@
 #define AMEDIAMETRICS_PROP_CALLERNAME_VALUE_TONEGENERATOR "tonegenerator"  // dial tones
 #define AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN       "unknown"        // callerName not set
 
+// Many properties are available for the sensor.
+// The mode is how the sensor is being currently used.
+#define AMEDIAMETRICS_PROP_MODE_VALUE_HEAD          "head"        // used for head tracking
+#define AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN        "screen"      // used for screen
+
 // MediaMetrics errors are expected to cover the following sources:
 // https://docs.oracle.com/javase/7/docs/api/java/lang/RuntimeException.html
 // https://docs.oracle.com/javase/7/docs/api/java/lang/Exception.html
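A hedged sketch of the intended use of the two initial-volume constants added above (an assumption based on their names and values): the inverted defaults act as identity elements for a running min/max over volumes in [0, 1], so the first observed sample replaces both.

```cpp
#include <algorithm>

constexpr float kInitialMaxVolume = 0.f;  // AMEDIAMETRICS_INITIAL_MAX_VOLUME
constexpr float kInitialMinVolume = 1.f;  // AMEDIAMETRICS_INITIAL_MIN_VOLUME

struct DeviceVolumeStats {
    float maxVolume = kInitialMaxVolume;
    float minVolume = kInitialMinVolume;
    void observe(float v) {                   // v expected in [0, 1]
        maxVolume = std::max(maxVolume, v);   // first sample replaces the 0.f default
        minVolume = std::min(minVolume, v);   // first sample replaces the 1.f default
    }
};
```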
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index f55678d..266cb17 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -65,10 +65,14 @@
         "libstagefright_foundation",
         "libstagefright_httplive",
         "libutils",
+        "packagemanager_aidl-cpp",
     ],
 
     header_libs: [
         "media_plugin_headers",
+        "libmediautils_headers",
+        "libstagefright_rtsp_headers",
+        "libstagefright_webm_headers",
     ],
 
     static_libs: [
@@ -84,13 +88,16 @@
         "framework-permission-aidl-cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/rtsp",
-        "frameworks/av/media/libstagefright/webm",
+    export_header_lib_headers: [
+        "libmediautils_headers",
     ],
 
     local_include_dirs: ["include"],
 
+    export_include_dirs: [
+        ".",
+    ],
+
     cflags: [
         "-Werror",
         "-Wno-error=deprecated-declarations",
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index 05f7365..cd411ea 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -30,7 +30,7 @@
 #include "MediaPlayerFactory.h"
 
 #include "TestPlayerStub.h"
-#include "nuplayer/NuPlayerDriver.h"
+#include <nuplayer/NuPlayerDriver.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index f85887e..8e19d02 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -82,7 +82,7 @@
 #include "MediaPlayerFactory.h"
 
 #include "TestPlayerStub.h"
-#include "nuplayer/NuPlayerDriver.h"
+#include <nuplayer/NuPlayerDriver.h>
 
 
 static const int kDumpLockRetries = 50;
@@ -611,29 +611,42 @@
                 IPCThreadState::self()->getCallingUid());
         result.append(buffer);
     } else {
-        Mutex::Autolock lock(mLock);
-        for (int i = 0, n = mClients.size(); i < n; ++i) {
-            sp<Client> c = mClients[i].promote();
-            if (c != 0) c->dump(fd, args);
-            clients.add(c);
-        }
-        if (mMediaRecorderClients.size() == 0) {
-                result.append(" No media recorder client\n\n");
-        } else {
+        {
+            // capture clients under lock
+            Mutex::Autolock lock(mLock);
+            for (int i = 0, n = mClients.size(); i < n; ++i) {
+                sp<Client> c = mClients[i].promote();
+                if (c != nullptr) {
+                    clients.add(c);
+                }
+            }
+
             for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) {
                 sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote();
-                if (c != 0) {
-                    snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
-                            c->mAttributionSource.pid);
-                    result.append(buffer);
-                    write(fd, result.string(), result.size());
-                    result = "\n";
-                    c->dump(fd, args);
+                if (c != nullptr) {
                     mediaRecorderClients.add(c);
                 }
             }
         }
 
+        // dump clients outside of lock
+        for (const sp<Client> &c : clients) {
+            c->dump(fd, args);
+        }
+        if (mediaRecorderClients.size() == 0) {
+            result.append(" No media recorder client\n\n");
+        } else {
+            for (const sp<MediaRecorderClient> &c : mediaRecorderClients) {
+                snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
+                        c->mAttributionSource.pid);
+                result.append(buffer);
+                write(fd, result.string(), result.size());
+                result = "\n";
+                c->dump(fd, args);
+            }
+        }
+
         result.append(" Files opened and/or mapped:\n");
         snprintf(buffer, SIZE - 1, "/proc/%d/maps", getpid());
         FILE *f = fopen(buffer, "r");
@@ -1831,7 +1844,6 @@
 {
     close();
     free(mAttributes);
-    delete mCallbackData;
 }
 
 //static
@@ -2052,8 +2064,7 @@
 
         mRecycledTrack.clear();
         close_l();
-        delete mCallbackData;
-        mCallbackData = NULL;
+        mCallbackData.clear();
     }
 }
 
@@ -2174,7 +2185,7 @@
     }
 
     sp<AudioTrack> t;
-    CallbackData *newcbd = NULL;
+    sp<CallbackData> newcbd;
 
     // We don't attempt to create a new track if we are recycling an
     // offloaded track. But, if we are recycling a non-offloaded or we
@@ -2184,8 +2195,8 @@
     if (!(reuse && bothOffloaded)) {
         ALOGV("creating new AudioTrack");
 
-        if (mCallback != NULL) {
-            newcbd = new CallbackData(this);
+        if (mCallback != nullptr) {
+            newcbd = sp<CallbackData>::make(wp<AudioOutput>::fromExisting(this));
             t = new AudioTrack(
                     mStreamType,
                     sampleRate,
@@ -2193,7 +2204,6 @@
                     channelMask,
                     frameCount,
                     flags,
-                    CallbackWrapper,
                     newcbd,
                     0,  // notification frames
                     mSessionId,
@@ -2220,8 +2230,7 @@
                     channelMask,
                     frameCount,
                     flags,
-                    NULL, // callback
-                    NULL, // user data
+                    nullptr, // callback
                     0, // notification frames
                     mSessionId,
                     AudioTrack::TRANSFER_DEFAULT,
@@ -2237,8 +2246,7 @@
         t->setCallerName("media");
         if ((t == 0) || (t->initCheck() != NO_ERROR)) {
             ALOGE("Unable to create audio track");
-            delete newcbd;
-            // t goes out of scope, so reference count drops to zero
+            // t and newcbd go out of scope, so their reference counts drop to zero
             return NO_INIT;
         } else {
             // successful AudioTrack initialization implies a legacy stream type was generated
@@ -2272,7 +2280,6 @@
             if (mCallbackData != NULL) {
                 mCallbackData->setOutput(this);
             }
-            delete newcbd;
             return updateTrack();
         }
     }
@@ -2378,7 +2385,7 @@
             if (mCallbackData != NULL) {
                 // two alternative approaches
 #if 1
-                CallbackData *callbackData = mCallbackData;
+                sp<CallbackData> callbackData = mCallbackData;
                 mLock.unlock();
                 // proper acquisition sequence
                 callbackData->lock();
@@ -2415,9 +2422,8 @@
             // for example, the next player could be prepared and seeked.
             //
             // Presuming it isn't advisable to force the track over.
-             if (mNextOutput->mTrack == NULL) {
+             if (mNextOutput->mTrack == nullptr) {
                 ALOGD("Recycling track for gapless playback");
-                delete mNextOutput->mCallbackData;
                 mNextOutput->mCallbackData = mCallbackData;
                 mNextOutput->mRecycledTrack = mTrack;
                 mNextOutput->mSampleRateHz = mSampleRateHz;
@@ -2425,11 +2431,11 @@
                 mNextOutput->mFlags = mFlags;
                 mNextOutput->mFrameSize = mFrameSize;
                 close_l();
-                mCallbackData = NULL;  // destruction handled by mNextOutput
+                mCallbackData.clear();
             } else {
                 ALOGW("Ignoring gapless playback because next player has already started");
                 // remove track in case resource needed for future players.
-                if (mCallbackData != NULL) {
+                if (mCallbackData != nullptr) {
                     mCallbackData->endTrackSwitch();  // release lock for callbacks before close.
                 }
                 close_l();
@@ -2656,76 +2662,71 @@
     }
 }
 
-// static
-void MediaPlayerService::AudioOutput::CallbackWrapper(
-        int event, void *cookie, void *info) {
-    //ALOGV("callbackwrapper");
-    CallbackData *data = (CallbackData*)cookie;
-    // lock to ensure we aren't caught in the middle of a track switch.
-    data->lock();
-    AudioOutput *me = data->getOutput();
-    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
-    if (me == NULL) {
-        // no output set, likely because the track was scheduled to be reused
-        // by another player, but the format turned out to be incompatible.
-        data->unlock();
-        if (buffer != NULL) {
-            buffer->size = 0;
-        }
+size_t MediaPlayerService::AudioOutput::CallbackData::onMoreData(const AudioTrack::Buffer& buffer) {
+    ALOGD("data callback");
+    lock();
+    sp<AudioOutput> me = getOutput();
+    if (me == nullptr) {
+        unlock();
+        return 0;
+    }
+    size_t actualSize = (*me->mCallback)(
+            me.get(), buffer.data(), buffer.size(), me->mCallbackCookie,
+            CB_EVENT_FILL_BUFFER);
+
+    // Log when no data is returned from the callback.
+    // (1) We may have no data (especially with network streaming sources).
+    // (2) We may have reached the EOS and the audio track is not stopped yet.
+    // Note that AwesomePlayer/AudioPlayer will only return zero size when it reaches the EOS.
+    // NuPlayerRenderer will return zero when it doesn't have data (it doesn't block to fill).
+    //
+    // This is a benign busy-wait, with the next data request generated 10 ms or more later;
+    // nevertheless for power reasons, we don't want to see too many of these.
+
+    ALOGV_IF(actualSize == 0 && buffer.size() > 0, "callbackwrapper: empty buffer returned");
+    unlock();
+    return actualSize;
+}
+
+void MediaPlayerService::AudioOutput::CallbackData::onStreamEnd() {
+    lock();
+    sp<AudioOutput> me = getOutput();
+    if (me == nullptr) {
+        unlock();
         return;
     }
+    ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
+    (*me->mCallback)(me.get(), NULL /* buffer */, 0 /* size */,
+            me->mCallbackCookie, CB_EVENT_STREAM_END);
+    unlock();
+}
 
-    switch(event) {
-    case AudioTrack::EVENT_MORE_DATA: {
-        size_t actualSize = (*me->mCallback)(
-                me, buffer->raw, buffer->size, me->mCallbackCookie,
-                CB_EVENT_FILL_BUFFER);
 
-        // Log when no data is returned from the callback.
-        // (1) We may have no data (especially with network streaming sources).
-        // (2) We may have reached the EOS and the audio track is not stopped yet.
-        // Note that AwesomePlayer/AudioPlayer will only return zero size when it reaches the EOS.
-        // NuPlayerRenderer will return zero when it doesn't have data (it doesn't block to fill).
-        //
-        // This is a benign busy-wait, with the next data request generated 10 ms or more later;
-        // nevertheless for power reasons, we don't want to see too many of these.
-
-        ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
-
-        buffer->size = actualSize;
-        } break;
-
-    case AudioTrack::EVENT_STREAM_END:
-        // currently only occurs for offloaded callbacks
-        ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
-        (*me->mCallback)(me, NULL /* buffer */, 0 /* size */,
-                me->mCallbackCookie, CB_EVENT_STREAM_END);
-        break;
-
-    case AudioTrack::EVENT_NEW_IAUDIOTRACK :
-        ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN");
-        (*me->mCallback)(me,  NULL /* buffer */, 0 /* size */,
-                me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
-        break;
-
-    case AudioTrack::EVENT_UNDERRUN:
-        // This occurs when there is no data available, typically
-        // when there is a failure to supply data to the AudioTrack.  It can also
-        // occur in non-offloaded mode when the audio device comes out of standby.
-        //
-        // If an AudioTrack underruns it outputs silence. Since this happens suddenly
-        // it may sound like an audible pop or glitch.
-        //
-        // The underrun event is sent once per track underrun; the condition is reset
-        // when more data is sent to the AudioTrack.
-        ALOGD("callbackwrapper: EVENT_UNDERRUN (discarded)");
-        break;
-
-    default:
-        ALOGE("received unknown event type: %d inside CallbackWrapper !", event);
+void MediaPlayerService::AudioOutput::CallbackData::onNewIAudioTrack() {
+    lock();
+    sp<AudioOutput> me = getOutput();
+    if (me == nullptr) {
+        unlock();
+        return;
     }
+    ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN");
+    (*me->mCallback)(me.get(), NULL /* buffer */, 0 /* size */,
+            me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
+    unlock();
+}
 
-    data->unlock();
+void MediaPlayerService::AudioOutput::CallbackData::onUnderrun() {
+    // This occurs when there is no data available, typically
+    // when there is a failure to supply data to the AudioTrack.  It can also
+    // occur in non-offloaded mode when the audio device comes out of standby.
+    //
+    // If an AudioTrack underruns it outputs silence. Since this happens suddenly
+    // it may sound like an audible pop or glitch.
+    //
+    // The underrun event is sent once per track underrun; the condition is reset
+    // when more data is sent to the AudioTrack.
+    ALOGD("callbackwrapper: EVENT_UNDERRUN (discarded)");
 }
 
 audio_session_t MediaPlayerService::AudioOutput::getSessionId() const
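A minimal sketch of the callback shape this file moves to: each AudioTrack event arrives as its own virtual method instead of a C-style callback switching on an event code. The overridden methods are the ones implemented above; fillFromSource() is a hypothetical stand-in for the player's data path.

```cpp
#include <media/AudioTrack.h>

namespace android {

class SketchTrackCallback : public AudioTrack::IAudioTrackCallback {
  protected:
    size_t onMoreData(const AudioTrack::Buffer& buffer) override {
        // Return how many bytes were actually written into the buffer.
        return fillFromSource(buffer.data(), buffer.size());
    }
    void onUnderrun() override { /* data not supplied in time; the track plays silence */ }
    void onStreamEnd() override { /* offloaded stream reached its end */ }
    void onNewIAudioTrack() override { /* underlying track re-created; tear down / rebuild */ }
  private:
    size_t fillFromSource(void* data, size_t size);  // hypothetical helper
};

}  // namespace android
```

An instance would be created with sp<>::make() and handed to the AudioTrack constructor in place of the old callback/cookie pair, as the hunks above do with CallbackData.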
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 98091be..86be3fe 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -30,9 +30,11 @@
 #include <media/AidlConversion.h>
 #include <media/AudioResamplerPublic.h>
 #include <media/AudioSystem.h>
+#include <media/AudioTrack.h>
 #include <media/MediaPlayerInterface.h>
 #include <media/Metadata.h>
 #include <media/stagefright/foundation/ABase.h>
+#include <mediautils/Synchronization.h>
 #include <android/content/AttributionSourceState.h>
 
 #include <system/audio.h>
@@ -41,7 +43,6 @@
 
 using content::AttributionSourceState;
 
-class AudioTrack;
 struct AVSyncSettings;
 class DeathNotifier;
 class IDataSource;
@@ -161,7 +162,7 @@
         sp<AudioOutput>         mNextOutput;
         AudioCallback           mCallback;
         void *                  mCallbackCookie;
-        CallbackData *          mCallbackData;
+        sp<CallbackData>        mCallbackData;
         audio_stream_type_t     mStreamType;
         audio_attributes_t *    mAttributes;
         float                   mLeftVolume;
@@ -189,15 +190,15 @@
         // CallbackData is what is passed to the AudioTrack as the "user" data.
         // We need to be able to target this to a different Output on the fly,
         // so we can't use the Output itself for this.
-        class CallbackData {
+        class CallbackData : public AudioTrack::IAudioTrackCallback {
             friend AudioOutput;
         public:
-            explicit CallbackData(AudioOutput *cookie) {
+            explicit CallbackData(const wp<AudioOutput>& cookie) {
                 mData = cookie;
                 mSwitching = false;
             }
-            AudioOutput *   getOutput() const { return mData; }
-            void            setOutput(AudioOutput* newcookie) { mData = newcookie; }
+            sp<AudioOutput> getOutput() const { return mData.load().promote(); }
+            void            setOutput(const wp<AudioOutput>& newcookie) { mData.store(newcookie); }
             // lock/unlock are used by the callback before accessing the payload of this object
             void            lock() const { mLock.lock(); }
             void            unlock() const { mLock.unlock(); }
@@ -220,8 +221,13 @@
                 }
                 mSwitching = false;
             }
+        protected:
+            size_t onMoreData(const AudioTrack::Buffer& buffer) override;
+            void onUnderrun() override;
+            void onStreamEnd() override;
+            void onNewIAudioTrack() override;
         private:
-            AudioOutput *   mData;
+            mediautils::atomic_wp<AudioOutput> mData;
             mutable Mutex   mLock; // a recursive mutex might make this unnecessary.
             bool            mSwitching;
             DISALLOW_EVIL_CONSTRUCTORS(CallbackData);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index a914006..4aa80be 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -126,8 +126,13 @@
     }
 
     if ((as == AUDIO_SOURCE_FM_TUNER
-            && !(captureAudioOutputAllowed(mAttributionSource)
+                && !(captureAudioOutputAllowed(mAttributionSource)
                     || captureTunerAudioInputAllowed(mAttributionSource)))
+            || (as == AUDIO_SOURCE_REMOTE_SUBMIX
+                && !(captureAudioOutputAllowed(mAttributionSource)
+                    || modifyAudioRoutingAllowed(mAttributionSource)))
+            || (as == AUDIO_SOURCE_ECHO_REFERENCE
+                && !captureAudioOutputAllowed(mAttributionSource))
             || !recordingAllowed(mAttributionSource, (audio_source_t)as)) {
         return PERMISSION_DENIED;
     }
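The audio-source gating added above, restated as a standalone helper purely for readability; the permission-check names mirror the ones used in the change (the real code keeps the condition inline, and every source additionally requires recordingAllowed()).

```cpp
// Sketch only; assumes the same headers as MediaRecorderClient.cpp for the
// permission helpers and AttributionSourceState.
static bool specialSourceAllowed(audio_source_t as, const AttributionSourceState& attr) {
    switch (as) {
        case AUDIO_SOURCE_FM_TUNER:
            return captureAudioOutputAllowed(attr) || captureTunerAudioInputAllowed(attr);
        case AUDIO_SOURCE_REMOTE_SUBMIX:
            return captureAudioOutputAllowed(attr) || modifyAudioRoutingAllowed(attr);
        case AUDIO_SOURCE_ECHO_REFERENCE:
            return captureAudioOutputAllowed(attr);
        default:
            return true;  // other sources are gated by recordingAllowed() alone
    }
}
```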
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 2aabd53..b3f7f25 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -28,6 +28,7 @@
 #include <datasource/PlayerServiceDataSourceFactory.h>
 #include <datasource/PlayerServiceFileSource.h>
 #include <media/IMediaHTTPService.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaCodecList.h>
@@ -194,18 +195,8 @@
         return NULL;
     }
 
-    if (metaOnly) {
-        return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail);
-    }
-
-    sp<IMediaSource> source = mExtractor->getTrack(i);
-
-    if (source.get() == NULL) {
-        ALOGE("unable to instantiate image track.");
-        return NULL;
-    }
-
     const char *mime;
+    bool isHeif = false;
     if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
         ALOGE("image track has no mime type");
         return NULL;
@@ -215,20 +206,79 @@
         mime = MEDIA_MIMETYPE_VIDEO_HEVC;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
+        isHeif = true;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
         mime = MEDIA_MIMETYPE_VIDEO_AV1;
         trackMeta = new MetaData(*trackMeta);
         trackMeta->setCString(kKeyMIMEType, mime);
     }
 
+    sp<AMessage> format = new AMessage;
+    status_t err = convertMetaDataToMessage(trackMeta, &format);
+    if (err != OK) {
+        ALOGE("getImageInternal: convertMetaDataToMessage() failed, unable to extract image");
+        return NULL;
+    }
+
+    uint32_t bitDepth = 8;
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        int32_t profile;
+        if (format->findInt32("profile", &profile)) {
+            if (HEVCProfileMain10 == profile || HEVCProfileMain10HDR10 == profile ||
+                    HEVCProfileMain10HDR10Plus == profile) {
+                  bitDepth = 10;
+            }
+        }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
+        int32_t profile;
+        if (format->findInt32("profile", &profile)) {
+            if (AV1ProfileMain10 == profile || AV1ProfileMain10HDR10 == profile ||
+                    AV1ProfileMain10HDR10Plus == profile) {
+                  bitDepth = 10;
+            }
+        }
+    }
+
+    if (metaOnly) {
+        return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail, bitDepth);
+    }
+
+    sp<IMediaSource> source = mExtractor->getTrack(i);
+
+    if (source.get() == NULL) {
+        ALOGE("unable to instantiate image track.");
+        return NULL;
+    }
+
     bool preferhw = property_get_bool(
             "media.stagefright.thumbnail.prefer_hw_codecs", false);
     uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
     Vector<AString> matchingCodecs;
+
+    // If decoding thumbnail check decoder supports thumbnail dimensions instead
+    int32_t thumbHeight, thumbWidth;
+    if (thumbnail && format != NULL
+            && trackMeta->findInt32(kKeyThumbnailHeight, &thumbHeight)
+            && trackMeta->findInt32(kKeyThumbnailWidth, &thumbWidth)) {
+        format->setInt32("height", thumbHeight);
+        format->setInt32("width", thumbWidth);
+    }
+
+    // If decoding tiled HEIF check decoder supports tile dimensions instead
+    if (!thumbnail && isHeif && format != NULL) {
+        int32_t tileWidth, tileHeight;
+        if (trackMeta->findInt32(kKeyTileWidth, &tileWidth) && tileWidth > 0
+                && trackMeta->findInt32(kKeyTileHeight, &tileHeight) && tileHeight > 0) {
+            format->setInt32("height", tileHeight);
+            format->setInt32("width", tileWidth);
+        }
+    }
+
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
             flags,
+            format,
             &matchingCodecs);
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
@@ -348,11 +398,19 @@
     bool preferhw = property_get_bool(
             "media.stagefright.thumbnail.prefer_hw_codecs", false);
     uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
+    sp<AMessage> format = new AMessage;
+    status_t err = convertMetaDataToMessage(trackMeta, &format);
+    if (err != OK) {
+        ALOGE("getFrameInternal: convertMetaDataToMessage() failed, unable to extract frame");
+        return NULL;
+    }
+
     Vector<AString> matchingCodecs;
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
             flags,
+            format,
             &matchingCodecs);
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
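A condensed, illustrative restatement of the selection logic added in this file: advertise the dimensions that will actually be decoded (the thumbnail, or a single HEIF tile) so codec capability matching is not performed against the full image size. The variable names below are illustrative only.

```cpp
if (decodeThumbnail && haveThumbDims) {
    format->setInt32("width", thumbWidth);
    format->setInt32("height", thumbHeight);
} else if (isHeif && haveTileDims) {
    format->setInt32("width", tileWidth);
    format->setInt32("height", tileHeight);
}
MediaCodecList::findMatchingCodecs(mime, false /* encoder */, flags, format, &matchingCodecs);
```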
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index bffd7b3..ea1fdf4 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -16,13 +16,16 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "StagefrightRecorder"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Trace.h>
 #include <inttypes.h>
 // TODO/workaround: including base logging now as it conflicts with ADebug.h
 // and it must be included first.
 #include <android-base/logging.h>
 #include <utils/Log.h>
 
-#include "WebmWriter.h"
+#include <webm/WebmWriter.h>
+
 #include "StagefrightRecorder.h"
 
 #include <algorithm>
@@ -64,7 +67,7 @@
 
 #include <system/audio.h>
 
-#include "ARTPWriter.h"
+#include <media/stagefright/rtsp/ARTPWriter.h>
 
 namespace android {
 
@@ -1856,6 +1859,7 @@
 // Set up the appropriate MediaSource depending on the chosen option
 status_t StagefrightRecorder::setupMediaSource(
                       sp<MediaSource> *mediaSource) {
+    ATRACE_CALL();
     if (mVideoSource == VIDEO_SOURCE_DEFAULT
             || mVideoSource == VIDEO_SOURCE_CAMERA) {
         sp<CameraSource> cameraSource;
@@ -1936,6 +1940,7 @@
 status_t StagefrightRecorder::setupVideoEncoder(
         const sp<MediaSource> &cameraSource,
         sp<MediaCodecSource> *source) {
+    ATRACE_CALL();
     source->clear();
 
     sp<AMessage> format = new AMessage();
@@ -2114,6 +2119,7 @@
 }
 
 status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+    ATRACE_CALL();
     status_t status = BAD_VALUE;
     if (OK != (status = checkAudioEncoderCapabilities())) {
         return status;
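A short sketch of the tracing pattern added in this file: ATRACE_TAG must be defined before including <utils/Trace.h>, and ATRACE_CALL() then records a trace slice spanning the enclosing function. setupSomethingExpensive() is a hypothetical stand-in for the setup methods instrumented above.

```cpp
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>
#include <utils/Errors.h>

android::status_t setupSomethingExpensive() {
    ATRACE_CALL();  // slice begins here and ends when the function returns
    // ... setup work worth attributing in a systrace/perfetto capture ...
    return android::OK;
}
```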
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index d6de47f..d7785da 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -26,6 +26,7 @@
 #include <system/audio.h>
 
 #include <media/hardware/MetadataBufferType.h>
+#include <media/stagefright/foundation/AString.h>
 #include <android/content/AttributionSourceState.h>
 
 namespace android {
diff --git a/media/libmediaplayerservice/datasource/Android.bp b/media/libmediaplayerservice/datasource/Android.bp
index 19fc172..fea0332 100644
--- a/media/libmediaplayerservice/datasource/Android.bp
+++ b/media/libmediaplayerservice/datasource/Android.bp
@@ -19,7 +19,6 @@
     ],
 
     header_libs: [
-        "media_ndk_headers",
         "libmedia_headers",
     ],
 
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
new file mode 100644
index 0000000..a36f1d6
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libmediaplayerservice_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libmediaplayerservice_license",
+    ],
+}
+
+cc_defaults {
+    name: "libmediaplayerserviceFuzzer_defaults",
+    static_libs: [
+        "libmediaplayerservice",
+        "liblog",
+    ],
+    shared_libs: [
+        "framework-permission-aidl-cpp",
+        "libbinder",
+        "libcutils",
+        "libmedia",
+        "libstagefright",
+        "libutils",
+        "libstagefright_foundation",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "mediarecorder_fuzzer",
+    srcs: [
+        "mediarecorder_fuzzer.cpp",
+    ],
+    defaults: [
+        "libmediaplayerserviceFuzzer_defaults",
+    ],
+    static_libs: [
+        "libstagefright_rtsp",
+        "libbase",
+    ],
+    shared_libs: [
+        "av-types-aidl-cpp",
+        "media_permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libandroid_net",
+        "libcamera_client",
+        "libgui",
+        "libmediametrics",
+    ],
+}
+
+cc_fuzz {
+    name: "metadataretriever_fuzzer",
+    srcs: [
+        "metadataretriever_fuzzer.cpp",
+    ],
+    defaults: [
+        "libmediaplayerserviceFuzzer_defaults",
+    ],
+    static_libs: [
+        "libplayerservice_datasource",
+    ],
+    shared_libs: [
+        "libdatasource",
+        "libdrmframework",
+    ],
+}
+
+cc_fuzz {
+    name: "mediaplayer_fuzzer",
+    srcs: [
+        "mediaplayer_fuzzer.cpp",
+    ],
+    defaults: [
+        "libmediaplayerserviceFuzzer_defaults",
+    ],
+    static_libs: [
+        "libplayerservice_datasource",
+        "libstagefright_nuplayer",
+        "libstagefright_rtsp",
+        "libstagefright_timedtext",
+    ],
+    shared_libs: [
+        "android.hardware.media.c2@1.0",
+        "android.hardware.media.omx@1.0",
+        "av-types-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libbase",
+        "libactivitymanager_aidl",
+        "libandroid_net",
+        "libaudioclient",
+        "libcamera_client",
+        "libcodec2_client",
+        "libcrypto",
+        "libdatasource",
+        "libdrmframework",
+        "libgui",
+        "libhidlbase",
+        "liblog",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmediadrm",
+        "libmediametrics",
+        "libmediautils",
+        "libmemunreachable",
+        "libnetd_client",
+        "libpowermanager",
+        "libstagefright_httplive",
+    ],
+}
diff --git a/media/libmediaplayerservice/fuzzer/README.md b/media/libmediaplayerservice/fuzzer/README.md
new file mode 100644
index 0000000..a93c809
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/README.md
@@ -0,0 +1,83 @@
+# Fuzzer for libmediaplayerservice
+## Table of contents
++ [StagefrightMediaRecorder](#StagefrightMediaRecorder)
++ [StagefrightMetadataRetriever](#StagefrightMetadataRetriever)
++ [MediaPlayer](#MediaPlayer)
+
+# <a name="StagefrightMediaRecorder"></a> Fuzzer for StagefrightMediaRecorder
+
+StagefrightMediaRecorder supports the following parameters:
+1. Output Formats (parameter name: `setOutputFormat`)
+2. Audio Encoders (parameter name: `setAudioEncoder`)
+3. Video Encoders (parameter name: `setVideoEncoder`)
+4. Audio Sources (parameter name: `setAudioSource`)
+5. Video Sources (parameter name: `setVideoSource`)
+6. Microphone Direction (parameter name: `setMicrophoneDirection`)
+
+You can find the possible values in the fuzzer's source code.
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mediarecorder_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mediarecorder_fuzzer/mediarecorder_fuzzer
+```
+
+# <a name="StagefrightMetadataRetriever"></a> Fuzzer for StagefrightMetadataRetriever
+
+StagefrightMetadataRetriever supports the following data sources:
+1. Url (parameter name: `url`)
+2. File descriptor (parameter name: `fd`)
+3. DataSource (parameter name: `source`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `url` | Url of data source | Value obtained from FuzzedDataProvider |
+| `fd` | File descriptor value of input file | Value obtained from FuzzedDataProvider |
+| `source` | DataSource object | Data obtained from FuzzedDataProvider |
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) metadataretriever_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/metadataretriever_fuzzer/metadataretriever_fuzzer
+```
+
+# <a name="MediaPlayer"></a> Fuzzer for MediaPlayer
+
+MediaPlayerService supports the following data sources:
+1. Url (parameter name: `url`)
+2. File descriptor (parameter name: `fd`)
+3. IStreamSource  (parameter name: `source`)
+4. IDataSource (parameter name: `source`)
+5. RTP Parameters  (parameter name: `rtpParams`)
+
+MediaPlayerService supports the following parameters:
+1. Audio sessions (parameter name: `audioSessionId`)
+2. Audio stretch modes (parameter name: `mStretchMode`)
+3. Audio fallback modes  (parameter name: `mFallbackMode`)
+4. Media parameter keys (parameter name: `key`)
+5. Audio Stream Types (parameter name: `streamType`)
+6. Media Event Types (parameter name: `msg`)
+7. Media Info Types (parameter name: `ext1`)
+
+You can find the possible values in the fuzzer's source code.
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) mediaplayer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/mediaplayer_fuzzer/mediaplayer_fuzzer
+```
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
new file mode 100644
index 0000000..7799f44
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -0,0 +1,368 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <MediaPlayerService.h>
+#include <camera/Camera.h>
+#include <datasource/FileSource.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <media/IMediaCodecList.h>
+#include <media/IMediaHTTPService.h>
+#include <media/IMediaPlayer.h>
+#include <media/IMediaRecorder.h>
+#include <media/IRemoteDisplay.h>
+#include <media/IRemoteDisplayClient.h>
+#include <media/stagefright/RemoteDataSource.h>
+#include <media/stagefright/foundation/base64.h>
+#include <thread>
+#include "fuzzer/FuzzedDataProvider.h"
+
+constexpr int32_t kUuidSize = 16;
+constexpr int32_t kMaxSleepTimeInMs = 100;
+constexpr int32_t kMinSleepTimeInMs = 0;
+constexpr int32_t kPlayCountMin = 1;
+constexpr int32_t kPlayCountMax = 10;
+constexpr int32_t kMaxDimension = 8192;
+constexpr int32_t kMinDimension = 0;
+
+using namespace std;
+using namespace android;
+
+constexpr audio_session_t kSupportedAudioSessions[] = {
+    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_STAGE, AUDIO_SESSION_OUTPUT_MIX};
+
+constexpr audio_timestretch_stretch_mode_t kAudioStretchModes[] = {
+    AUDIO_TIMESTRETCH_STRETCH_DEFAULT, AUDIO_TIMESTRETCH_STRETCH_VOICE};
+
+constexpr audio_timestretch_fallback_mode_t kAudioFallbackModes[] = {
+    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT, AUDIO_TIMESTRETCH_FALLBACK_DEFAULT,
+    AUDIO_TIMESTRETCH_FALLBACK_MUTE, AUDIO_TIMESTRETCH_FALLBACK_FAIL};
+
+constexpr media_parameter_keys kMediaParamKeys[] = {
+    KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS, KEY_PARAMETER_AUDIO_CHANNEL_COUNT,
+    KEY_PARAMETER_PLAYBACK_RATE_PERMILLE, KEY_PARAMETER_AUDIO_ATTRIBUTES,
+    KEY_PARAMETER_RTP_ATTRIBUTES};
+
+constexpr audio_stream_type_t kAudioStreamTypes[] = {
+    AUDIO_STREAM_DEFAULT,      AUDIO_STREAM_VOICE_CALL,    AUDIO_STREAM_SYSTEM,
+    AUDIO_STREAM_RING,         AUDIO_STREAM_MUSIC,         AUDIO_STREAM_ALARM,
+    AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_BLUETOOTH_SCO, AUDIO_STREAM_ENFORCED_AUDIBLE,
+    AUDIO_STREAM_DTMF,         AUDIO_STREAM_TTS,           AUDIO_STREAM_ASSISTANT};
+
+constexpr media_event_type kMediaEventTypes[] = {MEDIA_NOP,
+                                                 MEDIA_PREPARED,
+                                                 MEDIA_PLAYBACK_COMPLETE,
+                                                 MEDIA_BUFFERING_UPDATE,
+                                                 MEDIA_SEEK_COMPLETE,
+                                                 MEDIA_SET_VIDEO_SIZE,
+                                                 MEDIA_STARTED,
+                                                 MEDIA_PAUSED,
+                                                 MEDIA_STOPPED,
+                                                 MEDIA_SKIPPED,
+                                                 MEDIA_NOTIFY_TIME,
+                                                 MEDIA_TIMED_TEXT,
+                                                 MEDIA_ERROR,
+                                                 MEDIA_INFO,
+                                                 MEDIA_SUBTITLE_DATA,
+                                                 MEDIA_META_DATA,
+                                                 MEDIA_DRM_INFO,
+                                                 MEDIA_TIME_DISCONTINUITY,
+                                                 MEDIA_IMS_RX_NOTICE,
+                                                 MEDIA_AUDIO_ROUTING_CHANGED};
+
+constexpr media_info_type kMediaInfoTypes[] = {
+    MEDIA_INFO_UNKNOWN,           MEDIA_INFO_STARTED_AS_NEXT,
+    MEDIA_INFO_RENDERING_START,   MEDIA_INFO_VIDEO_TRACK_LAGGING,
+    MEDIA_INFO_BUFFERING_START,   MEDIA_INFO_BUFFERING_END,
+    MEDIA_INFO_NETWORK_BANDWIDTH, MEDIA_INFO_BAD_INTERLEAVING,
+    MEDIA_INFO_NOT_SEEKABLE,      MEDIA_INFO_METADATA_UPDATE,
+    MEDIA_INFO_PLAY_AUDIO_ERROR,  MEDIA_INFO_PLAY_VIDEO_ERROR,
+    MEDIA_INFO_TIMED_TEXT_ERROR};
+
+const char *kUrlPrefix[] = {"data:", "http://", "https://", "rtsp://", "content://", "test://"};
+
+struct TestStreamSource : public IStreamSource {
+    void setListener(const sp<IStreamListener> & /*listener*/) override{};
+    void setBuffers(const Vector<sp<IMemory>> & /*buffers*/) override{};
+    void onBufferAvailable(size_t /*index*/) override{};
+    IBinder *onAsBinder() { return nullptr; };
+};
+
+class BinderDeathNotifier : public IBinder::DeathRecipient {
+   public:
+    void binderDied(const wp<IBinder> &) { abort(); }
+};
+
+class MediaPlayerServiceFuzzer {
+   public:
+    MediaPlayerServiceFuzzer(const uint8_t *data, size_t size)
+        : mFdp(data, size), mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)){};
+    ~MediaPlayerServiceFuzzer() { close(mDataSourceFd); };
+    void process(const uint8_t *data, size_t size);
+
+   private:
+    bool setDataSource(const uint8_t *data, size_t size);
+    void invokeMediaPlayer();
+    FuzzedDataProvider mFdp;
+    sp<IMediaPlayer> mMediaPlayer = nullptr;
+    sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
+    const int32_t mDataSourceFd;
+};
+
+bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t *data, size_t size) {
+    status_t status = -1;
+    enum DataSourceType {http, fd, stream, file, socket, kMaxValue = socket};
+    switch (mFdp.ConsumeEnum<DataSourceType>()) {
+        case http: {
+            KeyedVector<String8, String8> headers;
+            headers.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
+                        String8(mFdp.ConsumeRandomLengthString().c_str()));
+
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
+
+            string uri(mFdp.PickValueInArray(kUrlPrefix));
+            uri += ";base64,";
+            AString out;
+            encodeBase64(uriSuffix.data(), uriSuffix.size(), &out);
+            uri += out.c_str();
+            status = mMediaPlayer->setDataSource(nullptr /*httpService*/, uri.c_str(), &headers);
+            break;
+        }
+        case fd: {
+            write(mDataSourceFd, data, size);
+
+            status = mMediaPlayer->setDataSource(mDataSourceFd, 0, size);
+            break;
+        }
+        case stream: {
+            sp<IStreamSource> streamSource = sp<TestStreamSource>::make();
+            status = mMediaPlayer->setDataSource(streamSource);
+            break;
+        }
+        case file: {
+            write(mDataSourceFd, data, size);
+
+            sp<DataSource> dataSource = new FileSource(dup(mDataSourceFd), 0, size);
+            sp<IDataSource> iDataSource = RemoteDataSource::wrap(dataSource);
+            if (!iDataSource) {
+                return false;
+            }
+            status = mMediaPlayer->setDataSource(iDataSource);
+            break;
+        }
+        case socket: {
+            String8 rtpParams = String8(mFdp.ConsumeRandomLengthString().c_str());
+            struct sockaddr_in endpoint;
+            endpoint.sin_family = mFdp.ConsumeIntegral<unsigned short>();
+            endpoint.sin_port = mFdp.ConsumeIntegral<uint16_t>();
+            mMediaPlayer->setRetransmitEndpoint(&endpoint);
+            status = mMediaPlayer->setDataSource(rtpParams);
+            break;
+        }
+    }
+
+    if (status != 0) {
+        return false;
+    }
+    return true;
+}
+
+void MediaPlayerServiceFuzzer::invokeMediaPlayer() {
+    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+    String8 name = String8(mFdp.ConsumeRandomLengthString().c_str());
+    uint32_t width = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
+    uint32_t height = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
+    uint32_t pixelFormat = mFdp.ConsumeIntegral<int32_t>();
+    uint32_t flags = mFdp.ConsumeIntegral<int32_t>();
+    sp<SurfaceControl> surfaceControl =
+        composerClient->createSurface(name, width, height, pixelFormat, flags);
+    if (surfaceControl) {
+        sp<Surface> surface = surfaceControl->getSurface();
+        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
+    }
+
+    BufferingSettings buffering;
+    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
+    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
+    mMediaPlayer->setBufferingSettings(buffering);
+    mMediaPlayer->getBufferingSettings(&buffering);
+
+    mMediaPlayer->prepareAsync();
+    size_t playCount = mFdp.ConsumeIntegralInRange<size_t>(kPlayCountMin, kPlayCountMax);
+    for (size_t Idx = 0; Idx < playCount; ++Idx) {
+        mMediaPlayer->start();
+        this_thread::sleep_for(chrono::milliseconds(
+            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
+        mMediaPlayer->pause();
+        this_thread::sleep_for(chrono::milliseconds(
+            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
+        mMediaPlayer->stop();
+    }
+    bool state;
+    mMediaPlayer->isPlaying(&state);
+
+    AudioPlaybackRate rate;
+    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
+    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
+    rate.mStretchMode = mFdp.PickValueInArray(kAudioStretchModes);
+    rate.mFallbackMode = mFdp.PickValueInArray(kAudioFallbackModes);
+    mMediaPlayer->setPlaybackSettings(rate);
+    mMediaPlayer->getPlaybackSettings(&rate);
+
+    AVSyncSettings *avSyncSettings = new AVSyncSettings();
+    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
+    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
+    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
+    delete avSyncSettings;
+
+    mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>());
+
+    int32_t msec;
+    mMediaPlayer->getCurrentPosition(&msec);
+    mMediaPlayer->getDuration(&msec);
+    mMediaPlayer->reset();
+
+    mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<int64_t>());
+
+    mMediaPlayer->setAudioStreamType(mFdp.PickValueInArray(kAudioStreamTypes));
+    mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>());
+    float left = mFdp.ConsumeFloatingPoint<float>();
+    float right = mFdp.ConsumeFloatingPoint<float>();
+    mMediaPlayer->setVolume(left, right);
+
+    Parcel request, reply;
+    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+    request.setDataPosition(0);
+    mMediaPlayer->invoke(request, &reply);
+
+    Parcel filter;
+    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+    filter.setDataPosition(0);
+    mMediaPlayer->setMetadataFilter(filter);
+
+    bool updateOnly = mFdp.ConsumeBool();
+    bool applyFilter = mFdp.ConsumeBool();
+    mMediaPlayer->getMetadata(updateOnly, applyFilter, &reply);
+    mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>());
+    mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>());
+
+    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
+    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+    request.setDataPosition(0);
+    mMediaPlayer->setParameter(key, request);
+    key = mFdp.PickValueInArray(kMediaParamKeys);
+    mMediaPlayer->getParameter(key, &reply);
+
+    struct sockaddr_in endpoint;
+    mMediaPlayer->getRetransmitEndpoint(&endpoint);
+
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+    attributionSource.token = sp<BBinder>::make();
+    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
+    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
+        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
+    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
+
+    const sp<media::VolumeShaper::Configuration> configuration =
+        sp<media::VolumeShaper::Configuration>::make();
+    const sp<media::VolumeShaper::Operation> operation = sp<media::VolumeShaper::Operation>::make();
+    mMediaPlayer->applyVolumeShaper(configuration, operation);
+
+    mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>());
+    uint8_t uuid[kUuidSize];
+    for (int32_t index = 0; index < kUuidSize; ++index) {
+        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
+    }
+    Vector<uint8_t> drmSessionId;
+    drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
+    mMediaPlayer->prepareDrm(uuid, drmSessionId);
+    mMediaPlayer->releaseDrm();
+
+    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
+    mMediaPlayer->setOutputDevice(deviceId);
+    mMediaPlayer->getRoutedDeviceId(&deviceId);
+
+    mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool());
+
+    sp<MediaPlayer> mediaPlayer = (MediaPlayer *)mMediaPlayer.get();
+
+    int32_t msg = mFdp.PickValueInArray(kMediaEventTypes);
+    int32_t ext1 = mFdp.PickValueInArray(kMediaInfoTypes);
+    int32_t ext2 = mFdp.ConsumeIntegral<int32_t>();
+    Parcel obj;
+    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+    obj.setDataPosition(0);
+    mediaPlayer->notify(msg, ext1, ext2, &obj);
+
+    int32_t mediaPlayerDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
+    Vector<String16> args;
+    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+    mediaPlayer->dump(mediaPlayerDumpFd, args);
+    close(mediaPlayerDumpFd);
+
+    mMediaPlayer->disconnect();
+}
+
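+// One fuzz iteration: bring up MediaPlayerService, poke the service-level APIs
+// (codec list, remote display, battery data, dump), create a player, and, if a
+// data source could be set from the fuzz input, exercise it via invokeMediaPlayer().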
+void MediaPlayerServiceFuzzer::process(const uint8_t *data, size_t size) {
+    MediaPlayerService::instantiate();
+
+    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
+    if (!mpService) {
+        return;
+    }
+
+    sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
+
+    sp<IRemoteDisplayClient> remoteDisplayClient;
+    sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
+        String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/, remoteDisplayClient,
+        String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
+
+    mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
+    Parcel reply;
+    mpService->pullBatteryData(&reply);
+
+    sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService *)mpService.get();
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+    attributionSource.token = sp<BBinder>::make();
+    mMediaPlayer = mediaPlayerService->create(
+        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
+
+    int32_t mediaPlayerServiceDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
+    Vector<String16> args;
+    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+    mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
+    close(mediaPlayerServiceDumpFd);
+
+    if (!mMediaPlayer) {
+        return;
+    }
+
+    if (setDataSource(data, size)) {
+        invokeMediaPlayer();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    MediaPlayerServiceFuzzer mpsFuzzer(data, size);
+    ProcessState::self()->startThreadPool();
+    mpsFuzzer.process(data, size);
+    return 0;
+}
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
new file mode 100644
index 0000000..b0040fe
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -0,0 +1,312 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <media/stagefright/foundation/AString.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+#include <StagefrightRecorder.h>
+#include <camera/Camera.h>
+#include <camera/android/hardware/ICamera.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <media/stagefright/PersistentSurface.h>
+#include <thread>
+
+using namespace std;
+using namespace android;
+using namespace android::hardware;
+
+constexpr video_source kSupportedVideoSources[] = {VIDEO_SOURCE_DEFAULT, VIDEO_SOURCE_CAMERA,
+                                                   VIDEO_SOURCE_SURFACE};
+
+constexpr audio_source_t kSupportedAudioSources[] = {
+    AUDIO_SOURCE_DEFAULT,           AUDIO_SOURCE_MIC,
+    AUDIO_SOURCE_VOICE_UPLINK,      AUDIO_SOURCE_VOICE_DOWNLINK,
+    AUDIO_SOURCE_VOICE_CALL,        AUDIO_SOURCE_CAMCORDER,
+    AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_SOURCE_VOICE_COMMUNICATION,
+    AUDIO_SOURCE_REMOTE_SUBMIX,     AUDIO_SOURCE_UNPROCESSED,
+    AUDIO_SOURCE_VOICE_PERFORMANCE, AUDIO_SOURCE_ECHO_REFERENCE,
+    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD};
+
+constexpr audio_microphone_direction_t kSupportedMicrophoneDirections[] = {
+    MIC_DIRECTION_UNSPECIFIED, MIC_DIRECTION_FRONT, MIC_DIRECTION_BACK, MIC_DIRECTION_EXTERNAL};
+
+struct RecordingConfig {
+    output_format outputFormat;
+    audio_encoder audioEncoder;
+    video_encoder videoEncoder;
+};
+
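+// Each entry pairs an output container with encoders that are valid for it, so a
+// single random index always selects a consistent format/encoder combination.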
+const struct RecordingConfig kRecordingConfigList[] = {
+    {OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_HE_AAC, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC_ELD, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_RTP_AVP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_DEFAULT},
+    {OUTPUT_FORMAT_MPEG2TS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
+    {OUTPUT_FORMAT_WEBM, AUDIO_ENCODER_VORBIS, VIDEO_ENCODER_VP8},
+    {OUTPUT_FORMAT_THREE_GPP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
+    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
+    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
+    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_HEVC}};
+
+const string kParametersList[] = {"max-duration",
+                                  "max-filesize",
+                                  "interleave-duration-us",
+                                  "param-movie-time-scale",
+                                  "param-geotag-longitude",
+                                  "param-geotag-latitude",
+                                  "param-track-time-status",
+                                  "audio-param-sampling-rate",
+                                  "audio-param-encoding-bitrate",
+                                  "audio-param-number-of-channels",
+                                  "audio-param-time-scale",
+                                  "video-param-rotation-angle-degrees",
+                                  "video-param-encoding-bitrate",
+                                  "video-param-bitrate-mode",
+                                  "video-param-i-frames-interval",
+                                  "video-param-encoder-profile",
+                                  "video-param-encoder-level",
+                                  "video-param-camera-id",
+                                  "video-param-time-scale",
+                                  "param-use-64bit-offset",
+                                  "time-lapse-enable",
+                                  "time-lapse-fps",
+                                  "rtp-param-local-ip",
+                                  "rtp-param-local-port",
+                                  "rtp-param-remote-port",
+                                  "rtp-param-self-id",
+                                  "rtp-param-opponent-id",
+                                  "rtp-param-payload-type",
+                                  "rtp-param-ext-cvo-extmap",
+                                  "rtp-param-ext-cvo-degrees",
+                                  "video-param-request-i-frame",
+                                  "rtp-param-set-socket-dscp",
+                                  "rtp-param-set-socket-network"};
+
+constexpr int32_t kMaxSleepTimeInMs = 100;
+constexpr int32_t kMinSleepTimeInMs = 0;
+constexpr int32_t kMinVideoSize = 2;
+constexpr int32_t kMaxVideoSize = 8192;
+constexpr int32_t kNumRecordMin = 1;
+constexpr int32_t kNumRecordMax = 10;
+
+class TestAudioDeviceCallback : public AudioSystem::AudioDeviceCallback {
+   public:
+    virtual ~TestAudioDeviceCallback() = default;
+
+    void onAudioDeviceUpdate(audio_io_handle_t /*audioIo*/,
+                             audio_port_handle_t /*deviceId*/) override{};
+};
+
+class TestCamera : public ICamera {
+   public:
+    virtual ~TestCamera() = default;
+
+    binder::Status disconnect() override { return binder::Status::ok(); };
+    status_t connect(const sp<ICameraClient> & /*client*/) override { return 0; };
+    status_t lock() override { return 0; };
+    status_t unlock() override { return 0; };
+    status_t setPreviewTarget(const sp<IGraphicBufferProducer> & /*bufferProducer*/) override {
+        return 0;
+    };
+    void setPreviewCallbackFlag(int /*flag*/) override{};
+    status_t setPreviewCallbackTarget(
+        const sp<IGraphicBufferProducer> & /*callbackProducer*/) override {
+        return 0;
+    };
+    status_t startPreview() override { return 0; };
+    void stopPreview() override{};
+    bool previewEnabled() override { return true; };
+    status_t startRecording() override { return 0; };
+    void stopRecording() override{};
+    bool recordingEnabled() override { return true; };
+    void releaseRecordingFrame(const sp<IMemory> & /*mem*/) override{};
+    void releaseRecordingFrameHandle(native_handle_t * /*handle*/) override{};
+    void releaseRecordingFrameHandleBatch(const vector<native_handle_t *> & /*handles*/) override{};
+    status_t autoFocus() override { return 0; };
+    status_t cancelAutoFocus() override { return 0; };
+    status_t takePicture(int /*msgType*/) override { return 0; };
+    status_t setParameters(const String8 & /*params*/) override { return 0; };
+    String8 getParameters() const override { return String8(); };
+    status_t sendCommand(int32_t /*cmd*/, int32_t /*arg1*/, int32_t /*arg2*/) override {
+        return 0;
+    };
+    status_t setVideoBufferMode(int32_t /*videoBufferMode*/) override { return 0; };
+    status_t setVideoTarget(const sp<IGraphicBufferProducer> & /*bufferProducer*/) override {
+        return 0;
+    };
+    status_t setAudioRestriction(int32_t /*mode*/) override { return 0; };
+    int32_t getGlobalAudioRestriction() override { return 0; };
+    IBinder *onAsBinder() override { return reinterpret_cast<IBinder *>(this); };
+};
+
+class TestMediaRecorderClient : public IMediaRecorderClient {
+   public:
+    virtual ~TestMediaRecorderClient() = default;
+
+    void notify(int /*msg*/, int /*ext1*/, int /*ext2*/) override{};
+    IBinder *onAsBinder() override { return reinterpret_cast<IBinder *>(this); };
+};
+
+class MediaRecorderClientFuzzer {
+   public:
+    MediaRecorderClientFuzzer(const uint8_t *data, size_t size);
+    ~MediaRecorderClientFuzzer() { close(mMediaRecorderOutputFd); }
+    void process();
+
+   private:
+    void setConfig();
+    void getConfig();
+    void dumpInfo();
+
+    FuzzedDataProvider mFdp;
+    unique_ptr<MediaRecorderBase> mStfRecorder = nullptr;
+    SurfaceComposerClient mComposerClient;
+    sp<SurfaceControl> mSurfaceControl = nullptr;
+    sp<Surface> mSurface = nullptr;
+    const int32_t mMediaRecorderOutputFd;
+};
+
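+// Exercises the recorder's query paths: max amplitude, routed input device, active
+// microphones, port id, RTP data usage, the metrics parcel and the surface media source.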
+void MediaRecorderClientFuzzer::getConfig() {
+    int32_t max;
+    mStfRecorder->getMaxAmplitude(&max);
+
+    int32_t deviceId = mFdp.ConsumeIntegral<int32_t>();
+    mStfRecorder->setInputDevice(deviceId);
+    mStfRecorder->getRoutedDeviceId(&deviceId);
+
+    vector<android::media::MicrophoneInfo> activeMicrophones{};
+    mStfRecorder->getActiveMicrophones(&activeMicrophones);
+
+    int32_t portId;
+    mStfRecorder->getPortId(&portId);
+
+    uint64_t bytes;
+    mStfRecorder->getRtpDataUsage(&bytes);
+
+    Parcel parcel;
+    mStfRecorder->getMetrics(&parcel);
+
+    sp<IGraphicBufferProducer> buffer = mStfRecorder->querySurfaceMediaSource();
+}
+
+void MediaRecorderClientFuzzer::dumpInfo() {
+    int32_t dumpFd = memfd_create("DumpFile", MFD_ALLOW_SEALING);
+    Vector<String16> args;
+    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+    mStfRecorder->dump(dumpFd, args);
+    close(dumpFd);
+}
+
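+// Applies a fuzzed recording configuration: output fd, audio/video sources, microphone
+// direction, video size and frame rate, one consistent entry from kRecordingConfigList,
+// a next output file, and a random subset of the parameters in kParametersList.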
+void MediaRecorderClientFuzzer::setConfig() {
+    mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
+    mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
+    mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
+    mStfRecorder->setPreferredMicrophoneDirection(
+        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
+    mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool());
+    bool isPrivacySensitive;
+    mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
+    mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize),
+                               mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize));
+    mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>());
+    mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool());
+    mStfRecorder->setPreferredMicrophoneFieldDimension(mFdp.ConsumeFloatingPoint<float>());
+    mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
+
+    int32_t Idx = mFdp.ConsumeIntegralInRange<int32_t>(0, size(kRecordingConfigList) - 1);
+    mStfRecorder->setOutputFormat(kRecordingConfigList[Idx].outputFormat);
+    mStfRecorder->setAudioEncoder(kRecordingConfigList[Idx].audioEncoder);
+    mStfRecorder->setVideoEncoder(kRecordingConfigList[Idx].videoEncoder);
+
+    int32_t nextOutputFd = memfd_create("NextOutputFile", MFD_ALLOW_SEALING);
+    mStfRecorder->setNextOutputFile(nextOutputFd);
+    close(nextOutputFd);
+
+    for (Idx = 0; Idx < size(kParametersList); ++Idx) {
+        if (mFdp.ConsumeBool()) {
+            int32_t value = mFdp.ConsumeIntegral<int32_t>();
+            mStfRecorder->setParameters(
+                String8((kParametersList[Idx] + "=" + to_string(value)).c_str()));
+        }
+    }
+}
+
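+// Wires up a StagefrightRecorder with a preview surface, stub recorder client, stub
+// camera, persistent input surface and audio device callback so the record paths below
+// run against stubbed camera and client interfaces.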
+MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t *data, size_t size)
+    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create("OutputFile", MFD_ALLOW_SEALING)) {
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+    attributionSource.token = sp<BBinder>::make();
+    mStfRecorder = make_unique<StagefrightRecorder>(attributionSource);
+
+    mSurfaceControl = mComposerClient.createSurface(
+        String8(mFdp.ConsumeRandomLengthString().c_str()), mFdp.ConsumeIntegral<uint32_t>(),
+        mFdp.ConsumeIntegral<uint32_t>(), mFdp.ConsumeIntegral<int32_t>(),
+        mFdp.ConsumeIntegral<int32_t>());
+    if (mSurfaceControl) {
+        mSurface = mSurfaceControl->getSurface();
+        mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
+    }
+
+    sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
+    mStfRecorder->setListener(listener);
+
+    sp<TestCamera> testCamera = sp<TestCamera>::make();
+    sp<Camera> camera = Camera::create(testCamera);
+    mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
+
+    sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
+    mStfRecorder->setInputSurface(persistentSurface);
+
+    sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
+    mStfRecorder->setAudioDeviceCallback(callback);
+}
+
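+// Runs the recorder through a fuzzed number of start/pause/resume/stop cycles with
+// short random sleeps in between, then dumps its state, queries it and tears down.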
+void MediaRecorderClientFuzzer::process() {
+    setConfig();
+
+    mStfRecorder->init();
+    mStfRecorder->prepare();
+    size_t numRecord = mFdp.ConsumeIntegralInRange<size_t>(kNumRecordMin, kNumRecordMax);
+    for (size_t Idx = 0; Idx < numRecord; ++Idx) {
+        mStfRecorder->start();
+        this_thread::sleep_for(chrono::milliseconds(
+            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
+        mStfRecorder->pause();
+        this_thread::sleep_for(chrono::milliseconds(
+            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
+        mStfRecorder->resume();
+        this_thread::sleep_for(chrono::milliseconds(
+            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
+        mStfRecorder->stop();
+    }
+    dumpInfo();
+    getConfig();
+
+    mStfRecorder->close();
+    mStfRecorder->reset();
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    MediaRecorderClientFuzzer mrcFuzzer(data, size);
+    mrcFuzzer.process();
+    return 0;
+}
diff --git a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
new file mode 100644
index 0000000..a7cb689
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <StagefrightMetadataRetriever.h>
+#include <binder/ProcessState.h>
+#include <datasource/FileSource.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/base64.h>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+using namespace std;
+using namespace android;
+
+const char *kMimeTypes[] = {MEDIA_MIMETYPE_IMAGE_JPEG,         MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
+                            MEDIA_MIMETYPE_VIDEO_VP8,          MEDIA_MIMETYPE_VIDEO_VP9,
+                            MEDIA_MIMETYPE_VIDEO_AV1,          MEDIA_MIMETYPE_VIDEO_AVC,
+                            MEDIA_MIMETYPE_VIDEO_HEVC,         MEDIA_MIMETYPE_VIDEO_MPEG4,
+                            MEDIA_MIMETYPE_VIDEO_H263,         MEDIA_MIMETYPE_VIDEO_MPEG2,
+                            MEDIA_MIMETYPE_VIDEO_RAW,          MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+                            MEDIA_MIMETYPE_VIDEO_SCRAMBLED,    MEDIA_MIMETYPE_VIDEO_DIVX,
+                            MEDIA_MIMETYPE_VIDEO_DIVX3,        MEDIA_MIMETYPE_VIDEO_XVID,
+                            MEDIA_MIMETYPE_VIDEO_MJPEG,        MEDIA_MIMETYPE_AUDIO_AMR_NB,
+                            MEDIA_MIMETYPE_AUDIO_AMR_WB,       MEDIA_MIMETYPE_AUDIO_MPEG,
+                            MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+                            MEDIA_MIMETYPE_AUDIO_MIDI,         MEDIA_MIMETYPE_AUDIO_AAC,
+                            MEDIA_MIMETYPE_AUDIO_QCELP,        MEDIA_MIMETYPE_AUDIO_VORBIS,
+                            MEDIA_MIMETYPE_AUDIO_OPUS,         MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+                            MEDIA_MIMETYPE_AUDIO_G711_MLAW,    MEDIA_MIMETYPE_AUDIO_RAW,
+                            MEDIA_MIMETYPE_AUDIO_FLAC,         MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+                            MEDIA_MIMETYPE_AUDIO_MSGSM,        MEDIA_MIMETYPE_AUDIO_AC3,
+                            MEDIA_MIMETYPE_AUDIO_EAC3,         MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+                            MEDIA_MIMETYPE_AUDIO_AC4,          MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+                            MEDIA_MIMETYPE_AUDIO_ALAC,         MEDIA_MIMETYPE_AUDIO_WMA,
+                            MEDIA_MIMETYPE_AUDIO_MS_ADPCM,     MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+                            MEDIA_MIMETYPE_CONTAINER_MPEG4,    MEDIA_MIMETYPE_CONTAINER_WAV,
+                            MEDIA_MIMETYPE_CONTAINER_OGG,      MEDIA_MIMETYPE_CONTAINER_MATROSKA,
+                            MEDIA_MIMETYPE_CONTAINER_MPEG2TS,  MEDIA_MIMETYPE_CONTAINER_AVI,
+                            MEDIA_MIMETYPE_CONTAINER_MPEG2PS,  MEDIA_MIMETYPE_CONTAINER_HEIF,
+                            MEDIA_MIMETYPE_TEXT_3GPP,          MEDIA_MIMETYPE_TEXT_SUBRIP,
+                            MEDIA_MIMETYPE_TEXT_VTT,           MEDIA_MIMETYPE_TEXT_CEA_608,
+                            MEDIA_MIMETYPE_TEXT_CEA_708,       MEDIA_MIMETYPE_DATA_TIMED_ID3};
+
+class MetadataRetrieverFuzzer {
+   public:
+    MetadataRetrieverFuzzer(const uint8_t *data, size_t size)
+        : mFdp(data, size),
+          mMdRetriever(new StagefrightMetadataRetriever()),
+          mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)) {}
+    ~MetadataRetrieverFuzzer() { close(mDataSourceFd); }
+    bool setDataSource(const uint8_t *data, size_t size);
+    void getData();
+
+   private:
+    FuzzedDataProvider mFdp;
+    sp<StagefrightMetadataRetriever> mMdRetriever = nullptr;
+    const int32_t mDataSourceFd;
+};
+
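+// Calls the frame, image, album art and metadata extraction APIs with fuzzed
+// timestamps, indices, color formats and crop rectangles.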
+void MetadataRetrieverFuzzer::getData() {
+    int64_t timeUs = mFdp.ConsumeIntegral<int64_t>();
+    int32_t option = mFdp.ConsumeIntegral<int32_t>();
+    int32_t colorFormat = mFdp.ConsumeIntegral<int32_t>();
+    bool metaOnly = mFdp.ConsumeBool();
+    mMdRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
+
+    int32_t index = mFdp.ConsumeIntegral<int32_t>();
+    colorFormat = mFdp.ConsumeIntegral<int32_t>();
+    metaOnly = mFdp.ConsumeBool();
+    bool thumbnail = mFdp.ConsumeBool();
+    mMdRetriever->getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
+
+    index = mFdp.ConsumeIntegral<int32_t>();
+    colorFormat = mFdp.ConsumeIntegral<int32_t>();
+    int32_t left = mFdp.ConsumeIntegral<int32_t>();
+    int32_t top = mFdp.ConsumeIntegral<int32_t>();
+    int32_t right = mFdp.ConsumeIntegral<int32_t>();
+    int32_t bottom = mFdp.ConsumeIntegral<int32_t>();
+    mMdRetriever->getImageRectAtIndex(index, colorFormat, left, top, right, bottom);
+
+    index = mFdp.ConsumeIntegral<int32_t>();
+    colorFormat = mFdp.ConsumeIntegral<int32_t>();
+    metaOnly = mFdp.ConsumeBool();
+    mMdRetriever->getFrameAtIndex(index, colorFormat, metaOnly);
+
+    mMdRetriever->extractAlbumArt();
+
+    int32_t keyCode = mFdp.ConsumeIntegral<int32_t>();
+    mMdRetriever->extractMetadata(keyCode);
+}
+
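+// Routes the fuzz input into the retriever through one of three paths: a base64 data
+// URI (the http-service overload), a raw file descriptor, or an explicit FileSource.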
+bool MetadataRetrieverFuzzer::setDataSource(const uint8_t *data, size_t size) {
+    status_t status = -1;
+
+    enum DataSourceChoice {FromHttp, FromFd, FromFileSource, kMaxValue = FromFileSource};
+    switch (mFdp.ConsumeEnum<DataSourceChoice>()) {
+        case FromHttp: {
+            KeyedVector<String8, String8> mHeaders;
+            mHeaders.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
+                         String8(mFdp.ConsumeRandomLengthString().c_str()));
+
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
+
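+            // Build a "data:;base64,<payload>" URI from the fuzzed bytes so the
+            // http-service overload of setDataSource can be exercised without a network.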
+            string uri("data:");
+            uri += ";base64,";
+            AString out;
+            encodeBase64(uriSuffix.data(), uriSuffix.size(), &out);
+            uri += out.c_str();
+            status = mMdRetriever->setDataSource(nullptr /*httpService*/, uri.c_str(), &mHeaders);
+            break;
+        }
+        case FromFd: {
+            write(mDataSourceFd, data, size);
+
+            status = mMdRetriever->setDataSource(mDataSourceFd, 0, size);
+            break;
+        }
+        case FromFileSource: {
+            write(mDataSourceFd, data, size);
+
+            sp<DataSource> dataSource = new FileSource(dup(mDataSourceFd), 0, size);
+            status = mMdRetriever->setDataSource(dataSource, mFdp.PickValueInArray(kMimeTypes));
+            break;
+        }
+    }
+
+    return status == 0;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+    MetadataRetrieverFuzzer mrtFuzzer(data, size);
+    ProcessState::self()->startThreadPool();
+    if (mrtFuzzer.setDataSource(data, size)) {
+        mrtFuzzer.getData();
+    }
+    return 0;
+}
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index af9cf45..c3bd207 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -62,7 +62,8 @@
             binder::Status status = mPowerManager->acquireWakeLock(
                     binder, POWERMANAGER_PARTIAL_WAKE_LOCK,
                     String16("AWakeLock"), String16("media"),
-                    {} /* workSource */, {} /* historyTag */, -1 /* displayId */);
+                    {} /* workSource */, {} /* historyTag */, -1 /* displayId */,
+                    nullptr /* callback */);
             IPCThreadState::self()->restoreCallingIdentity(token);
             if (status.isOk()) {
                 mWakeLockToken = binder;
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 6d338db..71a3168 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -37,20 +37,22 @@
         "StreamingSource.cpp",
     ],
 
+    local_include_dirs: [
+        "include/nuplayer",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
     header_libs: [
         "libmediadrm_headers",
         "libmediametrics_headers",
         "media_plugin_headers",
-    ],
-
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/av/media/libstagefright/httplive",
-        "frameworks/av/media/libstagefright/include",
-        "frameworks/av/media/libstagefright/mpeg2ts",
-        "frameworks/av/media/libstagefright/rtsp",
-        "frameworks/av/media/libstagefright/timedtext",
-        "frameworks/native/include/android",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
+        "libstagefright_mpeg2support_headers",
+        "libstagefright_rtsp_headers",
     ],
 
     cflags: [
@@ -74,10 +76,12 @@
         "libmedia",
         "libmediadrm",
         "libpowermanager",
+        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
         "libplayerservice_datasource",
+        "libstagefright_timedtext",
     ],
 
     name: "libstagefright_nuplayer",
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 439dbe8..36e4d4a 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -20,7 +20,6 @@
 #include "GenericSource.h"
 #include "NuPlayerDrm.h"
 
-#include "AnotherPacketSource.h"
 #include <binder/IServiceManager.h>
 #include <cutils/properties.h>
 #include <datasource/PlayerServiceDataSourceFactory.h>
@@ -44,6 +43,7 @@
 #include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
+#include <mpeg2ts/AnotherPacketSource.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 77e7885..4e71e89 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -19,8 +19,6 @@
 #include <utils/Log.h>
 
 #include "HTTPLiveSource.h"
-
-#include "AnotherPacketSource.h"
 #include "LiveDataSource.h"
 
 #include <media/IMediaHTTPService.h>
@@ -31,6 +29,7 @@
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/Utils.h>
+#include <mpeg2ts/AnotherPacketSource.h>
 
 // default buffer prepare/ready/underflow marks
 static const int kReadyMarkMs     = 5000;  // 5 seconds
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 9ae7ddb..9b4fc8f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -35,9 +35,7 @@
 #include "RTSPSource.h"
 #include "StreamingSource.h"
 #include "GenericSource.h"
-#include "TextDescriptions.h"
-
-#include "ATSParser.h"
+#include <timedtext/TextDescriptions.h>
 
 #include <cutils/properties.h>
 
@@ -56,6 +54,8 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 
+#include <mpeg2ts/ATSParser.h>
+
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 
@@ -2893,6 +2893,38 @@
             in.writeInt32(recvTimeUs & 0xFFFFFFFF);
             break;
         }
+        case ARTPSource::RTCP_RR:
+        {
+            int64_t recvTimeUs;
+            int32_t senderId;
+            int32_t ssrc;
+            int32_t fraction;
+            int32_t lost;
+            int32_t lastSeq;
+            int32_t jitter;
+            int32_t lsr;
+            int32_t dlsr;
+            CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+            CHECK(msg->findInt32("rtcp-rr-ssrc", &senderId));
+            CHECK(msg->findInt32("rtcp-rrb-ssrc", &ssrc));
+            CHECK(msg->findInt32("rtcp-rrb-fraction", &fraction));
+            CHECK(msg->findInt32("rtcp-rrb-lost", &lost));
+            CHECK(msg->findInt32("rtcp-rrb-lastSeq", &lastSeq));
+            CHECK(msg->findInt32("rtcp-rrb-jitter", &jitter));
+            CHECK(msg->findInt32("rtcp-rrb-lsr", &lsr));
+            CHECK(msg->findInt32("rtcp-rrb-dlsr", &dlsr));
+            in.writeInt32(recvTimeUs >> 32);
+            in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+            in.writeInt32(senderId);
+            in.writeInt32(ssrc);
+            in.writeInt32(fraction);
+            in.writeInt32(lost);
+            in.writeInt32(lastSeq);
+            in.writeInt32(jitter);
+            in.writeInt32(lsr);
+            in.writeInt32(dlsr);
+            break;
+        }
         case ARTPSource::RTCP_TSFB:   // RTCP TSFB
         case ARTPSource::RTCP_PSFB:   // RTCP PSFB
         case ARTPSource::RTP_AUTODOWN:
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 2c1f158..52b2041 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -40,10 +40,9 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/SurfaceUtils.h>
+#include <mpeg2ts/ATSParser.h>
 #include <gui/Surface.h>
 
-#include "ATSParser.h"
-
 namespace android {
 
 static float kDisplayRefreshingRate = 60.f; // TODO: get this from the display
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
index 793014e..cb91fd9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -30,8 +30,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaErrors.h>
-
-#include "ATSParser.h"
+#include <mpeg2ts/ATSParser.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 4a65f71..cb050f9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -73,6 +73,10 @@
 // is closed to allow the audio DSP to power down.
 static const int64_t kOffloadPauseMaxUs = 10000000LL;
 
+// Additional delay after teardown before releasing the wake lock to allow time for the audio path
+// to be completely released
+static const int64_t kWakelockReleaseDelayUs = 2000000LL;
+
 // Maximum allowed delay from AudioSink, 1.5 seconds.
 static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000LL;
 
@@ -793,6 +797,20 @@
             }
             ALOGV("Audio Offload tear down due to pause timeout.");
             onAudioTearDown(kDueToTimeout);
+            sp<AMessage> newMsg = new AMessage(kWhatReleaseWakeLock, this);
+            newMsg->setInt32("drainGeneration", generation);
+            newMsg->post(kWakelockReleaseDelayUs);
+            break;
+        }
+
+        case kWhatReleaseWakeLock:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("drainGeneration", &generation));
+            if (generation != mAudioOffloadPauseTimeoutGeneration) {
+                break;
+            }
+            ALOGV("releasing audio offload pause wakelock.");
             mWakeLock->release();
             break;
         }
@@ -1673,24 +1691,18 @@
 
         mDrainAudioQueuePending = false;
 
-        if (offloadingAudio()) {
-            mAudioSink->pause();
-            mAudioSink->flush();
-            if (!mPaused) {
-                mAudioSink->start();
-            }
-        } else {
-            mAudioSink->pause();
-            mAudioSink->flush();
+        mAudioSink->pause();
+        mAudioSink->flush();
+        if (!offloadingAudio()) {
             // Call stop() to signal to the AudioSink to completely fill the
             // internal buffer before resuming playback.
             // FIXME: this is ignored after flush().
             mAudioSink->stop();
-            if (!mPaused) {
-                mAudioSink->start();
-            }
             mNumFramesWritten = 0;
         }
+        if (!mPaused) {
+            mAudioSink->start();
+        }
         mNextAudioClockUpdateTimeUs = -1;
     } else {
         flushQueue(&mVideoQueue);
@@ -1791,6 +1803,8 @@
         return;
     }
 
+    startAudioOffloadPauseTimeout();
+
     {
         Mutex::Autolock autoLock(mLock);
         // we do not increment audio drain generation so that we fill audio buffer during pause.
@@ -1805,7 +1819,6 @@
 
     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
     mAudioSink->pause();
-    startAudioOffloadPauseTimeout();
 
     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
           mAudioQueue.size(), mVideoQueue.size());
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 4d6a483..6a17972 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -336,7 +336,7 @@
     *durationUs = 0ll;
 
     int64_t audioDurationUs;
-    if (mAudioTrack != NULL
+    if (mAudioTrack != NULL && mAudioTrack->getFormat() != NULL
             && mAudioTrack->getFormat()->findInt64(
                 kKeyDuration, &audioDurationUs)
             && audioDurationUs > *durationUs) {
@@ -344,7 +344,7 @@
     }
 
     int64_t videoDurationUs;
-    if (mVideoTrack != NULL
+    if (mVideoTrack != NULL && mVideoTrack->getFormat() != NULL
             && mVideoTrack->getFormat()->findInt64(
                 kKeyDuration, &videoDurationUs)
             && videoDurationUs > *durationUs) {
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 8e05de8..75cedcc 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -20,13 +20,12 @@
 
 #include "RTSPSource.h"
 
-#include "AnotherPacketSource.h"
-#include "MyHandler.h"
-#include "SDPLoader.h"
-
 #include <media/IMediaHTTPService.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
+#include <media/stagefright/rtsp/MyHandler.h>
+#include <media/stagefright/rtsp/SDPLoader.h>
+#include <mpeg2ts/AnotherPacketSource.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index bec27d3..9d67ca4 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -20,8 +20,6 @@
 
 #include "StreamingSource.h"
 
-#include "ATSParser.h"
-#include "AnotherPacketSource.h"
 #include "NuPlayerStreamListener.h"
 
 #include <media/stagefright/MediaSource.h>
@@ -31,6 +29,8 @@
 #include <media/stagefright/foundation/MediaKeys.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
+#include <mpeg2ts/AnotherPacketSource.h>
+#include <mpeg2ts/ATSParser.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/AWakeLock.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/AWakeLock.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/AWakeLock.h
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/GenericSource.h
similarity index 99%
rename from media/libmediaplayerservice/nuplayer/GenericSource.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/GenericSource.h
index 7a2ab8f..80e06f1 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/GenericSource.h
@@ -21,11 +21,10 @@
 #include "NuPlayer.h"
 #include "NuPlayerSource.h"
 
-#include "ATSParser.h"
-
 #include <android-base/unique_fd.h>
 #include <media/mediaplayer.h>
 #include <media/stagefright/MediaBuffer.h>
+#include <mpeg2ts/ATSParser.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/HTTPLiveSource.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/HTTPLiveSource.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayer.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerCCDecoder.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerCCDecoder.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDecoder.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDecoder.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDecoderBase.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDecoderBase.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDecoderPassThrough.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDecoderPassThrough.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDriver.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDrm.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerDrm.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
similarity index 99%
rename from media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index 3d2b033..3640678 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -100,6 +100,7 @@
         kWhatMediaRenderingStart      = 'mdrd',
         kWhatAudioTearDown            = 'adTD',
         kWhatAudioOffloadPauseTimeout = 'aOPT',
+        kWhatReleaseWakeLock          = 'adRL',
     };
 
     enum AudioTearDownReason {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerSource.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerSource.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerSource.h
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerStreamListener.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerStreamListener.h
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
similarity index 95%
rename from media/libmediaplayerservice/nuplayer/RTPSource.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
index 3b4f9e9..7d9bb8f 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
@@ -23,25 +23,20 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaSource.h>
+#include <media/stagefright/rtsp/APacketSource.h>
+#include <media/stagefright/rtsp/ARTPConnection.h>
+#include <media/stagefright/rtsp/ARTPSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 #include <media/stagefright/Utils.h>
 #include <media/BufferingSettings.h>
+#include <mpeg2ts/AnotherPacketSource.h>
 
 #include <utils/KeyedVector.h>
 #include <utils/Vector.h>
 #include <utils/RefBase.h>
 
-#include "AnotherPacketSource.h"
-#include "APacketSource.h"
-#include "ARTPConnection.h"
-#include "ARTPSource.h"
-#include "ASessionDescription.h"
 #include "NuPlayerSource.h"
 
-
-
-
-
-
 namespace android {
 
 struct ALooper;
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTSPSource.h
similarity index 99%
rename from media/libmediaplayerservice/nuplayer/RTSPSource.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/RTSPSource.h
index 03fce08..7497e41 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTSPSource.h
@@ -20,7 +20,7 @@
 
 #include "NuPlayerSource.h"
 
-#include "ATSParser.h"
+#include <mpeg2ts/ATSParser.h>
 
 namespace android {
 
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/StreamingSource.h
similarity index 100%
rename from media/libmediaplayerservice/nuplayer/StreamingSource.h
rename to media/libmediaplayerservice/nuplayer/include/nuplayer/StreamingSource.h
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
index 98626fd..99202b8 100644
--- a/media/libmediaplayerservice/tests/Android.bp
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -30,7 +30,7 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     include_dirs: [
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index c81a659..2beb47f 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -61,7 +61,11 @@
         return true;
     }
 
-    virtual bool isValidPid(int /* pid */) {
+    virtual bool isPidTrusted(int /* pid */) {
+        return true;
+    }
+
+    virtual bool isPidUidTrusted(int /* pid */, int /* uid */) {
         return true;
     }
 
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
index 92236ea..6eb8c6f 100644
--- a/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/Android.bp
@@ -33,10 +33,6 @@
         "StagefrightRecorderTest.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libmediaplayerservice",
-    ],
-
     static_libs: [
         "libmediaplayerservice",
         "libstagefright_httplive",
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 042850c..937650f 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -106,8 +106,8 @@
     export_include_dirs: ["include"],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
-        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
+        "resourceobserver_aidl_interface-V1-ndk",
         "libstatslog_media",
     ],
 
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
index 9311e2e..ea63da8 100644
--- a/media/libmediatranscoding/include/media/ControllerClientInterface.h
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -66,7 +66,7 @@
      * Returns false if the session doesn't exist, or the client is already requesting the
      * session. Returns true otherwise.
      */
-    virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid);
+    virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) = 0;
 
     /**
      * Retrieves the (unsorted) list of all clients requesting the session identified by
@@ -81,7 +81,7 @@
      * Returns false if the session doesn't exist. Returns true otherwise.
      */
     virtual bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
-                               std::vector<int32_t>* out_clientUids);
+                               std::vector<int32_t>* out_clientUids) = 0;
 
 protected:
     virtual ~ControllerClientInterface() = default;
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 603611a..7a6980f 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -31,7 +31,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
         "libmediatranscoding",
     ],
 
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
index e20f7ab..411b6ef 100644
--- a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -55,8 +55,8 @@
     AMediaFormat_getString(srcFormat, AMEDIAFORMAT_KEY_MIME, &srcMime);
     if (!AMediaFormat_getString(options, AMEDIAFORMAT_KEY_MIME, &dstMime) ||
         strcmp(srcMime, dstMime) == 0) {
-        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, String));
-        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, String));
+        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_PROFILE, Int32));
+        srcParamsToCopy.push_back(ENTRY_COPIER(AMEDIAFORMAT_KEY_LEVEL, Int32));
     }
 
     // ------- Define parameters to copy from the caller's options -------
diff --git a/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h b/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
index 348b4f8..635f67f 100644
--- a/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
+++ b/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
@@ -42,10 +42,6 @@
     //virtual size_t framesUnderrun() const;
     //virtual size_t underruns() const;
 
-    // This is an over-estimate, and could dupe the caller into making a blocking write()
-    // FIXME Use an audio HAL API to query the buffer emptying status when it's available.
-    virtual ssize_t availableToWrite() { return mStreamBufferSizeBytes / mFrameSize; }
-
     virtual ssize_t write(const void *buffer, size_t count);
 
     virtual status_t getTimestamp(ExtendedTimestamp &timestamp);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2c8d8de..52c4c0f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -77,6 +77,7 @@
 namespace {
 
 constexpr char TUNNEL_PEEK_KEY[] = "android._trigger-tunnel-peek";
+constexpr char TUNNEL_PEEK_SET_LEGACY_KEY[] = "android._tunnel-peek-set-legacy";
 
 }
 
@@ -2483,17 +2484,39 @@
         return BAD_VALUE;
     }
 
-    OMX_CONFIG_BOOLEANTYPE config;
-    InitOMXParams(&config);
-    config.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
+    OMX_CONFIG_BOOLEANTYPE tunnelPeekConfig;
+    InitOMXParams(&tunnelPeekConfig);
+    tunnelPeekConfig.bEnabled = (OMX_BOOL)(tunnelPeek != 0);
     status_t err = mOMXNode->setConfig(
             (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeek,
-            &config, sizeof(config));
+            &tunnelPeekConfig, sizeof(tunnelPeekConfig));
     if (err != OK) {
         ALOGE("decoder cannot set %s to %d (err %d)",
-              TUNNEL_PEEK_KEY, tunnelPeek, err);
+                TUNNEL_PEEK_KEY, tunnelPeek, err);
+    }
+    return err;
+}
+
+status_t ACodec::setTunnelPeekLegacy(int32_t isLegacy) {
+    if (mIsEncoder) {
+        ALOGE("encoder does not support %s", TUNNEL_PEEK_SET_LEGACY_KEY);
+        return BAD_VALUE;
+    }
+    if (!mTunneled) {
+        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_SET_LEGACY_KEY);
+        return BAD_VALUE;
     }
 
+    OMX_CONFIG_BOOLEANTYPE tunnelPeekLegacyModeConfig;
+    InitOMXParams(&tunnelPeekLegacyModeConfig);
+    tunnelPeekLegacyModeConfig.bEnabled = (OMX_BOOL)(isLegacy != 0);
+    status_t err = mOMXNode->setConfig(
+            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeekLegacyMode,
+            &tunnelPeekLegacyModeConfig, sizeof(tunnelPeekLegacyModeConfig));
+    if (err != OK) {
+        ALOGE("decoder cannot set video peek legacy mode to %d (err %d)",
+                isLegacy, err);
+    }
     return err;
 }
 
@@ -3307,10 +3330,12 @@
     if (err != OK) {
         ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                 sidebandHandle, err);
-        return err;
     }
 
-    return OK;
+    native_handle_close(sidebandHandle);
+    native_handle_delete(sidebandHandle);
+
+    return err;
 }
 
 status_t ACodec::setVideoPortFormatType(
@@ -5434,6 +5459,7 @@
                     notify->setInt32("channel-count", params.nChannels);
                     notify->setInt32("sample-rate", params.nSampleRate);
                     notify->setInt32("bitrate", params.nBitRate);
+                    notify->setInt32("aac-profile", params.eAACProfile);
                     break;
                 }
 
@@ -7931,11 +7957,22 @@
         }
     }
 
-    int32_t tunnelPeek = 0;
-    if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
-        status_t err = setTunnelPeek(tunnelPeek);
-        if (err != OK) {
-            return err;
+    {
+        int32_t tunnelPeek = 0;
+        if (params->findInt32(TUNNEL_PEEK_KEY, &tunnelPeek)) {
+            status_t err = setTunnelPeek(tunnelPeek);
+            if (err != OK) {
+                return err;
+            }
+        }
+    }
+    {
+        int32_t tunnelPeekSetLegacy = 0;
+        if (params->findInt32(TUNNEL_PEEK_SET_LEGACY_KEY, &tunnelPeekSetLegacy)) {
+            status_t err = setTunnelPeekLegacy(tunnelPeekSetLegacy);
+            if (err != OK) {
+                return err;
+            }
         }
     }
 
@@ -9208,4 +9245,19 @@
     return OK;
 }
 
+status_t ACodec::querySupportedParameters(std::vector<std::string> *names) {
+    if (!names) {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+status_t ACodec::subscribeToParameters([[maybe_unused]] const std::vector<std::string> &names) {
+    return OK;
+}
+
+status_t ACodec::unsubscribeFromParameters([[maybe_unused]] const std::vector<std::string> &names) {
+    return OK;
+}
+
 }  // namespace android
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a052a70..10baec4 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -109,6 +109,7 @@
 
     srcs: [
         "CodecBase.cpp",
+        "DataConverter.cpp",
         "FrameRenderTracker.cpp",
         "MediaCodecListWriter.cpp",
         "SkipCutBuffer.cpp",
@@ -125,6 +126,7 @@
     ],
 
     shared_libs: [
+        "libaudioutils",
         "libgui",
         "libhidlallocatorutils",
         "liblog",
@@ -251,6 +253,40 @@
         ],
     },
 }
+
+cc_library_shared {
+    name: "libstagefright_surface_utils",
+
+    srcs: [
+        "SurfaceUtils.cpp",
+    ],
+
+    shared_libs: [
+        "libgui",
+        "liblog",
+        "libui",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
 cc_library {
     name: "libstagefright",
 
@@ -266,7 +302,6 @@
         "CallbackMediaSource.cpp",
         "CameraSource.cpp",
         "CameraSourceTimeLapse.cpp",
-        "DataConverter.cpp",
         "FrameDecoder.cpp",
         "HevcUtils.cpp",
         "InterfaceUtils.cpp",
@@ -340,6 +375,7 @@
         "android.hardware.media.omx@1.0",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
     ],
 
     static_libs: [
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index b6acdc8..bfe8538 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -36,21 +36,9 @@
 
 using content::AttributionSourceState;
 
-static void AudioRecordCallbackFunction(int event, void *user, void *info) {
-    AudioSource *source = (AudioSource *) user;
-    switch (event) {
-        case AudioRecord::EVENT_MORE_DATA: {
-            source->dataCallback(*((AudioRecord::Buffer *) info));
-            break;
-        }
-        case AudioRecord::EVENT_OVERRUN: {
-            ALOGW("AudioRecord reported overrun!");
-            break;
-        }
-        default:
-            // does nothing
-            break;
-    }
+
+void AudioSource::onOverrun() {
+    ALOGW("AudioRecord reported overrun!");
 }
 
 AudioSource::AudioSource(
@@ -129,8 +117,7 @@
         audio_channel_in_mask_from_count(channelCount),
         attributionSource,
         (size_t) (bufCount * frameCount),
-        AudioRecordCallbackFunction,
-        this,
+        wp<AudioRecord::IAudioRecordCallback>{this},
         frameCount /*notificationFrames*/,
         AUDIO_SESSION_ALLOCATE,
         AudioRecord::TRANSFER_DEFAULT,
@@ -359,7 +346,7 @@
     return;
 }
 
-status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
+size_t AudioSource::onMoreData(const AudioRecord::Buffer& audioBuffer) {
     int64_t timeUs, position, timeNs;
     ExtendedTimestamp ts;
     ExtendedTimestamp::Location location;
@@ -384,21 +371,21 @@
 
     ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
     Mutex::Autolock autoLock(mLock);
+
     if (!mStarted) {
         ALOGW("Spurious callback from AudioRecord. Drop the audio data.");
-        return OK;
+        return audioBuffer.size();
     }
 
-    const size_t bufferSize = audioBuffer.size;
 
     // Drop retrieved and previously lost audio data.
     if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) {
         (void) mRecord->getInputFramesLost();
-        int64_t receievedFrames = bufferSize / mRecord->frameSize();
+        int64_t receievedFrames = audioBuffer.size() / mRecord->frameSize();
         ALOGV("Drop audio data(%" PRId64 " frames) at %" PRId64 "/%" PRId64 " us",
                 receievedFrames, timeUs, mStartTimeUs);
         mNumFramesSkipped += receievedFrames;
-        return OK;
+        return audioBuffer.size();
     }
 
     if (mStopSystemTimeUs != -1 && timeUs >= mStopSystemTimeUs) {
@@ -406,7 +393,7 @@
                 (long long)timeUs, (long long)mStopSystemTimeUs);
         mNoMoreFramesToRead = true;
         mFrameAvailableCondition.signal();
-        return OK;
+        return audioBuffer.size();
     }
 
     if (mNumFramesReceived == 0 && mPrevSampleTimeUs == 0) {
@@ -427,7 +414,7 @@
     }
 
     CHECK_EQ(numLostBytes & 1, 0u);
-    CHECK_EQ(audioBuffer.size & 1, 0u);
+    CHECK_EQ(audioBuffer.size() & 1, 0u);
     if (numLostBytes > 0) {
         // Loss of audio frames should happen rarely; thus the LOGW should
         // not cause a logging spam
@@ -449,17 +436,17 @@
         queueInputBuffer_l(lostAudioBuffer, timeUs);
     }
 
-    if (audioBuffer.size == 0) {
+    if (audioBuffer.size() == 0) {
         ALOGW("Nothing is available from AudioRecord callback buffer");
-        return OK;
+        return audioBuffer.size();
     }
 
-    MediaBuffer *buffer = new MediaBuffer(bufferSize);
+    MediaBuffer *buffer = new MediaBuffer(audioBuffer.size());
     memcpy((uint8_t *) buffer->data(),
-            audioBuffer.i16, audioBuffer.size);
-    buffer->set_range(0, bufferSize);
+            audioBuffer.data(), audioBuffer.size());
+    buffer->set_range(0, audioBuffer.size());
     queueInputBuffer_l(buffer, timeUs);
-    return OK;
+    return audioBuffer.size();
 }
 
 void AudioSource::queueInputBuffer_l(MediaBuffer *buffer, int64_t timeUs) {
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 95afa62..9607425 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -564,9 +564,11 @@
     // Set the preview display. Skip this if mSurface is null because
     // applications may already set a surface to the camera.
     if (mSurface != NULL) {
-        // This CHECK is good, since we just passed the lock/unlock
-        // check earlier by calling mCamera->setParameters().
-        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
+        // Surface may be set incorrectly or could already be used even if we just
+        // passed the lock/unlock check earlier by calling mCamera->setParameters().
+        if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
+            return err;
+        }
     }
 
     // Use buffer queue to receive video buffers from camera
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index a241e29..3df8766 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -35,6 +35,7 @@
 #include <media/stagefright/FrameCaptureProcessor.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
@@ -49,7 +50,7 @@
 
 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
-        int32_t dstBpp, bool allocRotated, bool metaOnly) {
+        int32_t dstBpp, uint32_t bitDepth, bool allocRotated, bool metaOnly) {
     int32_t rotationAngle;
     if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
         rotationAngle = 0;  // By default, no rotation
@@ -104,7 +105,7 @@
     }
 
     VideoFrame frame(width, height, displayWidth, displayHeight,
-            tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);
+            tileWidth, tileHeight, rotationAngle, dstBpp, bitDepth, !metaOnly, iccSize);
 
     size_t size = frame.getFlattenedSize();
     sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
@@ -125,15 +126,15 @@
 
 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
-        int32_t dstBpp, bool allocRotated = false) {
-    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp,
+        int32_t dstBpp, uint8_t bitDepth, bool allocRotated = false) {
+    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth,
             allocRotated, false /*metaOnly*/);
 }
 
 sp<IMemory> allocMetaFrame(const sp<MetaData>& trackMeta,
         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
-        int32_t dstBpp) {
-    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp,
+        int32_t dstBpp, uint8_t bitDepth) {
+    return allocVideoFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth,
             false /*allocRotated*/, true /*metaOnly*/);
 }
 
@@ -192,6 +193,13 @@
             *dstBpp = 4;
             return true;
         }
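+        // RGBA_1010102 packs 10-bit R/G/B channels plus a 2-bit alpha into 32 bits,
+        // so the destination is still 4 bytes per pixel.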
+        case HAL_PIXEL_FORMAT_RGBA_1010102:
+        {
+            *dstFormat = (OMX_COLOR_FORMATTYPE)COLOR_Format32bitABGR2101010;
+            *captureFormat = ui::PixelFormat::RGBA_1010102;
+            *dstBpp = 4;
+            return true;
+        }
         default:
         {
             ALOGE("Unsupported color format: %d", colorFormat);
@@ -203,7 +211,7 @@
 
 //static
 sp<IMemory> FrameDecoder::getMetadataOnly(
-        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
+        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
     OMX_COLOR_FORMATTYPE dstFormat;
     ui::PixelFormat captureFormat;
     int32_t dstBpp;
@@ -227,7 +235,8 @@
         }
     }
 
-    sp<IMemory> metaMem = allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp);
+    sp<IMemory> metaMem =
+            allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth);
 
     // try to fill sequence meta's duration based on average frame rate,
     // default to 33ms if frame rate is unavailable.
@@ -262,13 +271,10 @@
 }
 
 bool isHDR(const sp<AMessage> &format) {
-    uint32_t standard, range, transfer;
+    uint32_t standard, transfer;
     if (!format->findInt32("color-standard", (int32_t*)&standard)) {
         standard = 0;
     }
-    if (!format->findInt32("color-range", (int32_t*)&range)) {
-        range = 0;
-    }
     if (!format->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
@@ -526,8 +532,11 @@
         return NULL;
     }
 
-    // TODO: Use Flexible color instead
-    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    if (dstFormat() == COLOR_Format32bitABGR2101010) {
+        videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
+    } else {
+        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    }
 
     // For the thumbnail extraction case, try to allocate single buffer in both
     // input and output ports, if seeking to a sync frame. NOTE: This request may
@@ -635,6 +644,16 @@
         crop_bottom = height - 1;
     }
 
+    int32_t slice_height;
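+    // A decoder may pad its output buffer vertically; a positive slice-height is
+    // the real height of the source buffer that the converter must read from.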
+    if (outputFormat->findInt32("slice-height", &slice_height) && slice_height > 0) {
+        height = slice_height;
+    }
+
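+    // COLOR_FormatYUVP010 carries 10-bit samples in 16-bit containers; anything
+    // else requested here is 8-bit YUV420 planar.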
+    uint32_t bitDepth = 8;
+    if (COLOR_FormatYUVP010 == srcFormat) {
+        bitDepth = 10;
+    }
+
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
                 trackMeta(),
@@ -643,6 +662,7 @@
                 0,
                 0,
                 dstBpp(),
+                bitDepth,
                 mCaptureLayer != nullptr /*allocRotated*/);
         if (frameMem == nullptr) {
             return NO_MEMORY;
@@ -834,8 +854,11 @@
         return NULL;
     }
 
-    // TODO: Use Flexible color instead
-    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    if (dstFormat() == COLOR_Format32bitABGR2101010) {
+        videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
+    } else {
+        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    }
 
     if ((mGridRows == 1) && (mGridCols == 1)) {
         videoFormat->setInt32("android._num-input-buffers", 1);
@@ -890,7 +913,7 @@
         return ERROR_MALFORMED;
     }
 
-    int32_t width, height, stride;
+    int32_t width, height, stride, srcFormat;
     if (outputFormat->findInt32("width", &width) == false) {
         ALOGE("MediaImageDecoder::onOutputReceived:width is missing in outputFormat");
         return ERROR_MALFORMED;
@@ -903,10 +926,19 @@
         ALOGE("MediaImageDecoder::onOutputReceived:stride is missing in outputFormat");
         return ERROR_MALFORMED;
     }
+    if (outputFormat->findInt32("color-format", &srcFormat) == false) {
+        ALOGE("MediaImageDecoder::onOutputReceived: color format is missing in outputFormat");
+        return ERROR_MALFORMED;
+    }
+
+    uint32_t bitDepth = 8;
+    if (COLOR_FormatYUVP010 == srcFormat) {
+        bitDepth = 10;
+    }
 
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
-                trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
+                trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp(), bitDepth);
 
         if (frameMem == nullptr) {
             return NO_MEMORY;
@@ -917,9 +949,6 @@
         setFrame(frameMem);
     }
 
-    int32_t srcFormat;
-    CHECK(outputFormat->findInt32("color-format", &srcFormat));
-
     ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
@@ -941,6 +970,11 @@
         crop_bottom = height - 1;
     }
 
+    int32_t slice_height;
+    if (outputFormat->findInt32("slice-height", &slice_height) && slice_height > 0) {
+        height = slice_height;
+    }
+
     int32_t crop_width, crop_height;
     crop_width = crop_right - crop_left + 1;
     crop_height = crop_bottom - crop_top + 1;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7c7fcac..63d3180 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -36,6 +36,7 @@
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALookup.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
@@ -44,6 +45,7 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
 #include <media/mediarecorder.h>
@@ -156,7 +158,7 @@
     bool isHeic() const { return mIsHeic; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
-    bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
+    bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
     bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
     void addChunkOffset(off64_t offset);
     void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
@@ -164,6 +166,7 @@
     TrackId& getTrackId() { return mTrackId; }
     status_t dump(int fd, const Vector<String16>& args) const;
     static const char *getFourCCForMime(const char *mime);
+    const char *getDoviFourCC() const;
     const char *getTrackType() const;
     void resetInternal();
     int64_t trackMetaDataSize();
@@ -316,6 +319,7 @@
     volatile bool mStarted;
     bool mIsAvc;
     bool mIsHevc;
+    bool mIsDovi;
     bool mIsAudio;
     bool mIsVideo;
     bool mIsHeic;
@@ -370,6 +374,8 @@
     uint8_t mProfileCompatible;
     uint8_t mLevelIdc;
 
+    int32_t mDoviProfile;
+
     void *mCodecSpecificData;
     size_t mCodecSpecificDataSize;
     bool mGotAllCodecSpecificData;
@@ -422,6 +428,8 @@
     status_t parseHEVCCodecSpecificData(
             const uint8_t *data, size_t size, HevcParameterSets &paramSets);
 
+    status_t getDolbyVisionProfile();
+
     // Track authoring progress status
     void trackProgressStatus(int64_t timeUs, status_t err = OK);
     void initTrackingProgressStatus(MetaData *params);
@@ -459,6 +467,7 @@
     void writePaspBox();
     void writeAvccBox();
     void writeHvccBox();
+    void writeDoviConfigBox();
     void writeUrlBox();
     void writeDrefBox();
     void writeDinfBox();
@@ -470,6 +479,7 @@
     void writeHdlrBox();
     void writeTkhdBox(uint32_t now);
     void writeColrBox();
+    void writeMdcvAndClliBoxes();
     void writeMp4aEsdsBox();
     void writeMp4vEsdsBox();
     void writeAudioFourCCBox();
@@ -617,6 +627,17 @@
     return OK;
 }
 
+const char *MPEG4Writer::Track::getDoviFourCC() const {
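+    // Map the parsed Dolby Vision profile to a sample-entry FourCC: the
+    // non-backward-compatible HEVC profile uses "dvh1", while backward-compatible
+    // profiles reuse the base layer's FourCC ("hvc1" for HEVC, "avc1" for AVC).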
+    if (mDoviProfile == DolbyVisionProfileDvheStn) {
+        return "dvh1";
+    } else if (mDoviProfile == DolbyVisionProfileDvheSt) {
+        return "hvc1";
+    } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
+        return "avc1";
+    }
+    return nullptr;
+}
+
 // static
 const char *MPEG4Writer::Track::getFourCCForMime(const char *mime) {
     if (mime == NULL) {
@@ -671,7 +692,14 @@
         mIsBackgroundMode |= isBackgroundMode;
     }
 
-    if (Track::getFourCCForMime(mime) == NULL) {
+    if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+        // For MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        // getFourCCForMime() needs the profile information
+        // to decide the final FourCC code.
+        // So we allow the track to be created now and
+        // assign its FourCC code later using getDoviFourCC().
+        ALOGV("Add source mime '%s'", mime);
+    } else if (Track::getFourCCForMime(mime) == NULL) {
         ALOGE("Unsupported mime '%s'", mime);
         return ERROR_UNSUPPORTED;
     }
@@ -2150,6 +2178,7 @@
       mMinCttsOffsetTimeUs(0),
       mMinCttsOffsetTicks(0),
       mMaxCttsOffsetTicks(0),
+      mDoviProfile(0),
       mCodecSpecificData(NULL),
       mCodecSpecificDataSize(0),
       mGotAllCodecSpecificData(false),
@@ -2176,6 +2205,7 @@
     mMeta->findCString(kKeyMIMEType, &mime);
     mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
     mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
@@ -2610,7 +2640,12 @@
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
-        mMeta->findData(kKeyDVCC, &type, &data, &size);
+        getDolbyVisionProfile();
+        if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
+                !mMeta->findData(kKeyHVCC, &type, &data, &size)) {
+            ALOGE("Failed to find HVCC/AVCC codec config for Dolby Vision");
+            return;
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
@@ -2651,6 +2686,7 @@
         free(mCodecSpecificData);
         mCodecSpecificData = NULL;
     }
+
 }
 
 void MPEG4Writer::Track::initTrackingProgressStatus(MetaData *params) {
@@ -3329,6 +3365,40 @@
     return OK;
 }
 
+status_t MPEG4Writer::Track::getDolbyVisionProfile() {
+    uint32_t type;
+    const void *data = NULL;
+    size_t size = 0;
+
+    if (!mMeta->findData(kKeyDVCC, &type, &data, &size) &&
+        !mMeta->findData(kKeyDVVC, &type, &data, &size) &&
+        !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+        ALOGE("Failed to get the Dolby Vision config (dvcC/dvvC/dvwC)");
+        return ERROR_MALFORMED;
+    }
+    static const ALookup<uint8_t, int32_t> dolbyVisionProfileMap = {
+        {1, DolbyVisionProfileDvavPen},
+        {3, DolbyVisionProfileDvheDen},
+        {4, DolbyVisionProfileDvheDtr},
+        {5, DolbyVisionProfileDvheStn},
+        {6, DolbyVisionProfileDvheDth},
+        {7, DolbyVisionProfileDvheDtb},
+        {8, DolbyVisionProfileDvheSt},
+        {9, DolbyVisionProfileDvavSe},
+        {10, DolbyVisionProfileDvav110}
+    };
+
+    // Dolby Vision profile information is extracted as per
+    // https://dolby.my.salesforce.com/sfc/p/#700000009YuG/a/4u000000l6FB/076wHYEmyEfz09m0V1bo85_25hlUJjaiWTbzorNmYY4
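+    // dv_profile occupies the upper seven bits of the third byte of the
+    // configuration record (the first two bytes hold the major/minor version).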
+    uint8_t dv_profile = ((((uint8_t *)data)[2] >> 1) & 0x7f);
+
+    if (!dolbyVisionProfileMap.map(dv_profile, &mDoviProfile)) {
+        ALOGE("Failed to get Dolby Profile from DV Config data");
+        return ERROR_MALFORMED;
+    }
+    return OK;
+}
+
 /*
  * Updates the drift time from the audio track so that
  * the video track can get the updated drift time information
@@ -3474,8 +3544,27 @@
                     err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
                             buffer->range_length());
                 }
+                if (mIsDovi) {
+                    err = getDolbyVisionProfile();
+                    if (err == OK) {
+                        const void *data = NULL;
+                        size_t size = 0;
+                        uint32_t type = 0;
+                        if (mDoviProfile == DolbyVisionProfileDvavSe) {
+                            mMeta->findData(kKeyAVCC, &type, &data, &size);
+                        } else if (mDoviProfile < DolbyVisionProfileDvavSe) {
+                            mMeta->findData(kKeyHVCC, &type, &data, &size);
+                        } else {
+                            ALOGW("DV Profiles > DolbyVisionProfileDvavSe are not supported");
+                            err = ERROR_MALFORMED;
+                        }
+                        if (err == OK && data != NULL &&
+                                copyCodecSpecificData((uint8_t *)data, size) == OK) {
+                            mGotAllCodecSpecificData = true;
+                        }
+                    }
+                }
             }
-
             buffer->release();
             buffer = NULL;
             if (OK != err) {
@@ -4173,6 +4262,7 @@
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
         if (!mCodecSpecificData ||
             mCodecSpecificDataSize <= 0) {
@@ -4297,7 +4387,13 @@
     const char *mime;
     bool success = mMeta->findCString(kKeyMIMEType, &mime);
     CHECK(success);
-    const char *fourcc = getFourCCForMime(mime);
+    const char *fourcc;
+    if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+        fourcc = getDoviFourCC();
+    } else {
+        fourcc = getFourCCForMime(mime);
+    }
+
     if (fourcc == NULL) {
         ALOGE("Unknown mime type '%s'.", mime);
         TRESPASS();
@@ -4337,10 +4433,20 @@
         writeAvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
         writeHvccBox();
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
+        if (mDoviProfile <= DolbyVisionProfileDvheSt) {
+            writeHvccBox();
+        } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
+            writeAvccBox();
+        } else {
+            TRESPASS("Unsupported Dolby Vision profile");
+        }
+        writeDoviConfigBox();
     }
 
     writePaspBox();
     writeColrBox();
+    writeMdcvAndClliBoxes();
     mOwner->endBox();  // mp4v, s263 or avc1
 }
 
@@ -4349,30 +4455,82 @@
     memset(&aspects, 0, sizeof(aspects));
     // Color metadata may have changed.
     sp<MetaData> meta = mSource->getFormat();
-    // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
-        int32_t primaries, transfer, coeffs;
-        bool fullRange;
-        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
-                asString(aspects.mPrimaries),
-                asString(aspects.mTransfer),
-                asString(aspects.mMatrixCoeffs),
-                asString(aspects.mRange));
-        ColorUtils::convertCodecColorAspectsToIsoAspects(
-                aspects, &primaries, &transfer, &coeffs, &fullRange);
-        mOwner->beginBox("colr");
-        mOwner->writeFourcc("nclx");
-        mOwner->writeInt16(primaries);
-        mOwner->writeInt16(transfer);
-        mOwner->writeInt16(coeffs);
-        mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
-        mOwner->endBox(); // colr
-    } else {
+    bool findPrimaries = meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries);
+    bool findTransfer = meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer);
+    bool findMatrix = meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs);
+    bool findRange = meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange);
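+    // Evaluate all four lookups unconditionally so that every aspect that is
+    // present gets recorded, even when the others are missing.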
+    if (!findPrimaries && !findTransfer && !findMatrix && !findRange) {
         ALOGV("no color information");
+        return;
     }
+
+    int32_t primaries, transfer, coeffs;
+    bool fullRange;
+    ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+            asString(aspects.mPrimaries),
+            asString(aspects.mTransfer),
+            asString(aspects.mMatrixCoeffs),
+            asString(aspects.mRange));
+    ColorUtils::convertCodecColorAspectsToIsoAspects(
+            aspects, &primaries, &transfer, &coeffs, &fullRange);
+    mOwner->beginBox("colr");
+    mOwner->writeFourcc("nclx");
+    mOwner->writeInt16(primaries);
+    mOwner->writeInt16(transfer);
+    mOwner->writeInt16(coeffs);
+    mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
+    mOwner->endBox(); // colr
+}
+
+void MPEG4Writer::Track::writeMdcvAndClliBoxes() {
+    sp<MetaData> meta = mSource->getFormat();
+    uint32_t type;
+    const uint8_t* data;
+    size_t size;
+    bool found =
+            meta->findData(kKeyHdrStaticInfo, &type, reinterpret_cast<const void**>(&data), &size);
+    if (!found) {
+        return; // Nothing to encode.
+    }
+    if (size != 25) {
+        ALOGW("Ignoring HDR static info with unexpected size %d", (int)size);
+        return;
+    }
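+    // The 25-byte payload is a type byte followed by twelve little-endian 16-bit
+    // values: R/G/B display primaries (x,y), white point (x,y), max/min display
+    // mastering luminance, max content light level and max picture average light level.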
+    uint16_t displayPrimariesRX = U16LE_AT(&data[1]);
+    uint16_t displayPrimariesRY = U16LE_AT(&data[3]);
+
+    uint16_t displayPrimariesGX = U16LE_AT(&data[5]);
+    uint16_t displayPrimariesGY = U16LE_AT(&data[7]);
+
+    uint16_t displayPrimariesBX = U16LE_AT(&data[9]);
+    uint16_t displayPrimariesBY = U16LE_AT(&data[11]);
+
+    uint16_t whitePointX = U16LE_AT(&data[13]);
+    uint16_t whitePointY = U16LE_AT(&data[15]);
+
+    uint16_t maxDisplayMasteringLuminance = U16LE_AT(&data[17]);
+    uint16_t minDisplayMasteringLuminance = U16LE_AT(&data[19]);
+
+    uint16_t maxContentLightLevel = U16LE_AT(&data[21]);
+    uint16_t maxPicAverageLightLevel = U16LE_AT(&data[23]);
+
+    mOwner->beginBox("mdcv");
+    mOwner->writeInt16(displayPrimariesGX);
+    mOwner->writeInt16(displayPrimariesGY);
+    mOwner->writeInt16(displayPrimariesBX);
+    mOwner->writeInt16(displayPrimariesBY);
+    mOwner->writeInt16(displayPrimariesRX);
+    mOwner->writeInt16(displayPrimariesRY);
+    mOwner->writeInt16(whitePointX);
+    mOwner->writeInt16(whitePointY);
+    mOwner->writeInt32(maxDisplayMasteringLuminance * 10000);
+    mOwner->writeInt32(minDisplayMasteringLuminance * 10000);
+    mOwner->endBox();  // mdcv.
+
+    mOwner->beginBox("clli");
+    mOwner->writeInt16(maxContentLightLevel);
+    mOwner->writeInt16(maxPicAverageLightLevel);
+    mOwner->endBox();  // clli.
 }
 
 void MPEG4Writer::Track::writeAudioFourCCBox() {
@@ -4829,12 +4987,11 @@
     mOwner->endBox();  // avcC
 }
 
-
 void MPEG4Writer::Track::writeHvccBox() {
     CHECK(mCodecSpecificData);
     CHECK_GE(mCodecSpecificDataSize, 5u);
 
-    // Patch avcc's lengthSize field to match the number
+    // Patch hvcc's lengthSize field to match the number
     // of bytes we use to indicate the size of a nal unit.
     uint8_t *ptr = (uint8_t *)mCodecSpecificData;
     ptr[21] = (ptr[21] & 0xfc) | (mOwner->useNalLengthFour() ? 3 : 1);
@@ -4843,6 +5000,32 @@
     mOwner->endBox();  // hvcC
 }
 
+void MPEG4Writer::Track::writeDoviConfigBox() {
+    CHECK_NE(mDoviProfile, 0u);
+
+    uint32_t type = 0;
+    const void *data = nullptr;
+    size_t size = 0;
+    // check to see which key has the configuration box.
+    if (mMeta->findData(kKeyDVCC, &type, &data, &size) ||
+        mMeta->findData(kKeyDVVC, &type, &data, &size) ||
+        mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+
+        // If this configuration box is present we write it out; otherwise this
+        // mp4 will be interpreted as a plain backward-compatible stream.
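+        // dvcC covers profiles up to 7, dvvC profiles 8 through 10, and dvwC
+        // anything above that, matching the ranges checked below.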
+        if (mDoviProfile > DolbyVisionProfileDvav110) {
+            mOwner->beginBox("dvwC");
+        } else if (mDoviProfile > DolbyVisionProfileDvheDtb) {
+            mOwner->beginBox("dvvC");
+        } else {
+            mOwner->beginBox("dvcC");
+        }
+        mOwner->write(data, size);
+        mOwner->endBox();  // dvwC/dvvC/dvcC
+    }
+}
+
 void MPEG4Writer::Track::writeD263Box() {
     mOwner->beginBox("d263");
     mOwner->writeInt32(0);  // vendor
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index da581fb..e50880a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -102,6 +102,8 @@
 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
 static const char *kCodecModeAudio = "audio";
+static const char *kCodecModeImage = "image";
+static const char *kCodecModeUnknown = "unknown";
 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
@@ -112,6 +114,15 @@
 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
 static const char *kCodecPriority = "android.media.mediacodec.priority";
+static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
+static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
+static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
+static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
+static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
+static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
+static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
 
 // Min/Max QP before shaping
 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -177,9 +188,12 @@
 // XXX suppress until we get our representation right
 static bool kEmitHistogram = false;
 
+static int64_t getId(IResourceManagerClient const * client) {
+    return (int64_t) client;
+}
 
 static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
-    return (int64_t) client.get();
+    return getId(client.get());
 }
 
 static bool isResourceError(status_t err) {
@@ -196,12 +210,20 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 struct ResourceManagerClient : public BnResourceManagerClient {
-    explicit ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {}
+    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid) :
+            mMediaCodec(codec), mPid(pid) {}
 
     Status reclaimResource(bool* _aidl_return) override {
         sp<MediaCodec> codec = mMediaCodec.promote();
         if (codec == NULL) {
-            // codec is already gone.
+            // Codec is already gone, so remove the resources as well
+            ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+            std::shared_ptr<IResourceManagerService> service =
+                    IResourceManagerService::fromBinder(binder);
+            if (service == nullptr) {
+                ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
+            } else {
+                service->removeClient(mPid, getId(this));
+            }
             *_aidl_return = true;
             return Status::ok();
         }
@@ -238,6 +260,7 @@
 
 private:
     wp<MediaCodec> mMediaCodec;
+    int32_t mPid;
 
     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
 };
@@ -247,7 +270,7 @@
             const std::shared_ptr<IResourceManagerClient> &client);
     virtual ~ResourceManagerServiceProxy();
 
-    void init();
+    status_t init();
 
     // implements DeathRecipient
     static void BinderDiedCallback(void* cookie);
@@ -276,6 +299,9 @@
         pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
         : mPid(pid), mUid(uid), mClient(client),
           mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
+    if (mUid == MediaCodec::kNoUid) {
+        mUid = AIBinder_getCallingUid();
+    }
     if (mPid == MediaCodec::kNoPid) {
         mPid = AIBinder_getCallingPid();
     }
@@ -294,12 +320,26 @@
     }
 }
 
-void MediaCodec::ResourceManagerServiceProxy::init() {
+status_t MediaCodec::ResourceManagerServiceProxy::init() {
     ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
     mService = IResourceManagerService::fromBinder(binder);
     if (mService == nullptr) {
         ALOGE("Failed to get ResourceManagerService");
-        return;
+        return UNKNOWN_ERROR;
+    }
+
+    int callerPid = AIBinder_getCallingPid();
+    int callerUid = AIBinder_getCallingUid();
+    if (mPid != callerPid || mUid != callerUid) {
+        // Media processes don't need special permissions to act on behalf of other processes.
+        if (callerUid != AID_MEDIA) {
+            char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
+            if (!checkCallingPermission(String16(permission))) {
+                ALOGW("%s is required to override the caller's PID for media resource management.",
+                        permission);
+                return PERMISSION_DENIED;
+            }
+        }
     }
 
     // Kill clients pending removal.
@@ -310,6 +350,7 @@
 
     // after this, require mLock whenever using mService
     AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
+    return OK;
 }
 
 //static
@@ -649,6 +690,24 @@
     notify->post();
 }
 
+static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
+    switch (domain) {
+        case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
+        case MediaCodec::DOMAIN_AUDIO: return MediaResourceSubType::kAudioCodec;
+        case MediaCodec::DOMAIN_IMAGE: return MediaResourceSubType::kImageCodec;
+        default:                       return MediaResourceSubType::kUnspecifiedSubType;
+    }
+}
+
+static const char * toCodecMode(MediaCodec::Domain domain) {
+    switch (domain) {
+        case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
+        case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
+        case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
+        default:                       return kCodecModeUnknown;
+    }
+}
+
 }  // namespace
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -744,10 +803,13 @@
       mFlags(0),
       mStickyError(OK),
       mSoftRenderer(NULL),
-      mIsVideo(false),
-      mVideoWidth(0),
-      mVideoHeight(0),
+      mDomain(DOMAIN_UNKNOWN),
+      mWidth(0),
+      mHeight(0),
       mRotationDegrees(0),
+      mConfigColorTransfer(-1),
+      mHDRStaticInfo(false),
+      mHDR10PlusInfo(false),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -755,7 +817,7 @@
       mTunneledInputWidth(0),
       mTunneledInputHeight(0),
       mTunneled(false),
-      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
+      mTunnelPeekState(TunnelPeekState::kLegacyMode),
       mHaveInputSurface(false),
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
@@ -773,13 +835,8 @@
       mInputBufferCounter(0),
       mGetCodecBase(getCodecBase),
       mGetCodecInfo(getCodecInfo) {
-    if (uid == kNoUid) {
-        mUid = AIBinder_getCallingUid();
-    } else {
-        mUid = uid;
-    }
-    mResourceManagerProxy = new ResourceManagerServiceProxy(pid, mUid,
-            ::ndk::SharedRefBase::make<ResourceManagerClient>(this));
+    mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
+            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid));
     if (!mGetCodecBase) {
         mGetCodecBase = [](const AString &name, const char *owner) {
             return GetCodecBase(name, owner);
@@ -807,7 +864,6 @@
             return NAME_NOT_FOUND;
         };
     }
-
     initMediametrics();
 }
 
@@ -898,12 +954,72 @@
         mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
+
+    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
+    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
 #endif
 }
 
+void MediaCodec::updateHDRFormatMetric() {
+    int32_t profile = -1;
+    AString mediaType;
+    if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
+            && mOutputFormat->findString("mime", &mediaType)) {
+        hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+        mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+    }
+}
+
+hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
+        const AString &mediaType) {
+    switch (transfer) {
+        case COLOR_TRANSFER_ST2084:
+            if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+                switch (profile) {
+                    case VP9Profile2HDR:
+                        return HDR_FORMAT_HDR10;
+                    case VP9Profile2HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+                switch (profile) {
+                    case AV1ProfileMain10HDR10:
+                        return HDR_FORMAT_HDR10;
+                    case AV1ProfileMain10HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+                switch (profile) {
+                    case HEVCProfileMain10HDR10:
+                        return HDR_FORMAT_HDR10;
+                    case HEVCProfileMain10HDR10Plus:
+                        return HDR_FORMAT_HDR10PLUS;
+                    default:
+                        return HDR_FORMAT_NONE;
+                }
+            } else {
+                return HDR_FORMAT_NONE;
+            }
+        case COLOR_TRANSFER_HLG:
+            if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+                return HDR_FORMAT_HLG;
+            } else {
+                // TODO: DOLBY format
+                return HDR_FORMAT_NONE;
+            }
+        default:
+            return HDR_FORMAT_NONE;
+    }
+}
+
+
 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
     ALOGD("MediaCodec::updateEphemeralMediametrics()");
 
@@ -971,6 +1087,8 @@
 
 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
     switch(state) {
+        case TunnelPeekState::kLegacyMode:
+            return "LegacyMode";
         case TunnelPeekState::kEnabledNoBuffer:
             return "EnabledNoBuffer";
         case TunnelPeekState::kDisabledNoBuffer:
@@ -997,6 +1115,9 @@
     TunnelPeekState previousState = mTunnelPeekState;
     if(tunnelPeek == 0){
         switch (mTunnelPeekState) {
+            case TunnelPeekState::kLegacyMode:
+                msg->setInt32("android._tunnel-peek-set-legacy", 0);
+                [[fallthrough]];
             case TunnelPeekState::kEnabledNoBuffer:
                 mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                 break;
@@ -1009,6 +1130,9 @@
         }
     } else {
         switch (mTunnelPeekState) {
+            case TunnelPeekState::kLegacyMode:
+                msg->setInt32("android._tunnel-peek-set-legacy", 0);
+                [[fallthrough]];
             case TunnelPeekState::kDisabledNoBuffer:
                 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                 break;
@@ -1155,7 +1279,7 @@
         });
     }
 
-    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
         mBytesInput += buffer->size();
         mFramesInput++;
     }
@@ -1184,7 +1308,7 @@
 
     CHECK_NE(mState, UNINITIALIZED);
 
-    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
         int32_t flags = 0;
         (void) buffer->meta()->findInt32("flags", &flags);
 
@@ -1363,7 +1487,11 @@
 }
 
 status_t MediaCodec::init(const AString &name) {
-    mResourceManagerProxy->init();
+    status_t err = mResourceManagerProxy->init();
+    if (err != OK) {
+        mCodec = NULL; // remove the codec
+        return err;
+    }
 
     // save init parameters for reset
     mInitName = name;
@@ -1378,7 +1506,7 @@
     bool secureCodec = false;
     const char *owner = "";
     if (!name.startsWith("android.filter.")) {
-        status_t err = mGetCodecInfo(name, &mCodecInfo);
+        err = mGetCodecInfo(name, &mCodecInfo);
         if (err != OK) {
             mCodec = NULL;  // remove the codec.
             return err;
@@ -1392,7 +1520,13 @@
         mCodecInfo->getSupportedMediaTypes(&mediaTypes);
         for (size_t i = 0; i < mediaTypes.size(); ++i) {
             if (mediaTypes[i].startsWith("video/")) {
-                mIsVideo = true;
+                mDomain = DOMAIN_VIDEO;
+                break;
+            } else if (mediaTypes[i].startsWith("audio/")) {
+                mDomain = DOMAIN_AUDIO;
+                break;
+            } else if (mediaTypes[i].startsWith("image/")) {
+                mDomain = DOMAIN_IMAGE;
                 break;
             }
         }
@@ -1405,12 +1539,17 @@
         return NAME_NOT_FOUND;
     }
 
-    if (mIsVideo) {
+    if (mDomain == DOMAIN_VIDEO) {
         // video codec needs dedicated looper
         if (mCodecLooper == NULL) {
+            status_t err = OK;
             mCodecLooper = new ALooper;
             mCodecLooper->setName("CodecLooper");
-            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+            err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+            if (OK != err) {
+                ALOGE("Codec Looper failed to start");
+                return err;
+            }
         }
 
         mCodecLooper->registerHandler(mCodec);
@@ -1438,17 +1577,15 @@
 
     if (mMetricsHandle != 0) {
         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
-        mediametrics_setCString(mMetricsHandle, kCodecMode,
-                                mIsVideo ? kCodecModeVideo : kCodecModeAudio);
+        mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
     }
 
-    if (mIsVideo) {
+    if (mDomain == DOMAIN_VIDEO) {
         mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
     }
 
-    status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(secureCodec, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
     for (int i = 0; i <= kMaxRetry; ++i) {
         if (i > 0) {
             // Don't try to reclaim resource for the first time.
@@ -1529,16 +1666,16 @@
         mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
     }
 
-    if (mIsVideo) {
-        format->findInt32("width", &mVideoWidth);
-        format->findInt32("height", &mVideoHeight);
+    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
+        format->findInt32("width", &mWidth);
+        format->findInt32("height", &mHeight);
         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
             mRotationDegrees = 0;
         }
 
         if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mVideoWidth);
-            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mVideoHeight);
+            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
+            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
             mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
             int32_t maxWidth = 0;
             if (format->findInt32("max-width", &maxWidth)) {
@@ -1552,28 +1689,48 @@
             if (format->findInt32("color-format", &colorFormat)) {
                 mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
             }
-            float frameRate = -1.0;
-            if (format->findFloat("frame-rate", &frameRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+            if (mDomain == DOMAIN_VIDEO) {
+                float frameRate = -1.0;
+                if (format->findFloat("frame-rate", &frameRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+                }
+                float captureRate = -1.0;
+                if (format->findFloat("capture-rate", &captureRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+                }
+                float operatingRate = -1.0;
+                if (format->findFloat("operating-rate", &operatingRate)) {
+                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+                }
+                int32_t priority = -1;
+                if (format->findInt32("priority", &priority)) {
+                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+                }
             }
-            float captureRate = -1.0;
-            if (format->findFloat("capture-rate", &captureRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+            int32_t colorStandard = -1;
+            if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
             }
-            float operatingRate = -1.0;
-            if (format->findFloat("operating-rate", &operatingRate)) {
-                mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+            int32_t colorRange = -1;
+            if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
             }
-            int32_t priority = -1;
-            if (format->findInt32("priority", &priority)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+            int32_t colorTransfer = -1;
+            if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+                mConfigColorTransfer = colorTransfer;
+                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
+            }
+            HDRStaticInfo info;
+            if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
+                    && ColorUtils::isHDRStaticInfoValid(&info)) {
+                mHDRStaticInfo = true;
             }
         }
 
         // Prevent possible integer overflow in downstream code.
-        if (mVideoWidth < 0 || mVideoHeight < 0 ||
-               (uint64_t)mVideoWidth * mVideoHeight > (uint64_t)INT32_MAX / 4) {
-            ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
+        if (mWidth < 0 || mHeight < 0 ||
+               (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
+            ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
             return BAD_VALUE;
         }
 
@@ -1606,7 +1763,7 @@
     }
 
     // push min/max QP to MediaMetrics after shaping
-    if (mIsVideo && mMetricsHandle != 0) {
+    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
         int32_t qpIMin = -1;
         if (format->findInt32("video-qp-i-min", &qpIMin)) {
             mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
@@ -1659,7 +1816,8 @@
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
+            toMediaResourceSubType(mDomain)));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
@@ -2081,7 +2239,7 @@
                       bool reverse) {
     AString mediaType;
     if (!format->findString("mime", &mediaType)) {
-        ALOGW("mapFormat: no mediaType information");
+        ALOGV("mapFormat: no mediaType information");
         return;
     }
     ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
@@ -2240,7 +2398,7 @@
 }
 
 uint64_t MediaCodec::getGraphicBufferSize() {
-    if (!mIsVideo) {
+    if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
         return 0;
     }
 
@@ -2248,7 +2406,7 @@
     size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
     for (size_t i = 0; i < portNum; ++i) {
         // TODO: this is just an estimation, we should get the real buffer size from ACodec.
-        size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2;
+        size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
     }
     return size;
 }
@@ -2260,7 +2418,8 @@
 
     status_t err;
     std::vector<MediaResourceParcel> resources;
-    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
+            toMediaResourceSubType(mDomain)));
     // Don't know the buffer size at this point, but it's fine to use 1 because
     // the reclaimResource call doesn't consider the requester's buffer size for now.
     resources.push_back(MediaResource::GraphicMemoryResource(1));
@@ -2463,7 +2622,9 @@
     msg->setObject("c2buffer", obj);
     msg->setInt64("timeUs", presentationTimeUs);
     msg->setInt32("flags", flags);
-    msg->setMessage("tunings", tunings);
+    if (tunings && tunings->countEntries() > 0) {
+        msg->setMessage("tunings", tunings);
+    }
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
@@ -2505,7 +2666,9 @@
     msg->setInt32("skipBlocks", pattern.mSkipBlocks);
     msg->setInt64("timeUs", presentationTimeUs);
     msg->setInt32("flags", flags);
-    msg->setMessage("tunings", tunings);
+    if (tunings && tunings->countEntries() > 0) {
+        msg->setMessage("tunings", tunings);
+    }
     msg->setPointer("errorDetailMsg", errorDetailMsg);
 
     sp<AMessage> response;
@@ -2977,8 +3140,9 @@
                     CHECK(msg->findInt32("err", &err));
                     CHECK(msg->findInt32("actionCode", &actionCode));
 
-                    ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
-                            err, actionCode, mState, stateString(mState).c_str());
+                    ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
+                            err, StrMediaError(err).c_str(), actionCode,
+                            mState, stateString(mState).c_str());
                     if (err == DEAD_OBJECT) {
                         mFlags |= kFlagSawMediaServerDie;
                         mFlags &= ~kFlagIsComponentAllocated;
@@ -3034,10 +3198,8 @@
                         case STOPPING:
                         {
                             if (mFlags & kFlagSawMediaServerDie) {
-                                bool postPendingReplies = true;
                                 if (mState == RELEASING && !mReplyID) {
                                     ALOGD("Releasing asynchronously, so nothing to reply here.");
-                                    postPendingReplies = false;
                                 }
                                 // MediaServer died, there definitely won't
                                 // be a shutdown complete notification after
@@ -3050,8 +3212,11 @@
                                 if (mState == RELEASING) {
                                     mComponentName.clear();
                                 }
-                                if (postPendingReplies) {
+                                if (mReplyID) {
                                     postPendingRepliesAndDeferredMessages(origin + ":dead");
+                                } else {
+                                    ALOGD("no pending replies: %s:dead following %s",
+                                          origin.c_str(), mLastReplyOrigin.c_str());
                                 }
                                 sendErrorResponse = false;
                             } else if (!mReplyID) {
@@ -3187,10 +3352,13 @@
                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
                     }
 
-                    if (mIsVideo) {
-                        // audio codec is currently ignored.
-                        mResourceManagerProxy->addResource(
-                                MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
+                    MediaCodecInfo::Attributes attr = mCodecInfo
+                            ? mCodecInfo->getAttributes()
+                            : MediaCodecInfo::Attributes(0);
+                    if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
+                        // Software-only codecs are not registered with the resource manager.
+                        mResourceManagerProxy->addResource(MediaResource::CodecResource(
+                            mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
                     }
 
                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
@@ -3214,6 +3382,8 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
+                    updateHDRFormatMetric();
+
                     // limit to confirming the opt-in behavior to minimize any behavioral change
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
@@ -3295,6 +3465,7 @@
                                 mComponentName.c_str(),
                                 mInputFormat->debugString(4).c_str(),
                                 mOutputFormat->debugString(4).c_str());
+                        updateHDRFormatMetric();
                         CHECK(obj != NULL);
                         response->setObject("input-surface", obj);
                         mHaveInputSurface = true;
@@ -3319,6 +3490,7 @@
                     if (!msg->findInt32("err", &err)) {
                         CHECK(msg->findMessage("input-format", &mInputFormat));
                         CHECK(msg->findMessage("output-format", &mOutputFormat));
+                        updateHDRFormatMetric();
                         mHaveInputSurface = true;
                     } else {
                         response->setInt32("err", err);
@@ -3356,7 +3528,7 @@
                     }
 
                     CHECK_EQ(mState, STARTING);
-                    if (mIsVideo) {
+                    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
                         mResourceManagerProxy->addResource(
                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
                     }
@@ -3379,10 +3551,12 @@
                         break;
                     }
                     TunnelPeekState previousState = mTunnelPeekState;
-                    mTunnelPeekState = TunnelPeekState::kBufferRendered;
-                    ALOGV("TunnelPeekState: %s -> %s",
-                          asString(previousState),
-                          asString(TunnelPeekState::kBufferRendered));
+                    if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
+                        mTunnelPeekState = TunnelPeekState::kBufferRendered;
+                        ALOGV("TunnelPeekState: %s -> %s",
+                                asString(previousState),
+                                asString(TunnelPeekState::kBufferRendered));
+                    }
                     updatePlaybackDuration(msg);
                     // check that we have a notification set
                     if (mOnFrameRenderedNotification != NULL) {
@@ -3504,6 +3678,20 @@
 
                 case kWhatDrainThisBuffer:
                 {
+                    if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
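+                        // In tunnel mode the codec renders video directly, so the output
+                        // buffer is only used to propagate a format change (or an activity
+                        // notification) before being discarded.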
+                        sp<RefBase> obj;
+                        CHECK(msg->findObject("buffer", &obj));
+                        sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+                        if (mFlags & kFlagIsAsync) {
+                            // In asynchronous mode, output format change is processed immediately.
+                            handleOutputFormatChangeIfNeeded(buffer);
+                        } else {
+                            postActivityNotificationIfPossible();
+                        }
+                        mBufferChannel->discardBuffer(buffer);
+                        break;
+                    }
+
                     /* size_t index = */updateBuffers(kPortIndexOutput, msg);
 
                     if (mState == FLUSHING
@@ -3785,6 +3973,14 @@
                 mTunneled = false;
             }
 
+            // If mTunnelPeekState is still in kLegacyMode at this point,
+            // configure the codec in legacy mode
+            if (mTunneled && (mTunnelPeekState == TunnelPeekState::kLegacyMode)) {
+                sp<AMessage> params = new AMessage;
+                params->setInt32("android._tunnel-peek-set-legacy", 1);
+                onSetParameters(params);
+            }
+
             int32_t background = 0;
             if (format->findInt32("android._background-mode", &background) && background) {
                 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
@@ -3903,10 +4099,12 @@
             sp<AReplyToken> replyID;
             CHECK(msg->senderAwaitsResponse(&replyID));
             TunnelPeekState previousState = mTunnelPeekState;
-            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
-            ALOGV("TunnelPeekState: %s -> %s",
-                  asString(previousState),
-                  asString(TunnelPeekState::kEnabledNoBuffer));
+            if (previousState != TunnelPeekState::kLegacyMode) {
+                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                ALOGV("TunnelPeekState: %s -> %s",
+                        asString(previousState),
+                        asString(TunnelPeekState::kEnabledNoBuffer));
+            }
 
             mReplyID = replyID;
             setState(STARTING);
@@ -4048,26 +4246,29 @@
                 break;
             }
 
-            if (asyncNotify != nullptr) {
-                if (mSurface != NULL) {
-                    if (!mReleaseSurface) {
-                        uint64_t usage = 0;
-                        if (mSurface->getConsumerUsage(&usage) != OK) {
-                            usage = 0;
-                        }
-                        mReleaseSurface.reset(new ReleaseSurface(usage));
+            bool forceSync = false;
+            if (asyncNotify != nullptr && mSurface != NULL) {
+                if (!mReleaseSurface) {
+                    uint64_t usage = 0;
+                    if (mSurface->getConsumerUsage(&usage) != OK) {
+                        usage = 0;
                     }
-                    if (mSurface != mReleaseSurface->getSurface()) {
-                        status_t err = connectToSurface(mReleaseSurface->getSurface());
-                        ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
-                        if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
-                            err = mCodec->setSurface(mReleaseSurface->getSurface());
-                            ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
-                        }
-                        if (err == OK) {
-                            (void)disconnectFromSurface();
-                            mSurface = mReleaseSurface->getSurface();
-                        }
+                    mReleaseSurface.reset(new ReleaseSurface(usage));
+                }
+                if (mSurface != mReleaseSurface->getSurface()) {
+                    status_t err = connectToSurface(mReleaseSurface->getSurface());
+                    ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+                    if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+                        err = mCodec->setSurface(mReleaseSurface->getSurface());
+                        ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+                    }
+                    if (err == OK) {
+                        (void)disconnectFromSurface();
+                        mSurface = mReleaseSurface->getSurface();
+                    } else {
+                        // We were not able to switch the surface, so force
+                        // synchronous release.
+                        forceSync = true;
                     }
                 }
             }
@@ -4091,8 +4292,10 @@
             }
 
             if (asyncNotify != nullptr) {
-                mResourceManagerProxy->markClientForPendingRemoval();
-                postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+                if (!forceSync) {
+                    mResourceManagerProxy->markClientForPendingRemoval();
+                    postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+                }
                 asyncNotifyPost.clear();
                 mAsyncReleaseCompleteNotification = asyncNotify;
             }
@@ -4342,10 +4545,12 @@
             mCodec->signalFlush();
             returnBuffersToCodec();
             TunnelPeekState previousState = mTunnelPeekState;
-            mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
-            ALOGV("TunnelPeekState: %s -> %s",
-                  asString(previousState),
-                  asString(TunnelPeekState::kEnabledNoBuffer));
+            if (previousState != TunnelPeekState::kLegacyMode) {
+                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
+                ALOGV("TunnelPeekState: %s -> %s",
+                        asString(previousState),
+                        asString(TunnelPeekState::kEnabledNoBuffer));
+            }
             break;
         }
 
@@ -4476,6 +4681,7 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
+    updateHDRFormatMetric();
     mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4501,6 +4707,9 @@
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
+                if (ColorUtils::isHDRStaticInfoValid(&info)) {
+                    mHDRStaticInfo = true;
+                }
             }
         }
 
@@ -4509,6 +4718,7 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
+            mHDR10PlusInfo = true;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
@@ -4553,6 +4763,21 @@
             mCrypto->notifyResolution(width, height);
         }
     }
+
+    if (mMetricsHandle != 0) {
+        int32_t colorStandard = -1;
+        if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
+        }
+        int32_t colorRange = -1;
+        if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
+        }
+        int32_t colorTransfer = -1;
+        if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
+        }
+    }
 }
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
@@ -4582,7 +4807,6 @@
     mCSD.erase(mCSD.begin());
     std::shared_ptr<C2Buffer> c2Buffer;
     sp<hardware::HidlMemory> memory;
-    size_t offset = 0;
 
     if (mFlags & kFlagUseBlockModel) {
         if (hasCryptoOrDescrambler()) {
@@ -4603,7 +4827,6 @@
             memcpy(mem->unsecurePointer(), csd->data(), csd->size());
             ssize_t heapOffset;
             memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
-            offset += heapOffset;
         } else {
             std::shared_ptr<C2LinearBlock> block =
                 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
@@ -4645,12 +4868,10 @@
         sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
             new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
         msg->setObject("c2buffer", obj);
-        msg->setMessage("tunings", new AMessage);
     } else if (memory) {
         sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
             new WrapperObject<sp<hardware::HidlMemory>>{memory}};
         msg->setObject("memory", obj);
-        msg->setMessage("tunings", new AMessage);
     }
 
     return onQueueInputBuffer(msg);
@@ -4777,8 +4998,8 @@
     }
     const CryptoPlugin::SubSample *subSamples;
     size_t numSubSamples;
-    const uint8_t *key;
-    const uint8_t *iv;
+    const uint8_t *key = NULL;
+    const uint8_t *iv = NULL;
     CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
 
     // We allow the simpler queueInputBuffer API to be used even in
@@ -4793,8 +5014,6 @@
 
             subSamples = &ss;
             numSubSamples = 1;
-            key = NULL;
-            iv = NULL;
             pattern.mEncryptBlocks = 0;
             pattern.mSkipBlocks = 0;
         }
@@ -4832,9 +5051,10 @@
     sp<MediaCodecBuffer> buffer = info->mData;
 
     if (c2Buffer || memory) {
-        sp<AMessage> tunings;
-        CHECK(msg->findMessage("tunings", &tunings));
-        onSetParameters(tunings);
+        sp<AMessage> tunings = NULL;
+        if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
+            onSetParameters(tunings);
+        }
 
         status_t err = OK;
         if (c2Buffer) {
@@ -4866,6 +5086,8 @@
         offset = buffer->offset();
         size = buffer->size();
         if (err != OK) {
+            ALOGI("block model buffer attach failed: err = %s (%d)",
+                  StrMediaError(err).c_str(), err);
             return err;
         }
     }
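The tunnel-peek hunks above all apply the same guard: once the codec has been put into TunnelPeekState::kLegacyMode, the state machine is never advanced again. A minimal standalone sketch of that pattern, using an assumed cut-down enum rather than the real MediaCodec member:

    // Sketch (assumed names): legacy mode is sticky; every other transition goes through here.
    enum class TunnelPeekState { kLegacyMode, kEnabledNoBuffer, kBufferRendered };

    static void advanceTunnelPeek(TunnelPeekState &state, TunnelPeekState next) {
        if (state == TunnelPeekState::kLegacyMode) {
            return;  // configured in legacy mode; never overwrite
        }
        state = next;
    }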
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 6243828..a3040f4 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -18,8 +18,6 @@
 #define LOG_TAG "MediaCodecList"
 #include <utils/Log.h>
 
-#include "MediaCodecListOverrides.h"
-
 #include <binder/IServiceManager.h>
 
 #include <media/IMediaCodecList.h>
@@ -34,6 +32,7 @@
 #include <media/stagefright/CCodec.h>
 #include <media/stagefright/Codec2InfoBuilder.h>
 #include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaCodecListOverrides.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OmxInfoBuilder.h>
 #include <media/stagefright/PersistentSurface.h>
@@ -509,6 +508,29 @@
                 }
             }
         }
+
+        int32_t profile = -1;
+        if (format->findInt32("profile", &profile)) {
+            int32_t level = -1;
+            format->findInt32("level", &level);
+            Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+            capabilities->getSupportedProfileLevels(&profileLevels);
+            auto it = profileLevels.begin();
+            for (; it != profileLevels.end(); ++it) {
+                if (profile != it->mProfile) {
+                    continue;
+                }
+                if (level > -1 && level > it->mLevel) {
+                    continue;
+                }
+                break;
+            }
+
+            if (it == profileLevels.end()) {
+                ALOGV("Codec does not support profile %d with level %d", profile, level);
+                return false;
+            }
+        }
     }
 
     // haven't found a reason to discard this one
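The profile check added above only rejects a codec when the caller asked for a profile the codec does not list, or for a level higher than any level the codec supports for that profile. A hedged sketch of the same loop over assumed (profile, level) pairs instead of the real MediaCodecInfo capabilities:

    #include <cstdint>
    #include <utility>
    #include <vector>

    // Returns true when some supported pair matches the requested profile and,
    // if a level was requested (level > -1), meets or exceeds it.
    static bool supportsProfileLevel(const std::vector<std::pair<int32_t, int32_t>> &supported,
                                     int32_t profile, int32_t level) {
        for (const auto &pl : supported) {
            if (pl.first != profile) continue;              // different profile
            if (level > -1 && level > pl.second) continue;  // supported level too low
            return true;
        }
        return false;
    }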
diff --git a/media/libstagefright/MediaCodecListOverrides.cpp b/media/libstagefright/MediaCodecListOverrides.cpp
index 4a167d1..9304e45 100644
--- a/media/libstagefright/MediaCodecListOverrides.cpp
+++ b/media/libstagefright/MediaCodecListOverrides.cpp
@@ -18,8 +18,6 @@
 #define LOG_TAG "MediaCodecListOverrides"
 #include <utils/Log.h>
 
-#include "MediaCodecListOverrides.h"
-
 #include <cutils/properties.h>
 #include <gui/Surface.h>
 #include <mediadrm/ICrypto.h>
@@ -30,6 +28,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaCodecListOverrides.h>
 
 namespace android {
 
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0107c32..b07f8f7 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -943,10 +943,17 @@
 
             sp<MediaCodecBuffer> outbuf;
             status_t err = mEncoder->getOutputBuffer(index, &outbuf);
-            if (err != OK || outbuf == NULL || outbuf->data() == NULL
-                || outbuf->size() == 0) {
+            if (err != OK || outbuf == NULL || outbuf->data() == NULL) {
                 signalEOS();
                 break;
+            } else if (outbuf->size() == 0) {
+                // Zero-length CSD buffers are not treated as an error
+                if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+                    mEncoder->releaseOutputBuffer(index);
+                } else {
+                    signalEOS();
+                }
+                break;
             }
 
             MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
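The MediaCodecSource change above stops treating an empty output buffer as a hard error when it carries the codec-config flag: such buffers are released back to the encoder instead of ending the stream. A small sketch of that classification under assumed names (not the real MediaCodecSource helpers):

    #include <cstddef>

    enum class OutputAction { kSignalEOS, kReleaseBuffer, kConsume };

    // err/data/size/isCodecConfig mirror the checks in the hunk above.
    static OutputAction classifyEncoderOutput(bool err, const void *data, size_t size,
                                              bool isCodecConfig) {
        if (err || data == nullptr) return OutputAction::kSignalEOS;
        if (size == 0) {
            // Zero-length CSD is not an error; just give the buffer back.
            return isCodecConfig ? OutputAction::kReleaseBuffer : OutputAction::kSignalEOS;
        }
        return OutputAction::kConsume;
    }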
diff --git a/media/libstagefright/OggWriter.cpp b/media/libstagefright/OggWriter.cpp
index 0bc5976..0f5e95e 100644
--- a/media/libstagefright/OggWriter.cpp
+++ b/media/libstagefright/OggWriter.cpp
@@ -67,7 +67,11 @@
         mFd = -1;
     }
 
-    free(mOs);
+    if (mOs != nullptr) {
+        ogg_stream_clear(mOs);
+        free(mOs);
+        mOs = nullptr;
+    }
 }
 
 status_t OggWriter::initCheck() const {
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index ee9016d..de91533 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -145,7 +145,19 @@
     if (available < num) {
         int32_t newcapacity = mCapacity + (num - available);
         char * newbuffer = new char[newcapacity];
-        memcpy(newbuffer, mCutBuffer, mCapacity);
+        if (mWriteHead < mReadHead) {
+            // Data isn't contiguous; memcpy twice to move the previous
+            // data into the new buffer.
+            size_t copyLeft = mCapacity - mReadHead;
+            memcpy(newbuffer, mCutBuffer + mReadHead, copyLeft);
+            memcpy(newbuffer + copyLeft, mCutBuffer, mWriteHead);
+            mReadHead = 0;
+            mWriteHead += copyLeft;
+        } else {
+            memcpy(newbuffer, mCutBuffer + mReadHead, mWriteHead - mReadHead);
+            mWriteHead -= mReadHead;
+            mReadHead = 0;
+        }
         delete [] mCutBuffer;
         mCapacity = newcapacity;
         mCutBuffer = newbuffer;
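The SkipCutBuffer fix copies the live data in two pieces when the write head has wrapped behind the read head. A self-contained sketch of that reallocation for a plain byte ring buffer (assumed stand-in for the real mCutBuffer/mReadHead/mWriteHead members):

    #include <cstddef>
    #include <cstring>

    // Grow a ring buffer whose valid bytes may wrap around the end of the allocation.
    static char *growRingBuffer(char *buf, size_t capacity, size_t &readHead,
                                size_t &writeHead, size_t newCapacity) {
        char *newBuf = new char[newCapacity];
        if (writeHead < readHead) {
            // Wrapped: copy the tail [readHead, capacity), then the head [0, writeHead).
            size_t tail = capacity - readHead;
            memcpy(newBuf, buf + readHead, tail);
            memcpy(newBuf + tail, buf, writeHead);
            writeHead += tail;
        } else {
            // Contiguous: a single copy suffices.
            memcpy(newBuf, buf + readHead, writeHead - readHead);
            writeHead -= readHead;
        }
        readHead = 0;
        delete[] buf;
        return newBuf;
    }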
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 7ce2968..22885c9 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -13,7 +13,18 @@
 
   "presubmit-large": [
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaMiscTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaAudioTestCases",
       "options": [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
@@ -23,10 +34,54 @@
         },
         // TODO: b/149314419
         {
-          "exclude-filter": "android.media.cts.AudioPlaybackCaptureTest"
+          "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
         },
         {
-          "exclude-filter": "android.media.cts.AudioRecordTest"
+          "exclude-filter": "android.media.audio.cts.AudioRecordTest"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaDecoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaEncoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaPlayerTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
         }
       ]
     }
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4c18f87..a443ed9 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -28,9 +28,6 @@
 #include "include/HevcUtils.h"
 
 #include <cutils/properties.h>
-#include <media/openmax/OMX_Audio.h>
-#include <media/openmax/OMX_Video.h>
-#include <media/openmax/OMX_VideoExt.h>
 #include <media/stagefright/CodecBase.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -57,6 +54,14 @@
 #define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
   "mpegh-compatible-sets"
 
+namespace {
+    // TODO: this should possibly be handled in an else
+    constexpr static int32_t AACObjectNull = 0;
+
+    // TODO: decide if we should just not transmit the level in this case
+    constexpr static int32_t DolbyVisionLevelUnknown = 0;
+}
+
 namespace android {
 
 static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) {
@@ -121,14 +126,10 @@
     }
 }
 
-static bool isHdr(const sp<AMessage> &format) {
-    // if CSD specifies HDR transfer(s), we assume HDR. Otherwise, if it specifies non-HDR
-    // transfers, we must assume non-HDR. This is because CSD trumps any color-transfer key
-    // in the format.
-    int32_t isHdr;
-    if (format->findInt32("android._is-hdr", &isHdr)) {
-        return isHdr;
-    }
+/**
+ * Returns true if, and only if, the given format corresponds to HDR10 or HDR10+.
+ */
+static bool isHdr10or10Plus(const sp<AMessage> &format) {
 
     // if user/container supplied HDR static info without transfer set, assume true
     if ((format->contains("hdr-static-info") || format->contains("hdr10-plus-info"))
@@ -138,8 +139,7 @@
     // otherwise, verify that an HDR transfer function is set
     int32_t transfer;
     if (format->findInt32("color-transfer", &transfer)) {
-        return transfer == ColorUtils::kColorTransferST2084
-                || transfer == ColorUtils::kColorTransferHLG;
+        return transfer == ColorUtils::kColorTransferST2084;
     }
     return false;
 }
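isHdr10or10Plus() narrows the old isHdr() check: HLG transfer no longer qualifies, so only ST2084 (PQ), or container-supplied HDR static/HDR10+ metadata without an explicit color transfer, is treated as HDR10/HDR10+. A hedged sketch of the same decision over a placeholder format struct instead of AMessage:

    #include <cstdint>

    // Placeholder struct; the real code queries AMessage keys and ColorUtils constants.
    struct FormatView {
        bool hasHdrStaticInfo;   // "hdr-static-info" present
        bool hasHdr10PlusInfo;   // "hdr10-plus-info" present
        bool hasColorTransfer;   // "color-transfer" present
        int32_t colorTransfer;   // compared against the ST2084 transfer constant
    };

    static bool isHdr10or10Plus(const FormatView &f, int32_t kColorTransferST2084) {
        if ((f.hasHdrStaticInfo || f.hasHdr10PlusInfo) && !f.hasColorTransfer) {
            return true;  // metadata supplied without a transfer: assume HDR10/HDR10+
        }
        return f.hasColorTransfer && f.colorTransfer == kColorTransferST2084;
    }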
@@ -156,21 +156,22 @@
         audioObjectType >>= 11;
     }
 
-    const static ALookup<uint16_t, OMX_AUDIO_AACPROFILETYPE> profiles {
-        { 1,  OMX_AUDIO_AACObjectMain     },
-        { 2,  OMX_AUDIO_AACObjectLC       },
-        { 3,  OMX_AUDIO_AACObjectSSR      },
-        { 4,  OMX_AUDIO_AACObjectLTP      },
-        { 5,  OMX_AUDIO_AACObjectHE       },
-        { 6,  OMX_AUDIO_AACObjectScalable },
-        { 17, OMX_AUDIO_AACObjectERLC     },
-        { 23, OMX_AUDIO_AACObjectLD       },
-        { 29, OMX_AUDIO_AACObjectHE_PS    },
-        { 39, OMX_AUDIO_AACObjectELD      },
-        { 42, OMX_AUDIO_AACObjectXHE      },
+
+    const static ALookup<uint16_t, int32_t> profiles {
+        { 1,  AACObjectMain     },
+        { 2,  AACObjectLC       },
+        { 3,  AACObjectSSR      },
+        { 4,  AACObjectLTP      },
+        { 5,  AACObjectHE       },
+        { 6,  AACObjectScalable },
+        { 17, AACObjectERLC     },
+        { 23, AACObjectLD       },
+        { 29, AACObjectHE_PS    },
+        { 39, AACObjectELD      },
+        { 42, AACObjectXHE      },
     };
 
-    OMX_AUDIO_AACPROFILETYPE profile;
+    int32_t profile;
     if (profiles.map(audioObjectType, &profile)) {
         format->setInt32("profile", profile);
     }
@@ -184,59 +185,92 @@
     const uint8_t constraints = ptr[2];
     const uint8_t level = ptr[3];
 
-    const static ALookup<uint8_t, OMX_VIDEO_AVCLEVELTYPE> levels {
-        {  9, OMX_VIDEO_AVCLevel1b }, // technically, 9 is only used for High+ profiles
-        { 10, OMX_VIDEO_AVCLevel1  },
-        { 11, OMX_VIDEO_AVCLevel11 }, // prefer level 1.1 for the value 11
-        { 11, OMX_VIDEO_AVCLevel1b },
-        { 12, OMX_VIDEO_AVCLevel12 },
-        { 13, OMX_VIDEO_AVCLevel13 },
-        { 20, OMX_VIDEO_AVCLevel2  },
-        { 21, OMX_VIDEO_AVCLevel21 },
-        { 22, OMX_VIDEO_AVCLevel22 },
-        { 30, OMX_VIDEO_AVCLevel3  },
-        { 31, OMX_VIDEO_AVCLevel31 },
-        { 32, OMX_VIDEO_AVCLevel32 },
-        { 40, OMX_VIDEO_AVCLevel4  },
-        { 41, OMX_VIDEO_AVCLevel41 },
-        { 42, OMX_VIDEO_AVCLevel42 },
-        { 50, OMX_VIDEO_AVCLevel5  },
-        { 51, OMX_VIDEO_AVCLevel51 },
-        { 52, OMX_VIDEO_AVCLevel52 },
-        { 60, OMX_VIDEO_AVCLevel6  },
-        { 61, OMX_VIDEO_AVCLevel61 },
-        { 62, OMX_VIDEO_AVCLevel62 },
+    const static ALookup<uint8_t, int32_t> levels {
+        {  9, AVCLevel1b }, // technically, 9 is only used for High+ profiles
+        { 10, AVCLevel1  },
+        { 11, AVCLevel11 }, // prefer level 1.1 for the value 11
+        { 11, AVCLevel1b },
+        { 12, AVCLevel12 },
+        { 13, AVCLevel13 },
+        { 20, AVCLevel2  },
+        { 21, AVCLevel21 },
+        { 22, AVCLevel22 },
+        { 30, AVCLevel3  },
+        { 31, AVCLevel31 },
+        { 32, AVCLevel32 },
+        { 40, AVCLevel4  },
+        { 41, AVCLevel41 },
+        { 42, AVCLevel42 },
+        { 50, AVCLevel5  },
+        { 51, AVCLevel51 },
+        { 52, AVCLevel52 },
+        { 60, AVCLevel6  },
+        { 61, AVCLevel61 },
+        { 62, AVCLevel62 },
     };
-    const static ALookup<uint8_t, OMX_VIDEO_AVCPROFILETYPE> profiles {
-        { 66, OMX_VIDEO_AVCProfileBaseline },
-        { 77, OMX_VIDEO_AVCProfileMain     },
-        { 88, OMX_VIDEO_AVCProfileExtended },
-        { 100, OMX_VIDEO_AVCProfileHigh    },
-        { 110, OMX_VIDEO_AVCProfileHigh10  },
-        { 122, OMX_VIDEO_AVCProfileHigh422 },
-        { 244, OMX_VIDEO_AVCProfileHigh444 },
+    const static ALookup<uint8_t, int32_t> profiles {
+        { 66, AVCProfileBaseline },
+        { 77, AVCProfileMain     },
+        { 88, AVCProfileExtended },
+        { 100, AVCProfileHigh    },
+        { 110, AVCProfileHigh10  },
+        { 122, AVCProfileHigh422 },
+        { 244, AVCProfileHigh444 },
     };
 
     // set profile & level if they are recognized
-    OMX_VIDEO_AVCPROFILETYPE codecProfile;
-    OMX_VIDEO_AVCLEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (profiles.map(profile, &codecProfile)) {
         if (profile == 66 && (constraints & 0x40)) {
-            codecProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedBaseline;
+            codecProfile = AVCProfileConstrainedBaseline;
         } else if (profile == 100 && (constraints & 0x0C) == 0x0C) {
-            codecProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedHigh;
+            codecProfile = AVCProfileConstrainedHigh;
         }
         format->setInt32("profile", codecProfile);
         if (levels.map(level, &codecLevel)) {
             // for 9 && 11 decide level based on profile and constraint_set3 flag
             if (level == 11 && (profile == 66 || profile == 77 || profile == 88)) {
-                codecLevel = (constraints & 0x10) ? OMX_VIDEO_AVCLevel1b : OMX_VIDEO_AVCLevel11;
+                codecLevel = (constraints & 0x10) ? AVCLevel1b : AVCLevel11;
             }
             format->setInt32("level", codecLevel);
         }
     }
 }
 
+static const ALookup<uint8_t, int32_t>& getDolbyVisionProfileTable() {
+    static const ALookup<uint8_t, int32_t> profileTable = {
+        {1, DolbyVisionProfileDvavPen},
+        {3, DolbyVisionProfileDvheDen},
+        {4, DolbyVisionProfileDvheDtr},
+        {5, DolbyVisionProfileDvheStn},
+        {6, DolbyVisionProfileDvheDth},
+        {7, DolbyVisionProfileDvheDtb},
+        {8, DolbyVisionProfileDvheSt},
+        {9, DolbyVisionProfileDvavSe},
+        {10, DolbyVisionProfileDvav110},
+    };
+    return profileTable;
+}
+
+static const ALookup<uint8_t, int32_t>& getDolbyVisionLevelsTable() {
+    static const ALookup<uint8_t, int32_t> levelsTable = {
+        {0, DolbyVisionLevelUnknown},
+        {1, DolbyVisionLevelHd24},
+        {2, DolbyVisionLevelHd30},
+        {3, DolbyVisionLevelFhd24},
+        {4, DolbyVisionLevelFhd30},
+        {5, DolbyVisionLevelFhd60},
+        {6, DolbyVisionLevelUhd24},
+        {7, DolbyVisionLevelUhd30},
+        {8, DolbyVisionLevelUhd48},
+        {9, DolbyVisionLevelUhd60},
+        {10, DolbyVisionLevelUhd120},
+        {11, DolbyVisionLevel8k30},
+        {12, DolbyVisionLevel8k60},
+    };
+    return levelsTable;
+}
 static void parseDolbyVisionProfileLevelFromDvcc(const uint8_t *ptr, size_t size, sp<AMessage> &format) {
     // dv_major.dv_minor Should be 1.0 or 2.1
     if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
@@ -256,41 +290,20 @@
 
     // All Dolby Profiles will have profile and level info in MediaFormat
     // Profile 8 and 9 will have bl_compatibility_id too.
-    const static ALookup<uint8_t, OMX_VIDEO_DOLBYVISIONPROFILETYPE> profiles{
-        {1, OMX_VIDEO_DolbyVisionProfileDvavPen},
-        {3, OMX_VIDEO_DolbyVisionProfileDvheDen},
-        {4, OMX_VIDEO_DolbyVisionProfileDvheDtr},
-        {5, OMX_VIDEO_DolbyVisionProfileDvheStn},
-        {6, OMX_VIDEO_DolbyVisionProfileDvheDth},
-        {7, OMX_VIDEO_DolbyVisionProfileDvheDtb},
-        {8, OMX_VIDEO_DolbyVisionProfileDvheSt},
-        {9, OMX_VIDEO_DolbyVisionProfileDvavSe},
-        {10, OMX_VIDEO_DolbyVisionProfileDvav110},
-    };
+    const ALookup<uint8_t, int32_t> &profiles = getDolbyVisionProfileTable();
+    const ALookup<uint8_t, int32_t> &levels = getDolbyVisionLevelsTable();
 
-    const static ALookup<uint8_t, OMX_VIDEO_DOLBYVISIONLEVELTYPE> levels{
-        {0, OMX_VIDEO_DolbyVisionLevelUnknown},
-        {1, OMX_VIDEO_DolbyVisionLevelHd24},
-        {2, OMX_VIDEO_DolbyVisionLevelHd30},
-        {3, OMX_VIDEO_DolbyVisionLevelFhd24},
-        {4, OMX_VIDEO_DolbyVisionLevelFhd30},
-        {5, OMX_VIDEO_DolbyVisionLevelFhd60},
-        {6, OMX_VIDEO_DolbyVisionLevelUhd24},
-        {7, OMX_VIDEO_DolbyVisionLevelUhd30},
-        {8, OMX_VIDEO_DolbyVisionLevelUhd48},
-        {9, OMX_VIDEO_DolbyVisionLevelUhd60},
-    };
     // set rpuAssoc
     if (rpu_present_flag && el_present_flag && !bl_present_flag) {
         format->setInt32("rpuAssoc", 1);
     }
     // set profile & level if they are recognized
-    OMX_VIDEO_DOLBYVISIONPROFILETYPE codecProfile;
-    OMX_VIDEO_DOLBYVISIONLEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (profiles.map(profile, &codecProfile)) {
         format->setInt32("profile", codecProfile);
-        if (codecProfile == OMX_VIDEO_DolbyVisionProfileDvheSt ||
-            codecProfile == OMX_VIDEO_DolbyVisionProfileDvavSe) {
+        if (codecProfile == DolbyVisionProfileDvheSt ||
+            codecProfile == DolbyVisionProfileDvavSe) {
             format->setInt32("bl_compatibility_id", bl_compatibility_id);
         }
         if (levels.map(level, &codecLevel)) {
@@ -307,32 +320,32 @@
     const uint8_t profile = ptr[6];
     const uint8_t level = ptr[5];
 
-    const static ALookup<uint8_t, OMX_VIDEO_H263PROFILETYPE> profiles {
-        { 0, OMX_VIDEO_H263ProfileBaseline },
-        { 1, OMX_VIDEO_H263ProfileH320Coding },
-        { 2, OMX_VIDEO_H263ProfileBackwardCompatible },
-        { 3, OMX_VIDEO_H263ProfileISWV2 },
-        { 4, OMX_VIDEO_H263ProfileISWV3 },
-        { 5, OMX_VIDEO_H263ProfileHighCompression },
-        { 6, OMX_VIDEO_H263ProfileInternet },
-        { 7, OMX_VIDEO_H263ProfileInterlace },
-        { 8, OMX_VIDEO_H263ProfileHighLatency },
+    const static ALookup<uint8_t, int32_t> profiles {
+        { 0, H263ProfileBaseline },
+        { 1, H263ProfileH320Coding },
+        { 2, H263ProfileBackwardCompatible },
+        { 3, H263ProfileISWV2 },
+        { 4, H263ProfileISWV3 },
+        { 5, H263ProfileHighCompression },
+        { 6, H263ProfileInternet },
+        { 7, H263ProfileInterlace },
+        { 8, H263ProfileHighLatency },
     };
 
-    const static ALookup<uint8_t, OMX_VIDEO_H263LEVELTYPE> levels {
-        { 10, OMX_VIDEO_H263Level10 },
-        { 20, OMX_VIDEO_H263Level20 },
-        { 30, OMX_VIDEO_H263Level30 },
-        { 40, OMX_VIDEO_H263Level40 },
-        { 45, OMX_VIDEO_H263Level45 },
-        { 50, OMX_VIDEO_H263Level50 },
-        { 60, OMX_VIDEO_H263Level60 },
-        { 70, OMX_VIDEO_H263Level70 },
+    const static ALookup<uint8_t, int32_t> levels {
+        { 10, H263Level10 },
+        { 20, H263Level20 },
+        { 30, H263Level30 },
+        { 40, H263Level40 },
+        { 45, H263Level45 },
+        { 50, H263Level50 },
+        { 60, H263Level60 },
+        { 70, H263Level70 },
     };
 
     // set profile & level if they are recognized
-    OMX_VIDEO_H263PROFILETYPE codecProfile;
-    OMX_VIDEO_H263LEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (profiles.map(profile, &codecProfile)) {
         format->setInt32("profile", codecProfile);
         if (levels.map(level, &codecLevel)) {
@@ -350,59 +363,63 @@
     const uint8_t tier = (ptr[1] & 0x20) >> 5;
     const uint8_t level = ptr[12];
 
-    const static ALookup<std::pair<uint8_t, uint8_t>, OMX_VIDEO_HEVCLEVELTYPE> levels {
-        { { 0, 30  }, OMX_VIDEO_HEVCMainTierLevel1  },
-        { { 0, 60  }, OMX_VIDEO_HEVCMainTierLevel2  },
-        { { 0, 63  }, OMX_VIDEO_HEVCMainTierLevel21 },
-        { { 0, 90  }, OMX_VIDEO_HEVCMainTierLevel3  },
-        { { 0, 93  }, OMX_VIDEO_HEVCMainTierLevel31 },
-        { { 0, 120 }, OMX_VIDEO_HEVCMainTierLevel4  },
-        { { 0, 123 }, OMX_VIDEO_HEVCMainTierLevel41 },
-        { { 0, 150 }, OMX_VIDEO_HEVCMainTierLevel5  },
-        { { 0, 153 }, OMX_VIDEO_HEVCMainTierLevel51 },
-        { { 0, 156 }, OMX_VIDEO_HEVCMainTierLevel52 },
-        { { 0, 180 }, OMX_VIDEO_HEVCMainTierLevel6  },
-        { { 0, 183 }, OMX_VIDEO_HEVCMainTierLevel61 },
-        { { 0, 186 }, OMX_VIDEO_HEVCMainTierLevel62 },
-        { { 1, 30  }, OMX_VIDEO_HEVCHighTierLevel1  },
-        { { 1, 60  }, OMX_VIDEO_HEVCHighTierLevel2  },
-        { { 1, 63  }, OMX_VIDEO_HEVCHighTierLevel21 },
-        { { 1, 90  }, OMX_VIDEO_HEVCHighTierLevel3  },
-        { { 1, 93  }, OMX_VIDEO_HEVCHighTierLevel31 },
-        { { 1, 120 }, OMX_VIDEO_HEVCHighTierLevel4  },
-        { { 1, 123 }, OMX_VIDEO_HEVCHighTierLevel41 },
-        { { 1, 150 }, OMX_VIDEO_HEVCHighTierLevel5  },
-        { { 1, 153 }, OMX_VIDEO_HEVCHighTierLevel51 },
-        { { 1, 156 }, OMX_VIDEO_HEVCHighTierLevel52 },
-        { { 1, 180 }, OMX_VIDEO_HEVCHighTierLevel6  },
-        { { 1, 183 }, OMX_VIDEO_HEVCHighTierLevel61 },
-        { { 1, 186 }, OMX_VIDEO_HEVCHighTierLevel62 },
+    const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> levels {
+        { { 0, 30  }, HEVCMainTierLevel1  },
+        { { 0, 60  }, HEVCMainTierLevel2  },
+        { { 0, 63  }, HEVCMainTierLevel21 },
+        { { 0, 90  }, HEVCMainTierLevel3  },
+        { { 0, 93  }, HEVCMainTierLevel31 },
+        { { 0, 120 }, HEVCMainTierLevel4  },
+        { { 0, 123 }, HEVCMainTierLevel41 },
+        { { 0, 150 }, HEVCMainTierLevel5  },
+        { { 0, 153 }, HEVCMainTierLevel51 },
+        { { 0, 156 }, HEVCMainTierLevel52 },
+        { { 0, 180 }, HEVCMainTierLevel6  },
+        { { 0, 183 }, HEVCMainTierLevel61 },
+        { { 0, 186 }, HEVCMainTierLevel62 },
+        { { 1, 30  }, HEVCHighTierLevel1  },
+        { { 1, 60  }, HEVCHighTierLevel2  },
+        { { 1, 63  }, HEVCHighTierLevel21 },
+        { { 1, 90  }, HEVCHighTierLevel3  },
+        { { 1, 93  }, HEVCHighTierLevel31 },
+        { { 1, 120 }, HEVCHighTierLevel4  },
+        { { 1, 123 }, HEVCHighTierLevel41 },
+        { { 1, 150 }, HEVCHighTierLevel5  },
+        { { 1, 153 }, HEVCHighTierLevel51 },
+        { { 1, 156 }, HEVCHighTierLevel52 },
+        { { 1, 180 }, HEVCHighTierLevel6  },
+        { { 1, 183 }, HEVCHighTierLevel61 },
+        { { 1, 186 }, HEVCHighTierLevel62 },
     };
 
-    const static ALookup<uint8_t, OMX_VIDEO_HEVCPROFILETYPE> profiles {
-        { 1, OMX_VIDEO_HEVCProfileMain   },
-        { 2, OMX_VIDEO_HEVCProfileMain10 },
+    const static ALookup<uint8_t, int32_t> profiles {
+        { 1, HEVCProfileMain   },
+        { 2, HEVCProfileMain10 },
         // use Main for Main Still Picture decoding
-        { 3, OMX_VIDEO_HEVCProfileMain },
+        { 3, HEVCProfileMain },
     };
 
     // set profile & level if they are recognized
-    OMX_VIDEO_HEVCPROFILETYPE codecProfile;
-    OMX_VIDEO_HEVCLEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (!profiles.map(profile, &codecProfile)) {
         if (ptr[2] & 0x40 /* general compatibility flag 1 */) {
             // Note that this case covers Main Still Picture too
-            codecProfile = OMX_VIDEO_HEVCProfileMain;
+            codecProfile = HEVCProfileMain;
         } else if (ptr[2] & 0x20 /* general compatibility flag 2 */) {
-            codecProfile = OMX_VIDEO_HEVCProfileMain10;
+            codecProfile = HEVCProfileMain10;
         } else {
             return;
         }
     }
 
     // bump to HDR profile
-    if (isHdr(format) && codecProfile == OMX_VIDEO_HEVCProfileMain10) {
-        codecProfile = OMX_VIDEO_HEVCProfileMain10HDR10;
+    if (isHdr10or10Plus(format) && codecProfile == HEVCProfileMain10) {
+        if (format->contains("hdr10-plus-info")) {
+            codecProfile = HEVCProfileMain10HDR10Plus;
+        } else {
+            codecProfile = HEVCProfileMain10HDR10;
+        }
     }
 
     format->setInt32("profile", codecProfile);
@@ -422,36 +439,36 @@
         }
         const uint8_t indication = ((seq[4] & 0xF) << 4) | ((seq[5] & 0xF0) >> 4);
 
-        const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles {
-            { 0x50, OMX_VIDEO_MPEG2ProfileSimple  },
-            { 0x40, OMX_VIDEO_MPEG2ProfileMain    },
-            { 0x30, OMX_VIDEO_MPEG2ProfileSNR     },
-            { 0x20, OMX_VIDEO_MPEG2ProfileSpatial },
-            { 0x10, OMX_VIDEO_MPEG2ProfileHigh    },
+        const static ALookup<uint8_t, int32_t> profiles {
+            { 0x50, MPEG2ProfileSimple  },
+            { 0x40, MPEG2ProfileMain    },
+            { 0x30, MPEG2ProfileSNR     },
+            { 0x20, MPEG2ProfileSpatial },
+            { 0x10, MPEG2ProfileHigh    },
         };
 
-        const static ALookup<uint8_t, OMX_VIDEO_MPEG2LEVELTYPE> levels {
-            { 0x0A, OMX_VIDEO_MPEG2LevelLL  },
-            { 0x08, OMX_VIDEO_MPEG2LevelML  },
-            { 0x06, OMX_VIDEO_MPEG2LevelH14 },
-            { 0x04, OMX_VIDEO_MPEG2LevelHL  },
-            { 0x02, OMX_VIDEO_MPEG2LevelHP  },
+        const static ALookup<uint8_t, int32_t> levels {
+            { 0x0A, MPEG2LevelLL  },
+            { 0x08, MPEG2LevelML  },
+            { 0x06, MPEG2LevelH14 },
+            { 0x04, MPEG2LevelHL  },
+            { 0x02, MPEG2LevelHP  },
         };
 
         const static ALookup<uint8_t,
-                std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE>> escapes {
+                std::pair<int32_t, int32_t>> escapes {
             /* unsupported
-            { 0x8E, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelLL  } },
-            { 0x8D, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelML  } },
-            { 0x8B, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelH14 } },
-            { 0x8A, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelHL  } }, */
-            { 0x85, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelML  } },
-            { 0x82, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelHL  } },
+            { 0x8E, { XXX_MPEG2ProfileMultiView, MPEG2LevelLL  } },
+            { 0x8D, { XXX_MPEG2ProfileMultiView, MPEG2LevelML  } },
+            { 0x8B, { XXX_MPEG2ProfileMultiView, MPEG2LevelH14 } },
+            { 0x8A, { XXX_MPEG2ProfileMultiView, MPEG2LevelHL  } }, */
+            { 0x85, { MPEG2Profile422, MPEG2LevelML  } },
+            { 0x82, { MPEG2Profile422, MPEG2LevelHL  } },
         };
 
-        OMX_VIDEO_MPEG2PROFILETYPE profile;
-        OMX_VIDEO_MPEG2LEVELTYPE level;
-        std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE> profileLevel;
+        int32_t profile;
+        int32_t level;
+        std::pair<int32_t, int32_t> profileLevel;
         if (escapes.map(indication, &profileLevel)) {
             format->setInt32("profile", profileLevel.first);
             format->setInt32("level", profileLevel.second);
@@ -468,16 +485,16 @@
     // esds seems to only contain the profile for MPEG-2
     uint8_t objType;
     if (esds.getObjectTypeIndication(&objType) == OK) {
-        const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles{
-            { 0x60, OMX_VIDEO_MPEG2ProfileSimple  },
-            { 0x61, OMX_VIDEO_MPEG2ProfileMain    },
-            { 0x62, OMX_VIDEO_MPEG2ProfileSNR     },
-            { 0x63, OMX_VIDEO_MPEG2ProfileSpatial },
-            { 0x64, OMX_VIDEO_MPEG2ProfileHigh    },
-            { 0x65, OMX_VIDEO_MPEG2Profile422     },
+        const static ALookup<uint8_t, int32_t> profiles{
+            { 0x60, MPEG2ProfileSimple  },
+            { 0x61, MPEG2ProfileMain    },
+            { 0x62, MPEG2ProfileSNR     },
+            { 0x63, MPEG2ProfileSpatial },
+            { 0x64, MPEG2ProfileHigh    },
+            { 0x65, MPEG2Profile422     },
         };
 
-        OMX_VIDEO_MPEG2PROFILETYPE profile;
+        int32_t profile;
         if (profiles.map(objType, &profile)) {
             format->setInt32("profile", profile);
         }
@@ -492,82 +509,82 @@
         const uint8_t indication = seq[4];
 
         const static ALookup<uint8_t,
-                std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE>> table {
-            { 0b00000001, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level1  } },
-            { 0b00000010, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level2  } },
-            { 0b00000011, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level3  } },
-            { 0b00000100, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level4a } },
-            { 0b00000101, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level5  } },
-            { 0b00000110, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level6  } },
-            { 0b00001000, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level0  } },
-            { 0b00001001, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level0b } },
-            { 0b00010000, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level0  } },
-            { 0b00010001, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level1  } },
-            { 0b00010010, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level2  } },
+                std::pair<int32_t, int32_t>> table {
+            { 0b00000001, { MPEG4ProfileSimple,            MPEG4Level1  } },
+            { 0b00000010, { MPEG4ProfileSimple,            MPEG4Level2  } },
+            { 0b00000011, { MPEG4ProfileSimple,            MPEG4Level3  } },
+            { 0b00000100, { MPEG4ProfileSimple,            MPEG4Level4a } },
+            { 0b00000101, { MPEG4ProfileSimple,            MPEG4Level5  } },
+            { 0b00000110, { MPEG4ProfileSimple,            MPEG4Level6  } },
+            { 0b00001000, { MPEG4ProfileSimple,            MPEG4Level0  } },
+            { 0b00001001, { MPEG4ProfileSimple,            MPEG4Level0b } },
+            { 0b00010000, { MPEG4ProfileSimpleScalable,    MPEG4Level0  } },
+            { 0b00010001, { MPEG4ProfileSimpleScalable,    MPEG4Level1  } },
+            { 0b00010010, { MPEG4ProfileSimpleScalable,    MPEG4Level2  } },
             /* unsupported
-            { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level0  } },
-            { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level1  } },
-            { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level2  } }, */
-            { 0b00100001, { OMX_VIDEO_MPEG4ProfileCore,              OMX_VIDEO_MPEG4Level1  } },
-            { 0b00100010, { OMX_VIDEO_MPEG4ProfileCore,              OMX_VIDEO_MPEG4Level2  } },
-            { 0b00110010, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level2  } },
-            { 0b00110011, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level3  } },
-            { 0b00110100, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level4  } },
+            { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER,        MPEG4Level0  } },
+            { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER,        MPEG4Level1  } },
+            { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER,        MPEG4Level2  } }, */
+            { 0b00100001, { MPEG4ProfileCore,              MPEG4Level1  } },
+            { 0b00100010, { MPEG4ProfileCore,              MPEG4Level2  } },
+            { 0b00110010, { MPEG4ProfileMain,              MPEG4Level2  } },
+            { 0b00110011, { MPEG4ProfileMain,              MPEG4Level3  } },
+            { 0b00110100, { MPEG4ProfileMain,              MPEG4Level4  } },
             /* deprecated
-            { 0b01000010, { OMX_VIDEO_MPEG4ProfileNbit,              OMX_VIDEO_MPEG4Level2  } }, */
-            { 0b01010001, { OMX_VIDEO_MPEG4ProfileScalableTexture,   OMX_VIDEO_MPEG4Level1  } },
-            { 0b01100001, { OMX_VIDEO_MPEG4ProfileSimpleFace,        OMX_VIDEO_MPEG4Level1  } },
-            { 0b01100010, { OMX_VIDEO_MPEG4ProfileSimpleFace,        OMX_VIDEO_MPEG4Level2  } },
-            { 0b01100011, { OMX_VIDEO_MPEG4ProfileSimpleFBA,         OMX_VIDEO_MPEG4Level1  } },
-            { 0b01100100, { OMX_VIDEO_MPEG4ProfileSimpleFBA,         OMX_VIDEO_MPEG4Level2  } },
-            { 0b01110001, { OMX_VIDEO_MPEG4ProfileBasicAnimated,     OMX_VIDEO_MPEG4Level1  } },
-            { 0b01110010, { OMX_VIDEO_MPEG4ProfileBasicAnimated,     OMX_VIDEO_MPEG4Level2  } },
-            { 0b10000001, { OMX_VIDEO_MPEG4ProfileHybrid,            OMX_VIDEO_MPEG4Level1  } },
-            { 0b10000010, { OMX_VIDEO_MPEG4ProfileHybrid,            OMX_VIDEO_MPEG4Level2  } },
-            { 0b10010001, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level1  } },
-            { 0b10010010, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level2  } },
-            { 0b10010011, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level3  } },
-            { 0b10010100, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level4  } },
-            { 0b10100001, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level1  } },
-            { 0b10100010, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level2  } },
-            { 0b10100011, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level3  } },
-            { 0b10110001, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level1  } },
-            { 0b10110010, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level2  } },
-            { 0b10110011, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level3  } },
-            { 0b10110100, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level4  } },
-            { 0b11000001, { OMX_VIDEO_MPEG4ProfileAdvancedCore,      OMX_VIDEO_MPEG4Level1  } },
-            { 0b11000010, { OMX_VIDEO_MPEG4ProfileAdvancedCore,      OMX_VIDEO_MPEG4Level2  } },
-            { 0b11010001, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level1  } },
-            { 0b11010010, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level2  } },
-            { 0b11010011, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level3  } },
+            { 0b01000010, { MPEG4ProfileNbit,              MPEG4Level2  } }, */
+            { 0b01010001, { MPEG4ProfileScalableTexture,   MPEG4Level1  } },
+            { 0b01100001, { MPEG4ProfileSimpleFace,        MPEG4Level1  } },
+            { 0b01100010, { MPEG4ProfileSimpleFace,        MPEG4Level2  } },
+            { 0b01100011, { MPEG4ProfileSimpleFBA,         MPEG4Level1  } },
+            { 0b01100100, { MPEG4ProfileSimpleFBA,         MPEG4Level2  } },
+            { 0b01110001, { MPEG4ProfileBasicAnimated,     MPEG4Level1  } },
+            { 0b01110010, { MPEG4ProfileBasicAnimated,     MPEG4Level2  } },
+            { 0b10000001, { MPEG4ProfileHybrid,            MPEG4Level1  } },
+            { 0b10000010, { MPEG4ProfileHybrid,            MPEG4Level2  } },
+            { 0b10010001, { MPEG4ProfileAdvancedRealTime,  MPEG4Level1  } },
+            { 0b10010010, { MPEG4ProfileAdvancedRealTime,  MPEG4Level2  } },
+            { 0b10010011, { MPEG4ProfileAdvancedRealTime,  MPEG4Level3  } },
+            { 0b10010100, { MPEG4ProfileAdvancedRealTime,  MPEG4Level4  } },
+            { 0b10100001, { MPEG4ProfileCoreScalable,      MPEG4Level1  } },
+            { 0b10100010, { MPEG4ProfileCoreScalable,      MPEG4Level2  } },
+            { 0b10100011, { MPEG4ProfileCoreScalable,      MPEG4Level3  } },
+            { 0b10110001, { MPEG4ProfileAdvancedCoding,    MPEG4Level1  } },
+            { 0b10110010, { MPEG4ProfileAdvancedCoding,    MPEG4Level2  } },
+            { 0b10110011, { MPEG4ProfileAdvancedCoding,    MPEG4Level3  } },
+            { 0b10110100, { MPEG4ProfileAdvancedCoding,    MPEG4Level4  } },
+            { 0b11000001, { MPEG4ProfileAdvancedCore,      MPEG4Level1  } },
+            { 0b11000010, { MPEG4ProfileAdvancedCore,      MPEG4Level2  } },
+            { 0b11010001, { MPEG4ProfileAdvancedScalable,  MPEG4Level1  } },
+            { 0b11010010, { MPEG4ProfileAdvancedScalable,  MPEG4Level2  } },
+            { 0b11010011, { MPEG4ProfileAdvancedScalable,  MPEG4Level3  } },
             /* unsupported
-            { 0b11100001, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level1  } },
-            { 0b11100010, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level2  } },
-            { 0b11100011, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level3  } },
-            { 0b11100100, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level4  } },
-            { 0b11100101, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level1  } },
-            { 0b11100110, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level2  } },
-            { 0b11100111, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level3  } },
-            { 0b11101000, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level4  } },
-            { 0b11101011, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level5  } },
-            { 0b11101100, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level6  } }, */
-            { 0b11110000, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level0  } },
-            { 0b11110001, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level1  } },
-            { 0b11110010, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level2  } },
-            { 0b11110011, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level3  } },
-            { 0b11110100, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level4  } },
-            { 0b11110101, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level5  } },
-            { 0b11110111, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level3b } },
+            { 0b11100001, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level1  } },
+            { 0b11100010, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level2  } },
+            { 0b11100011, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level3  } },
+            { 0b11100100, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level4  } },
+            { 0b11100101, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level1  } },
+            { 0b11100110, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level2  } },
+            { 0b11100111, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level3  } },
+            { 0b11101000, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level4  } },
+            { 0b11101011, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level5  } },
+            { 0b11101100, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level6  } }, */
+            { 0b11110000, { MPEG4ProfileAdvancedSimple,    MPEG4Level0  } },
+            { 0b11110001, { MPEG4ProfileAdvancedSimple,    MPEG4Level1  } },
+            { 0b11110010, { MPEG4ProfileAdvancedSimple,    MPEG4Level2  } },
+            { 0b11110011, { MPEG4ProfileAdvancedSimple,    MPEG4Level3  } },
+            { 0b11110100, { MPEG4ProfileAdvancedSimple,    MPEG4Level4  } },
+            { 0b11110101, { MPEG4ProfileAdvancedSimple,    MPEG4Level5  } },
+            { 0b11110111, { MPEG4ProfileAdvancedSimple,    MPEG4Level3b } },
             /* deprecated
-            { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level0  } },
-            { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level1  } },
-            { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level2  } },
-            { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level3  } },
-            { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level4  } },
-            { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level5  } }, */
+            { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level0  } },
+            { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level1  } },
+            { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level2  } },
+            { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level3  } },
+            { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level4  } },
+            { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level5  } }, */
         };
 
-        std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE> profileLevel;
+        std::pair<int32_t, int32_t> profileLevel;
         if (table.map(indication, &profileLevel)) {
             format->setInt32("profile", profileLevel.first);
             format->setInt32("level", profileLevel.second);
@@ -590,23 +607,32 @@
         switch (id) {
             case 1 /* profileId */:
                 if (length >= 1) {
-                    const static ALookup<uint8_t, OMX_VIDEO_VP9PROFILETYPE> profiles {
-                        { 0, OMX_VIDEO_VP9Profile0 },
-                        { 1, OMX_VIDEO_VP9Profile1 },
-                        { 2, OMX_VIDEO_VP9Profile2 },
-                        { 3, OMX_VIDEO_VP9Profile3 },
+                    const static ALookup<uint8_t, int32_t> profiles {
+                        { 0, VP9Profile0 },
+                        { 1, VP9Profile1 },
+                        { 2, VP9Profile2 },
+                        { 3, VP9Profile3 },
                     };
 
-                    const static ALookup<OMX_VIDEO_VP9PROFILETYPE, OMX_VIDEO_VP9PROFILETYPE> toHdr {
-                        { OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Profile2HDR },
-                        { OMX_VIDEO_VP9Profile3, OMX_VIDEO_VP9Profile3HDR },
+                    const static ALookup<int32_t, int32_t> toHdr10 {
+                        { VP9Profile2, VP9Profile2HDR },
+                        { VP9Profile3, VP9Profile3HDR },
                     };
 
-                    OMX_VIDEO_VP9PROFILETYPE profile;
+                    const static ALookup<int32_t, int32_t> toHdr10Plus {
+                        { VP9Profile2, VP9Profile2HDR10Plus },
+                        { VP9Profile3, VP9Profile3HDR10Plus },
+                    };
+
+                    int32_t profile;
                     if (profiles.map(data[0], &profile)) {
                         // convert to HDR profile
-                        if (isHdr(format)) {
-                            toHdr.lookup(profile, &profile);
+                        if (isHdr10or10Plus(format)) {
+                            if (format->contains("hdr10-plus-info")) {
+                                toHdr10Plus.lookup(profile, &profile);
+                            } else {
+                                toHdr10.lookup(profile, &profile);
+                            }
                         }
 
                         format->setInt32("profile", profile);
@@ -615,24 +641,24 @@
                 break;
             case 2 /* levelId */:
                 if (length >= 1) {
-                    const static ALookup<uint8_t, OMX_VIDEO_VP9LEVELTYPE> levels {
-                        { 10, OMX_VIDEO_VP9Level1  },
-                        { 11, OMX_VIDEO_VP9Level11 },
-                        { 20, OMX_VIDEO_VP9Level2  },
-                        { 21, OMX_VIDEO_VP9Level21 },
-                        { 30, OMX_VIDEO_VP9Level3  },
-                        { 31, OMX_VIDEO_VP9Level31 },
-                        { 40, OMX_VIDEO_VP9Level4  },
-                        { 41, OMX_VIDEO_VP9Level41 },
-                        { 50, OMX_VIDEO_VP9Level5  },
-                        { 51, OMX_VIDEO_VP9Level51 },
-                        { 52, OMX_VIDEO_VP9Level52 },
-                        { 60, OMX_VIDEO_VP9Level6  },
-                        { 61, OMX_VIDEO_VP9Level61 },
-                        { 62, OMX_VIDEO_VP9Level62 },
+                    const static ALookup<uint8_t, int32_t> levels {
+                        { 10, VP9Level1  },
+                        { 11, VP9Level11 },
+                        { 20, VP9Level2  },
+                        { 21, VP9Level21 },
+                        { 30, VP9Level3  },
+                        { 31, VP9Level31 },
+                        { 40, VP9Level4  },
+                        { 41, VP9Level41 },
+                        { 50, VP9Level5  },
+                        { 51, VP9Level51 },
+                        { 52, VP9Level52 },
+                        { 60, VP9Level6  },
+                        { 61, VP9Level61 },
+                        { 62, VP9Level62 },
                     };
 
-                    OMX_VIDEO_VP9LEVELTYPE level;
+                    int32_t level;
                     if (levels.map(data[0], &level)) {
                         format->setInt32("level", level);
                     }
@@ -666,7 +692,7 @@
     int32_t profile;
     if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
         // bump to HDR profile
-        if (isHdr(format) && profile == AV1ProfileMain10) {
+        if (isHdr10or10Plus(format) && profile == AV1ProfileMain10) {
             if (format->contains("hdr10-plus-info")) {
                 profile = AV1ProfileMain10HDR10Plus;
             } else {
@@ -1504,10 +1530,21 @@
         msg->setBuffer("csd-0", buffer);
     }
 
-    if (meta->findData(kKeyDVCC, &type, &data, &size)) {
+    if (meta->findData(kKeyDVCC, &type, &data, &size)
+            || meta->findData(kKeyDVVC, &type, &data, &size)
+            || meta->findData(kKeyDVWC, &type, &data, &size)) {
         const uint8_t *ptr = (const uint8_t *)data;
         ALOGV("DV: calling parseDolbyVisionProfileLevelFromDvcc with data size %zu", size);
         parseDolbyVisionProfileLevelFromDvcc(ptr, size, msg);
+        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+        if (buffer.get() == nullptr || buffer->base() == nullptr) {
+            return NO_MEMORY;
+        }
+        memcpy(buffer->data(), data, size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setBuffer("csd-2", buffer);
     }
 
     *format = msg;
@@ -1759,24 +1796,39 @@
     if (mime.startsWith("video/") || mime.startsWith("image/")) {
         int32_t width;
         int32_t height;
-        if (msg->findInt32("width", &width) && msg->findInt32("height", &height)) {
-            meta->setInt32(kKeyWidth, width);
-            meta->setInt32(kKeyHeight, height);
-        } else {
+        if (!msg->findInt32("width", &width) || !msg->findInt32("height", &height)) {
             ALOGV("did not find width and/or height");
             return BAD_VALUE;
         }
+        if (width <= 0 || height <= 0) {
+            ALOGE("Invalid value of width: %d and/or height: %d", width, height);
+            return BAD_VALUE;
+        }
+        meta->setInt32(kKeyWidth, width);
+        meta->setInt32(kKeyHeight, height);
 
-        int32_t sarWidth, sarHeight;
-        if (msg->findInt32("sar-width", &sarWidth)
-                && msg->findInt32("sar-height", &sarHeight)) {
+        int32_t sarWidth = -1, sarHeight = -1;
+        bool foundWidth, foundHeight;
+        foundWidth = msg->findInt32("sar-width", &sarWidth);
+        foundHeight = msg->findInt32("sar-height", &sarHeight);
+        if (foundWidth || foundHeight) {
+            if (sarWidth <= 0 || sarHeight <= 0) {
+                ALOGE("Invalid value of sarWidth: %d and/or sarHeight: %d", sarWidth, sarHeight);
+                return BAD_VALUE;
+            }
             meta->setInt32(kKeySARWidth, sarWidth);
             meta->setInt32(kKeySARHeight, sarHeight);
         }
 
-        int32_t displayWidth, displayHeight;
-        if (msg->findInt32("display-width", &displayWidth)
-                && msg->findInt32("display-height", &displayHeight)) {
+        int32_t displayWidth = -1, displayHeight = -1;
+        foundWidth = msg->findInt32("display-width", &displayWidth);
+        foundHeight = msg->findInt32("display-height", &displayHeight);
+        if (foundWidth || foundHeight) {
+            if (displayWidth <= 0 || displayHeight <= 0) {
+                ALOGE("Invalid value of displayWidth: %d and/or displayHeight: %d",
+                        displayWidth, displayHeight);
+                return BAD_VALUE;
+            }
             meta->setInt32(kKeyDisplayWidth, displayWidth);
             meta->setInt32(kKeyDisplayHeight, displayHeight);
         }
@@ -1786,17 +1838,29 @@
             if (msg->findInt32("is-default", &isPrimary) && isPrimary) {
                 meta->setInt32(kKeyTrackIsDefault, 1);
             }
-            int32_t tileWidth, tileHeight, gridRows, gridCols;
-            if (msg->findInt32("tile-width", &tileWidth)) {
+            int32_t tileWidth = -1, tileHeight = -1;
+            foundWidth = msg->findInt32("tile-width", &tileWidth);
+            foundHeight = msg->findInt32("tile-height", &tileHeight);
+            if (foundWidth || foundHeight) {
+                if (tileWidth <= 0 || tileHeight <= 0) {
+                    ALOGE("Invalid value of tileWidth: %d and/or tileHeight: %d",
+                            tileWidth, tileHeight);
+                    return BAD_VALUE;
+                }
                 meta->setInt32(kKeyTileWidth, tileWidth);
-            }
-            if (msg->findInt32("tile-height", &tileHeight)) {
                 meta->setInt32(kKeyTileHeight, tileHeight);
             }
-            if (msg->findInt32("grid-rows", &gridRows)) {
+            int32_t gridRows = -1, gridCols = -1;
+            bool foundRows, foundCols;
+            foundRows = msg->findInt32("grid-rows", &gridRows);
+            foundCols = msg->findInt32("grid-cols", &gridCols);
+            if (foundRows || foundCols) {
+                if (gridRows <= 0 || gridCols <= 0) {
+                    ALOGE("Invalid value of gridRows: %d and/or gridCols: %d",
+                            gridRows, gridCols);
+                    return BAD_VALUE;
+                }
                 meta->setInt32(kKeyGridRows, gridRows);
-            }
-            if (msg->findInt32("grid-cols", &gridCols)) {
                 meta->setInt32(kKeyGridCols, gridCols);
             }
         }
@@ -1812,6 +1876,14 @@
                           &cropTop,
                           &cropRight,
                           &cropBottom)) {
+            if (cropLeft < 0 || cropLeft > cropRight || cropRight >= width) {
+                ALOGE("Invalid value of cropLeft: %d and/or cropRight: %d", cropLeft, cropRight);
+                return BAD_VALUE;
+            }
+            if (cropTop < 0 || cropTop > cropBottom || cropBottom >= height) {
+                ALOGE("Invalid value of cropTop: %d and/or cropBottom: %d", cropTop, cropBottom);
+                return BAD_VALUE;
+            }
             meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
         }
 
@@ -1855,9 +1927,16 @@
             ALOGV("did not find channel-count and/or sample-rate");
             return BAD_VALUE;
         }
+        // channel count can be zero in some cases, e.g. for MPEG-H
+        if (sampleRate <= 0 || numChannels < 0) {
+            ALOGE("Invalid value of channel-count: %d and/or sample-rate: %d",
+                   numChannels, sampleRate);
+            return BAD_VALUE;
+        }
         meta->setInt32(kKeyChannelCount, numChannels);
         meta->setInt32(kKeySampleRate, sampleRate);
         int32_t bitsPerSample;
+        // TODO(b/204430952): add an appropriate bounds check for bitsPerSample
         if (msg->findInt32("bits-per-sample", &bitsPerSample)) {
             meta->setInt32(kKeyBitsPerSample, bitsPerSample);
         }
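
The size-related checks added above all follow one pattern: if either key of a pair is present, both must be present and strictly positive, and the -1 defaults make a missing partner fail the check. A hypothetical helper (not part of this patch) capturing the same rule:

    #include <cstdint>

    // Returns true when a pair of keys is either fully absent or fully present
    // with positive values, mirroring the width/height, sar, display, tile and
    // grid checks above.
    static bool isValidDimensionPair(bool foundA, bool foundB, int32_t a, int32_t b) {
        if (!foundA && !foundB) {
            return true;              // pair absent entirely: nothing to validate
        }
        return a > 0 && b > 0;        // -1 defaults catch "only one key present"
    }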
@@ -1967,30 +2046,147 @@
                    mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
             meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
         } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
-            if (msg->findBuffer("csd-2", &csd2)) {
-                //dvcc should be 24
-                if (csd2->size() == 24) {
-                    meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
-                    uint8_t *dvcc = csd2->data();
-                    const uint8_t profile = dvcc[2] >> 1;
-                    if (profile > 1 && profile < 9) {
-                        std::vector<uint8_t> hvcc(csd0size + 1024);
-                        size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
-                        meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
-                    } else if (DolbyVisionProfileDvav110 == profile) {
-                        meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
-                    } else {
-                        sp<ABuffer> csd1;
-                        if (msg->findBuffer("csd-1", &csd1)) {
-                            std::vector<char> avcc(csd0size + csd1->size() + 1024);
-                            size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
-                            meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
-                        }
-                    }
+            int32_t profile = -1;
+            uint8_t blCompatibilityId = -1;
+            int32_t level = 0;
+            uint8_t profileVal = -1;
+            uint8_t profileVal1 = -1;
+            uint8_t profileVal2 = -1;
+            constexpr size_t dvccSize = 24;
+
+            const ALookup<uint8_t, int32_t> &profiles =
+                getDolbyVisionProfileTable();
+            const ALookup<uint8_t, int32_t> &levels =
+                getDolbyVisionLevelsTable();
+
+            if (!msg->findBuffer("csd-2", &csd2)) {
+                // MP4 extractors are expected to generate a csd buffer,
+                // but some encoders might not generate it, in which case
+                // we populate the track metadata dv(cc|vc|wc) from the
+                // 'profile' and 'level' info, as laid out in the
+                // Dolby Vision ISOBMFF spec.
+
+                if (!msg->findInt32("profile", &profile)) {
+                    ALOGE("Dolby Vision profile not found");
+                    return BAD_VALUE;
                 }
+                msg->findInt32("level", &level);
+
+                if (profile == DolbyVisionProfileDvheSt) {
+                    if (!profiles.rlookup(DolbyVisionProfileDvheSt, &profileVal)) { // dvhe.08
+                        ALOGE("Dolby Vision profile lookup error");
+                        return BAD_VALUE;
+                    }
+                    blCompatibilityId = 4;
+                } else if (profile == DolbyVisionProfileDvavSe) {
+                    if (!profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal)) { // dvav.09
+                        ALOGE("Dolby Vision profile lookup error");
+                        return BAD_VALUE;
+                    }
+                    blCompatibilityId = 2;
+                } else {
+                    ALOGE("Dolby Vision profile look up error");
+                    return BAD_VALUE;
+                }
+
+                profile = (int32_t) profileVal;
+
+                uint8_t level_val = 0;
+                if (!levels.map(level, &level_val)) {
+                    ALOGE("Dolby Vision level lookup error");
+                    return BAD_VALUE;
+                }
+
+                std::vector<uint8_t> dvcc(dvccSize);
+
+                dvcc[0] = 1; // major version
+                dvcc[1] = 0; // minor version
+                dvcc[2] = (uint8_t)((profile & 0x7f) << 1); // dolby vision profile
+                dvcc[2] = (uint8_t)((dvcc[2] | (uint8_t)((level_val >> 5) & 0x1)) & 0xff);
+                dvcc[3] = (uint8_t)((level_val & 0x1f) << 3); // dolby vision level
+                dvcc[3] = (uint8_t)(dvcc[3] | (1 << 2)); // rpu_present_flag
+                dvcc[3] = (uint8_t)(dvcc[3] | (1)); // bl_present_flag
+                dvcc[4] = (uint8_t)(blCompatibilityId << 4); // bl_compatibility id
+
+                profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+                profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+                if (profile > (int32_t) profileVal) {
+                    meta->setData(kKeyDVWC, kTypeDVWC, dvcc.data(), dvccSize);
+                } else if (profile > (int32_t) profileVal1) {
+                    meta->setData(kKeyDVVC, kTypeDVVC, dvcc.data(), dvccSize);
+                } else {
+                    meta->setData(kKeyDVCC, kTypeDVCC, dvcc.data(), dvccSize);
+                }
+
             } else {
-                ALOGE("We need csd-2!!. %s", msg->debugString().c_str());
-                return BAD_VALUE;
+                // we have csd-2, just use that to populate dvcc
+                if (csd2->size() == dvccSize) {
+                    uint8_t *dvcc = csd2->data();
+                    profile = dvcc[2] >> 1;
+
+                    profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+                    profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+                    if (profile > (int32_t) profileVal) {
+                        meta->setData(kKeyDVWC, kTypeDVWC, csd2->data(), csd2->size());
+                    } else if (profile > (int32_t) profileVal1) {
+                        meta->setData(kKeyDVVC, kTypeDVVC, csd2->data(), csd2->size());
+                    } else {
+                        meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
+                    }
+
+                } else {
+                    ALOGE("Convert MessageToMetadata csd-2 is present but not valid");
+                    return BAD_VALUE;
+                }
+            }
+            profiles.rlookup(DolbyVisionProfileDvavPen, &profileVal);
+            profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal1);
+            profiles.rlookup(DolbyVisionProfileDvav110, &profileVal2);
+            if ((profile > (int32_t) profileVal) && (profile < (int32_t) profileVal1)) {
+                std::vector<uint8_t> hvcc(csd0size + 1024);
+                size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
+                meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
+            } else if (profile == (int32_t) profileVal2) {
+                meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+            } else {
+                sp<ABuffer> csd1;
+                if (msg->findBuffer("csd-1", &csd1)) {
+                    std::vector<char> avcc(csd0size + csd1->size() + 1024);
+                    size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
+                    meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+                } else {
+                    // for Dolby Vision AVC, csd-0 also holds csd-1
+                    size_t i = 0;
+                    int csd0realsize = 0;
+                    do {
+                        i = findNextNalStartCode(csd0->data() + i,
+                                        csd0->size() - i) - csd0->data();
+                        if (i > 0) {
+                            csd0realsize = i;
+                            break;
+                        }
+                        i += 4;
+                    } while(i < csd0->size());
+                    // buffer0 -> csd0
+                    sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
+                    if (buffer0.get() == NULL || buffer0->base() == NULL) {
+                        return NO_MEMORY;
+                    }
+                    memcpy(buffer0->data(), csd0->data(), csd0realsize);
+                    // buffer1 -> csd1
+                    sp<ABuffer> buffer1 = new (std::nothrow)
+                            ABuffer(csd0->size() - csd0realsize);
+                    if (buffer1.get() == NULL || buffer1->base() == NULL) {
+                        return NO_MEMORY;
+                    }
+                    memcpy(buffer1->data(), csd0->data()+csd0realsize,
+                                csd0->size() - csd0realsize);
+
+                    std::vector<char> avcc(csd0->size() + 1024);
+                    size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
+                    meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+                }
             }
         } else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
             meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
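
As a worked example of the dvcc packing above: for DolbyVisionProfileDvheSt the table lookup yields the dvhe.08 profile number (8, per the inline comment) and blCompatibilityId is 4, so with a mapped level_val of 5 the box bytes come out as dvcc[2] = (8 << 1) | (5 >> 5) = 0x10, dvcc[3] = ((5 & 0x1f) << 3) | (1 << 2) | 1 = 0x2d, and dvcc[4] = 4 << 4 = 0x40.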
@@ -2131,29 +2327,29 @@
 }
 
 struct aac_format_conv_t {
-    OMX_AUDIO_AACPROFILETYPE eAacProfileType;
+    int32_t eAacProfileType;
     audio_format_t format;
 };
 
 static const struct aac_format_conv_t profileLookup[] = {
-    { OMX_AUDIO_AACObjectMain,        AUDIO_FORMAT_AAC_MAIN},
-    { OMX_AUDIO_AACObjectLC,          AUDIO_FORMAT_AAC_LC},
-    { OMX_AUDIO_AACObjectSSR,         AUDIO_FORMAT_AAC_SSR},
-    { OMX_AUDIO_AACObjectLTP,         AUDIO_FORMAT_AAC_LTP},
-    { OMX_AUDIO_AACObjectHE,          AUDIO_FORMAT_AAC_HE_V1},
-    { OMX_AUDIO_AACObjectScalable,    AUDIO_FORMAT_AAC_SCALABLE},
-    { OMX_AUDIO_AACObjectERLC,        AUDIO_FORMAT_AAC_ERLC},
-    { OMX_AUDIO_AACObjectLD,          AUDIO_FORMAT_AAC_LD},
-    { OMX_AUDIO_AACObjectHE_PS,       AUDIO_FORMAT_AAC_HE_V2},
-    { OMX_AUDIO_AACObjectELD,         AUDIO_FORMAT_AAC_ELD},
-    { OMX_AUDIO_AACObjectXHE,         AUDIO_FORMAT_AAC_XHE},
-    { OMX_AUDIO_AACObjectNull,        AUDIO_FORMAT_AAC},
+    { AACObjectMain,        AUDIO_FORMAT_AAC_MAIN},
+    { AACObjectLC,          AUDIO_FORMAT_AAC_LC},
+    { AACObjectSSR,         AUDIO_FORMAT_AAC_SSR},
+    { AACObjectLTP,         AUDIO_FORMAT_AAC_LTP},
+    { AACObjectHE,          AUDIO_FORMAT_AAC_HE_V1},
+    { AACObjectScalable,    AUDIO_FORMAT_AAC_SCALABLE},
+    { AACObjectERLC,        AUDIO_FORMAT_AAC_ERLC},
+    { AACObjectLD,          AUDIO_FORMAT_AAC_LD},
+    { AACObjectHE_PS,       AUDIO_FORMAT_AAC_HE_V2},
+    { AACObjectELD,         AUDIO_FORMAT_AAC_ELD},
+    { AACObjectXHE,         AUDIO_FORMAT_AAC_XHE},
+    { AACObjectNull,        AUDIO_FORMAT_AAC},
 };
 
 void mapAACProfileToAudioFormat( audio_format_t& format, uint64_t eAacProfile)
 {
-const struct aac_format_conv_t* p = &profileLookup[0];
-    while (p->eAacProfileType != OMX_AUDIO_AACObjectNull) {
+    const struct aac_format_conv_t* p = &profileLookup[0];
+    while (p->eAacProfileType != AACObjectNull) {
         if (eAacProfile == p->eAacProfileType) {
             format = p->format;
             return;
@@ -2193,7 +2389,7 @@
     // Offloading depends on audio DSP capabilities.
     int32_t aacaot = -1;
     if (meta->findInt32(kKeyAACAOT, &aacaot)) {
-        mapAACProfileToAudioFormat(info->format,(OMX_AUDIO_AACPROFILETYPE) aacaot);
+        mapAACProfileToAudioFormat(info->format, aacaot);
     }
 
     int32_t srate = -1;
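
As a concrete illustration of the table walk in mapAACProfileToAudioFormat(): a kKeyAACAOT equal to AACObjectHE matches that row and sets the offload format to AUDIO_FORMAT_AAC_HE_V1, while an unknown value walks to the AACObjectNull sentinel and falls through to whatever default handling follows the loop (not shown in this hunk).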
diff --git a/media/libstagefright/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
index 4e5b5e2..de8e425 100644
--- a/media/libstagefright/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -22,8 +22,8 @@
 #include <utils/String16.h>
 
 #include <binder/IServiceManager.h>
-#include <gui/ISurfaceComposer.h>
-#include <ui/DisplayStatInfo.h>
+#include <android/gui/ISurfaceComposer.h>
+#include <android/gui/DisplayStatInfo.h>
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
@@ -39,21 +39,22 @@
     mVsyncTime = 0;
     mVsyncPeriod = 0;
 
+    // TODO(b/220021255): wrap this into SurfaceComposerClient
     if (mComposer == NULL) {
-        String16 name("SurfaceFlinger");
+        String16 name("SurfaceFlingerAIDL");
         sp<IServiceManager> sm = defaultServiceManager();
-        mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
+        mComposer = interface_cast<gui::ISurfaceComposer>(sm->checkService(name));
     }
     if (mComposer != NULL) {
-        DisplayStatInfo stats;
-        status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
-        if (res == OK) {
+        gui::DisplayStatInfo stats;
+        binder::Status status = mComposer->getDisplayStats(nullptr/* display */, &stats);
+        if (status.isOk()) {
             ALOGV("vsync time:%lld period:%lld",
                     (long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
             mVsyncTime = stats.vsyncTime;
             mVsyncPeriod = stats.vsyncPeriod;
         } else {
-            ALOGW("getDisplayStats returned %d", res);
+            ALOGW("getDisplayStats returned %d", status.transactionError());
         }
     } else {
         ALOGW("could not get surface mComposer service");
diff --git a/media/libstagefright/codecs/common/Android.bp b/media/libstagefright/codecs/common/Android.bp
index 08691e7..affc837 100644
--- a/media/libstagefright/codecs/common/Android.bp
+++ b/media/libstagefright/codecs/common/Android.bp
@@ -21,6 +21,10 @@
     name: "libstagefright_enc_common",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
     srcs: ["cmnMemory.c"],
 
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index fb6c4e2..bb1cb0b 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -354,7 +354,7 @@
             }
 
             if (mpeg4type->eProfile != OMX_VIDEO_MPEG4ProfileCore ||
-                mpeg4type->eLevel != OMX_VIDEO_MPEG4Level2 ||
+                mpeg4type->eLevel > OMX_VIDEO_MPEG4Level2 ||
                 (mpeg4type->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) ||
                 mpeg4type->nBFrames != 0 ||
                 mpeg4type->nIDCVLCThreshold != 0 ||
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index 06cebd3..7ff9b10 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -25,10 +25,6 @@
         "SoftwareRenderer.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/native/include/media/openmax",
-    ],
-
     shared_libs: [
         "libui",
         "libnativewindow",
@@ -37,6 +33,7 @@
     header_libs: [
         "libstagefright_headers",
         "libstagefright_foundation_headers",
+        "media_plugin_headers",
     ],
 
     static_libs: ["libyuv_static"],
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index c7dc415..4b4f65f 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -23,6 +23,7 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaErrors.h>
 
 #include "libyuv/convert_from.h"
@@ -51,35 +52,183 @@
 static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
     return colorFormat == OMX_COLOR_Format16bitRGB565
             || colorFormat == OMX_COLOR_Format32BitRGBA8888
-            || colorFormat == OMX_COLOR_Format32bitBGRA8888;
+            || colorFormat == OMX_COLOR_Format32bitBGRA8888
+            || colorFormat == COLOR_Format32bitABGR2101010;
 }
 
-bool ColorConverter::ColorSpace::isBt709() {
-    return (mStandard == ColorUtils::kColorStandardBT709);
+bool ColorConverter::ColorSpace::isBt2020() const {
+    return (mStandard == ColorUtils::kColorStandardBT2020);
 }
 
+bool ColorConverter::ColorSpace::isH420() const {
+    return (mStandard == ColorUtils::kColorStandardBT709)
+            && (mRange == ColorUtils::kColorRangeLimited);
+}
 
-bool ColorConverter::ColorSpace::isJpeg() {
+// the matrix coefficients are the same for both 601.625 and 601.525 standards
+bool ColorConverter::ColorSpace::isI420() const {
+    return ((mStandard == ColorUtils::kColorStandardBT601_625)
+            || (mStandard == ColorUtils::kColorStandardBT601_525))
+            && (mRange == ColorUtils::kColorRangeLimited);
+}
+
+bool ColorConverter::ColorSpace::isJ420() const {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
             || (mStandard == ColorUtils::kColorStandardBT601_525))
             && (mRange == ColorUtils::kColorRangeFull);
 }
 
+/**
+ * This class approximates the standard YUV to RGB conversions by factoring the matrix
+ * coefficients to 1/256th-s (as dividing by 256 is easy to do with right shift). The chosen value
+ * of 256 is somewhat arbitrary and was not dependent on the bit-depth, but it does limit the
+ * precision of the matrix coefficients (KR & KB).
+ *
+ * The maximum color error after clipping from using 256 is a distance of:
+ *   0.4 (8-bit) / 1.4 (10-bit) for greens in BT.601
+ *   0.5 (8-bit) / 1.9 (10-bit) for cyans in BT.709, and
+ *   0.3 (8-bit) / 1.3 (10-bit) for violets in BT.2020 (it is 0.4 for 10-bit BT.2020 limited)
+ *
+ * Note for reference: libyuv is using a divisor of 64 instead of 256 to ensure no overflow in
+ * 16-bit math. The maximum color error for libyuv is 3.5 / 14.
+ *
+ * The clamping is done using a lookup vector where negative indices are mapped to 0
+ * and indices > 255 are mapped to 255. (For 10-bit these are clamped to 0 to 1023)
+ *
+ * The matrices are assumed to be of the following format (note the sign on the 2nd row):
+ *
+ * [ R ]     [ _y     0    _r_v ]   [ Y -  C16 ]
+ * [ G ]  =  [ _y  -_g_u  -_g_v ] * [ U - C128 ]
+ * [ B ]     [ _y   _b_u     0  ]   [ V - C128 ]
+ *
+ * C16 is 1 << (bitdepth - 4) for limited range, and 0 for full range
+ * C128 is 1 << (bitdepth - 1)
+ * C255 is (1 << bitdepth) - 1
+ *
+ * The min and max values from these equations determine the clip range needed for clamping:
+ *
+ * min = - (_y * C16 + max((_g_u + _g_v) * (C255-C128), max(_r_v, _b_u) * C128)) / 256
+ * max = (_y * (C255 - C16) + max((_g_u + _g_v) * C128, max(_r_v, _b_u) * (C255-C128)) + 128) / 256
+ */
+
+struct ColorConverter::Coeffs {
+    int32_t _y;
+    int32_t _r_v;
+    int32_t _g_u;
+    int32_t _g_v;
+    int32_t _b_u;
+};
+
+/*
+
+Color conversion rules are dictated by ISO (e.g. ISO:IEC 23008:2)
+
+Limited range means Y is in [16, 235], U and V are in [16, 240], corresponding to [-0.5, 0.5].
+
+Full range means Y is in [0, 255], U and V are in [0, 255]; the nominal [-0.5, 0.5] range maps to [0.5, 255.5].
+
+RGB is always in full range ([0, 255])
+
+The color standard (i.e. its matrix coefficients) determines the KR and KB values:
+
+
+For full range (assuming 8-bits) ISO defines:
+
+(   Y   )   (  KR      1-KR-KB       KB  )
+(       )   (                            )   (R)
+(       )   (-KR/2   -(1-KR-KB)/2        )   ( )
+(U - 128) = (-----   ------------    0.5 ) * (G)
+(       )   ((1-KB)     (1-KB)           )   ( )
+(       )   (                            )   (B)
+(       )   (        -(1-KR-KB)/2  -KB/2 )
+(V - 128)   ( 0.5    ------------  ----- )
+            (           (1-KR)     (1-KR))
+
+(the math is rounded, 128 is (1 << (bitdepth - 1)) )
+
+From this
+
+(R)      ( 1       0        2*(1-KR)   )   (   Y   )
+( )      (                             )   (       )
+( )      (    2*KB*(KB-1)  2*KR*(KR-1) )   (       )
+(G)  =   ( 1  -----------  ----------- ) * (U - 128)
+( )      (      1-KR-KB      1-KR-KB   )   (       )
+( )      (                             )   (       )
+(B)      ( 1   2*(1-KB)         0      )   (V - 128)
+
+For limited range, this becomes
+
+(R)      ( 1       0        2*(1-KR)   )   (255/219  0  0)   (Y -  16)
+( )      (                             )   (             )   (       )
+( )      (    2*KB*(KB-1)  2*KR*(KR-1) )   (             )   (       )
+(G)  =   ( 1  -----------  ----------- ) * (0  255/224  0) * (U - 128)
+( )      (      1-KR-KB      1-KR-KB   )   (             )   (       )
+( )      (                             )   (             )   (       )
+(B)      ( 1   2*(1-KB)         0      )   (0  0  255/224)   (V - 128)
+
+( For non-8-bit, 16 is (1 << (bitdepth - 4)), 128 is (1 << (bitdepth - 1)),
+  255 is ((1 << bitdepth) - 1), 219 is (219 << (bitdepth - 8)) and
+  224 is (224 << (bitdepth - 8)), so the matrix coefficients slightly change. )
+
+*/
+
+namespace {
+
+/**
+ * BT.601:  K_R = 0.299;  K_B = 0.114
+ *
+ * clip range 8-bit: [-277, 535], 10-bit: [-1111, 2155]
+ */
+const struct ColorConverter::Coeffs BT601_FULL      = { 256, 359,  88, 183, 454 };
+const struct ColorConverter::Coeffs BT601_LIMITED   = { 298, 409, 100, 208, 516 };
+const struct ColorConverter::Coeffs BT601_LTD_10BIT = { 299, 410, 101, 209, 518 };
+
+/**
+ * BT.709:  K_R = 0.2126; K_B = 0.0722
+ *
+ * clip range 8-bit: [-289, 547], 10-bit: [-1159, 2202]
+ */
+const struct ColorConverter::Coeffs BT709_FULL      = { 256, 403,  48, 120, 475 };
+const struct ColorConverter::Coeffs BT709_LIMITED   = { 298, 459,  55, 136, 541 };
+const struct ColorConverter::Coeffs BT709_LTD_10BIT = { 299, 460,  55, 137, 542 };
+
+/**
+ * BT.2020:  K_R = 0.2627; K_B = 0.0593
+ *
+ * clip range 8-bit: [-294, 552], 10-bit: [-1175, 2218]
+ *
+ * This is the largest clip range.
+ */
+const struct ColorConverter::Coeffs BT2020_FULL      = { 256, 377,  42, 146, 482 };
+const struct ColorConverter::Coeffs BT2020_LIMITED   = { 298, 430,  48, 167, 548 };
+const struct ColorConverter::Coeffs BT2020_LTD_10BIT = { 299, 431,  48, 167, 550 };
+
+constexpr int CLIP_RANGE_MIN_8BIT = -294;
+constexpr int CLIP_RANGE_MAX_8BIT = 552;
+
+constexpr int CLIP_RANGE_MIN_10BIT = -1175;
+constexpr int CLIP_RANGE_MAX_10BIT = 2218;
+
+}
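
The constants above can be reproduced from the formulas in the preceding comment. A standalone sketch (illustrative only, not part of this change; names are hypothetical) deriving one row of the table from KR/KB, the bit depth and the range:

    #include <cmath>
    #include <cstdio>

    struct DerivedCoeffs { int y, r_v, g_u, g_v, b_u; };

    // Derive the fixed-point (1/256) coefficients from KR/KB per the comment above.
    static DerivedCoeffs deriveCoeffs(double kr, double kb, int bitdepth, bool fullRange) {
        const double maxVal = (1 << bitdepth) - 1;
        const double yScale = fullRange ? 1.0 : maxVal / (219 << (bitdepth - 8));
        const double cScale = fullRange ? 1.0 : maxVal / (224 << (bitdepth - 8));
        const double kg = 1.0 - kr - kb;
        return {
            (int)std::lround(256 * yScale),
            (int)std::lround(256 * cScale * 2 * (1 - kr)),            // _r_v
            (int)std::lround(256 * cScale * 2 * kb * (1 - kb) / kg),  // _g_u
            (int)std::lround(256 * cScale * 2 * kr * (1 - kr) / kg),  // _g_v
            (int)std::lround(256 * cScale * 2 * (1 - kb)),            // _b_u
        };
    }

    int main() {
        // BT.709 (KR = 0.2126, KB = 0.0722), 10-bit limited range
        DerivedCoeffs c = deriveCoeffs(0.2126, 0.0722, 10, false);
        printf("{ %d, %d, %d, %d, %d }\n", c.y, c.r_v, c.g_u, c.g_v, c.b_u);
        // expected: { 299, 460, 55, 137, 542 }
        return 0;
    }

The same routine reproduces the other rows, e.g. BT601_LTD_10BIT as { 299, 410, 101, 209, 518 } and BT2020_LTD_10BIT as { 299, 431, 48, 167, 550 }.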
+
 ColorConverter::ColorConverter(
         OMX_COLOR_FORMATTYPE from, OMX_COLOR_FORMATTYPE to)
     : mSrcFormat(from),
       mDstFormat(to),
       mSrcColorSpace({0, 0, 0}),
-      mClip(NULL) {
+      mClip(NULL),
+      mClip10Bit(NULL) {
 }
 
 ColorConverter::~ColorConverter() {
     delete[] mClip;
     mClip = NULL;
+    delete[] mClip10Bit;
+    mClip10Bit = NULL;
 }
 
 bool ColorConverter::isValid() const {
-    switch (mSrcFormat) {
+    switch ((int32_t)mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar16:
             if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
                 return true;
@@ -98,10 +247,13 @@
         case OMX_COLOR_FormatYUV420SemiPlanar:
 #ifdef USE_LIBYUV
             return mDstFormat == OMX_COLOR_Format16bitRGB565
-                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888;
+                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+                    || mDstFormat == OMX_COLOR_Format32bitBGRA8888;
 #else
             return mDstFormat == OMX_COLOR_Format16bitRGB565;
 #endif
+        case COLOR_FormatYUVP010:
+            return mDstFormat == COLOR_Format32bitABGR2101010;
 
         default:
             return false;
@@ -143,9 +295,10 @@
       mCropTop(cropTop),
       mCropRight(cropRight),
       mCropBottom(cropBottom) {
-    switch(mColorFormat) {
+    switch((int32_t)mColorFormat) {
     case OMX_COLOR_Format16bitRGB565:
     case OMX_COLOR_FormatYUV420Planar16:
+    case COLOR_FormatYUVP010:
     case OMX_COLOR_FormatCbYCrY:
         mBpp = 2;
         mStride = 2 * mWidth;
@@ -153,6 +306,7 @@
 
     case OMX_COLOR_Format32bitBGRA8888:
     case OMX_COLOR_Format32BitRGBA8888:
+    case COLOR_Format32bitABGR2101010:
     case OMX_COLOR_FormatYUV444Y410:
         mBpp = 4;
         mStride = 4 * mWidth;
@@ -213,7 +367,7 @@
 
     status_t err;
 
-    switch (mSrcFormat) {
+    switch ((int32_t)mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar:
 #ifdef USE_LIBYUV
             err = convertYUV420PlanarUseLibYUV(src, dst);
@@ -235,6 +389,19 @@
             break;
         }
 
+        case COLOR_FormatYUVP010:
+        {
+#if PERF_PROFILING
+            int64_t startTimeUs = ALooper::GetNowUs();
+#endif
+            err = convertYUVP010(src, dst);
+#if PERF_PROFILING
+            int64_t endTimeUs = ALooper::GetNowUs();
+            ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
+#endif
+            break;
+        }
+
         case OMX_COLOR_FormatCbYCrY:
             err = convertCbYCrY(src, dst);
             break;
@@ -265,10 +432,53 @@
     return err;
 }
 
+const struct ColorConverter::Coeffs *ColorConverter::getMatrix() const {
+    const bool isFullRange = mSrcColorSpace.mRange == ColorUtils::kColorRangeFull;
+    const bool is10Bit = (mSrcFormat == COLOR_FormatYUVP010
+            || mSrcFormat == OMX_COLOR_FormatYUV420Planar16);
+
+    switch (mSrcColorSpace.mStandard) {
+    case ColorUtils::kColorStandardBT601_525:
+    case ColorUtils::kColorStandardBT601_625:
+        return (isFullRange ? &BT601_FULL :
+                is10Bit ? &BT601_LTD_10BIT : &BT601_LIMITED);
+
+    case ColorUtils::kColorStandardBT709:
+        return (isFullRange ? &BT709_FULL :
+                is10Bit ? &BT709_LTD_10BIT : &BT709_LIMITED);
+
+    case ColorUtils::kColorStandardBT2020:
+        return (isFullRange ? &BT2020_FULL :
+                is10Bit ? &BT2020_LTD_10BIT : &BT2020_LIMITED);
+
+    default:
+        // for now use the default matrices for unhandled color spaces
+        // TODO: fail?
+        // return nullptr;
+        [[fallthrough]];
+
+    case ColorUtils::kColorStandardUnspecified:
+        return is10Bit ? &BT2020_LTD_10BIT : &BT601_LIMITED;
+
+    }
+}
+
 status_t ColorConverter::convertCbYCrY(
         const BitmapParams &src, const BitmapParams &dst) {
     // XXX Untested
 
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+
     uint8_t *kAdjustedClip = initClip();
 
     uint16_t *dst_ptr = (uint16_t *)dst.mBits
@@ -279,22 +489,22 @@
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_ptr[2 * x + 1] - 16;
-            signed y2 = (signed)src_ptr[2 * x + 3] - 16;
+            signed y1 = (signed)src_ptr[2 * x + 1] - _c16;
+            signed y2 = (signed)src_ptr[2 * x + 3] - _c16;
             signed u = (signed)src_ptr[2 * x] - 128;
             signed v = (signed)src_ptr[2 * x + 2] - 128;
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * 298;
+            signed tmp1 = y1 * _y + 128;
             signed b1 = (tmp1 + u_b) / 256;
             signed g1 = (tmp1 + v_g + u_g) / 256;
             signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * 298;
+            signed tmp2 = y2 * _y + 128;
             signed b2 = (tmp2 + u_b) / 256;
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
@@ -323,15 +533,32 @@
     return OK;
 }
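
A quick sanity check of the fixed-point math used in convertCbYCrY() and the other scalar converters: with the BT601_LIMITED row (_y = 298) and _c16 = 16, a white pixel Y = 235, U = V = 128 gives tmp = (235 - 16) * 298 + 128 = 65390, so R = G = B = 65390 / 256 = 255 after truncation, while Y = 16 gives 128 / 256 = 0; limited-range black and white land exactly on 0 and 255.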
 
+/*
+    libyuv supports the following color spaces:
+
+    I420: BT.601 limited range
+    J420: BT.601 full range (jpeg)
+    H420: BT.709 limited range
+
+*/
+
 #define DECLARE_YUV2RGBFUNC(func, rgb) int (*func)(     \
-        const uint8_t*, int, const uint8_t*, int,           \
-        const uint8_t*, int, uint8_t*, int, int, int)       \
-        = mSrcColorSpace.isBt709() ? libyuv::H420To##rgb \
-        : mSrcColorSpace.isJpeg() ? libyuv::J420To##rgb  \
+        const uint8_t*, int, const uint8_t*, int,       \
+        const uint8_t*, int, uint8_t*, int, int, int)   \
+        = mSrcColorSpace.isH420() ? libyuv::H420To##rgb \
+        : mSrcColorSpace.isJ420() ? libyuv::J420To##rgb \
         : libyuv::I420To##rgb
 
 status_t ColorConverter::convertYUV420PlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
+    // Fall back to our conversion if libyuv does not support the color space.
+    // I420 (BT.601 limited) is default, so don't fall back if we end up using it anyway.
+    if (!mSrcColorSpace.isH420() && !mSrcColorSpace.isJ420()
+            // && !mSrcColorSpace.isI420() /* same as line below */
+            && getMatrix() != &BT601_LIMITED) {
+        return convertYUV420Planar(src, dst);
+    }
+
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
@@ -379,6 +606,13 @@
 
 status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
         const BitmapParams &src, const BitmapParams &dst) {
+    // Fall back to our conversion if libyuv does not support the color space.
+    // libyuv only supports BT.601 limited range NV12. Don't fall back if we end up using it anyway.
+    if (// !mSrcColorSpace.isI420() && /* same as below */
+        getMatrix() != &BT601_LIMITED) {
+        return convertYUV420SemiPlanar(src, dst);
+    }
+
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
         + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
 
@@ -419,16 +653,16 @@
     case OMX_COLOR_FormatYUV420Planar:
         return [](void *src_y, void *src_u, void *src_v, size_t x,
                   signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = ((uint8_t*)src_y)[x] - 16;
-            *y2 = ((uint8_t*)src_y)[x + 1] - 16;
+            *y1 = ((uint8_t*)src_y)[x];
+            *y2 = ((uint8_t*)src_y)[x + 1];
             *u = ((uint8_t*)src_u)[x / 2] - 128;
             *v = ((uint8_t*)src_v)[x / 2] - 128;
         };
     case OMX_COLOR_FormatYUV420Planar16:
         return [](void *src_y, void *src_u, void *src_v, size_t x,
                 signed *y1, signed *y2, signed *u, signed *v) {
-            *y1 = (signed)(((uint16_t*)src_y)[x] >> 2) - 16;
-            *y2 = (signed)(((uint16_t*)src_y)[x + 1] >> 2) - 16;
+            *y1 = (signed)(((uint16_t*)src_y)[x] >> 2);
+            *y2 = (signed)(((uint16_t*)src_y)[x + 1] >> 2);
             *u = (signed)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
             *v = (signed)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
         };
@@ -438,24 +672,26 @@
     return nullptr;
 }
 
+// TRICKY: this method only supports RGBA_1010102 output for 10-bit sources and the other outputs
+// for 8-bit sources, because the element type of kAdjustedClip is chosen by the output format,
+// not the input format.
 std::function<void (void *, bool, signed, signed, signed, signed, signed, signed)>
-getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, uint8_t *kAdjustedClip) {
-    switch (dstFormat) {
+getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, void *kAdjustedClip) {
+    switch ((int)dstFormat) {
     case OMX_COLOR_Format16bitRGB565:
     {
         return [kAdjustedClip](void *dst_ptr, bool uncropped,
                                signed r1, signed g1, signed b1,
                                signed r2, signed g2, signed b2) {
             uint32_t rgb1 =
-                ((kAdjustedClip[r1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[b1] >> 3);
+                ((((uint8_t *)kAdjustedClip)[r1] >> 3) << 11)
+                | ((((uint8_t *)kAdjustedClip)[g1] >> 2) << 5)
+                | (((uint8_t *)kAdjustedClip)[b1] >> 3);
 
             if (uncropped) {
                 uint32_t rgb2 =
-                    ((kAdjustedClip[r2] >> 3) << 11)
-                    | ((kAdjustedClip[g2] >> 2) << 5)
-                    | (kAdjustedClip[b2] >> 3);
+                    ((((uint8_t *)kAdjustedClip)[r2] >> 3) << 11)
+                    | ((((uint8_t *)kAdjustedClip)[g2] >> 2) << 5)
+                    | (((uint8_t *)kAdjustedClip)[b2] >> 3);
 
                 *(uint32_t *)dst_ptr = (rgb2 << 16) | rgb1;
             } else {
@@ -469,16 +705,16 @@
                                signed r1, signed g1, signed b1,
                                signed r2, signed g2, signed b2) {
             ((uint32_t *)dst_ptr)[0] =
-                    (kAdjustedClip[r1])
-                    | (kAdjustedClip[g1] << 8)
-                    | (kAdjustedClip[b1] << 16)
+                    (((uint8_t *)kAdjustedClip)[r1])
+                    | (((uint8_t *)kAdjustedClip)[g1] << 8)
+                    | (((uint8_t *)kAdjustedClip)[b1] << 16)
                     | (0xFF << 24);
 
             if (uncropped) {
                 ((uint32_t *)dst_ptr)[1] =
-                        (kAdjustedClip[r2])
-                        | (kAdjustedClip[g2] << 8)
-                        | (kAdjustedClip[b2] << 16)
+                        (((uint8_t *)kAdjustedClip)[r2])
+                        | (((uint8_t *)kAdjustedClip)[g2] << 8)
+                        | (((uint8_t *)kAdjustedClip)[b2] << 16)
                         | (0xFF << 24);
             }
         };
@@ -489,20 +725,41 @@
                                signed r1, signed g1, signed b1,
                                signed r2, signed g2, signed b2) {
             ((uint32_t *)dst_ptr)[0] =
-                    (kAdjustedClip[b1])
-                    | (kAdjustedClip[g1] << 8)
-                    | (kAdjustedClip[r1] << 16)
+                    (((uint8_t *)kAdjustedClip)[b1])
+                    | (((uint8_t *)kAdjustedClip)[g1] << 8)
+                    | (((uint8_t *)kAdjustedClip)[r1] << 16)
                     | (0xFF << 24);
 
             if (uncropped) {
                 ((uint32_t *)dst_ptr)[1] =
-                        (kAdjustedClip[b2])
-                        | (kAdjustedClip[g2] << 8)
-                        | (kAdjustedClip[r2] << 16)
+                        (((uint8_t *)kAdjustedClip)[b2])
+                        | (((uint8_t *)kAdjustedClip)[g2] << 8)
+                        | (((uint8_t *)kAdjustedClip)[r2] << 16)
                         | (0xFF << 24);
             }
         };
     }
+    case COLOR_Format32bitABGR2101010:
+    {
+        return [kAdjustedClip](void *dst_ptr, bool uncropped,
+                               signed r1, signed g1, signed b1,
+                               signed r2, signed g2, signed b2) {
+            ((uint32_t *)dst_ptr)[0] =
+                    (((uint16_t *)kAdjustedClip)[r1])
+                    | (((uint16_t *)kAdjustedClip)[g1] << 10)
+                    | (((uint16_t *)kAdjustedClip)[b1] << 20)
+                    | (3 << 30);
+
+            if (uncropped) {
+                ((uint32_t *)dst_ptr)[1] =
+                        (((uint16_t *)kAdjustedClip)[r2])
+                        | (((uint16_t *)kAdjustedClip)[g2] << 10)
+                        | (((uint16_t *)kAdjustedClip)[b2] << 20)
+                        | (3 << 30);
+            }
+        };
+    }
+
     default:
         TRESPASS();
     }
@@ -511,10 +768,22 @@
 
 status_t ColorConverter::convertYUV420Planar(
         const BitmapParams &src, const BitmapParams &dst) {
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+
     uint8_t *kAdjustedClip = initClip();
 
     auto readFromSrc = getReadFromSrc(mSrcFormat);
-    auto writeToDst = getWriteToDst(mDstFormat, kAdjustedClip);
+    auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
             + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
@@ -529,38 +798,20 @@
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            // B = 1.164 * (Y - 16) + 2.018 * (U - 128)
-            // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
-            // R = 1.164 * (Y - 16) + 1.596 * (V - 128)
-
-            // B = 298/256 * (Y - 16) + 517/256 * (U - 128)
-            // G = .................. - 208/256 * (V - 128) - 100/256 * (U - 128)
-            // R = .................. + 409/256 * (V - 128)
-
-            // min_B = (298 * (- 16) + 517 * (- 128)) / 256 = -277
-            // min_G = (298 * (- 16) - 208 * (255 - 128) - 100 * (255 - 128)) / 256 = -172
-            // min_R = (298 * (- 16) + 409 * (- 128)) / 256 = -223
-
-            // max_B = (298 * (255 - 16) + 517 * (255 - 128)) / 256 = 534
-            // max_G = (298 * (255 - 16) - 208 * (- 128) - 100 * (- 128)) / 256 = 432
-            // max_R = (298 * (255 - 16) + 409 * (255 - 128)) / 256 = 481
-
-            // clip range -278 .. 535
-
             signed y1, y2, u, v;
             readFromSrc(src_y, src_u, src_v, x, &y1, &y2, &u, &v);
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * 298;
+            signed tmp1 = (y1 - _c16) * _y + 128;
             signed b1 = (tmp1 + u_b) / 256;
             signed g1 = (tmp1 + v_g + u_g) / 256;
             signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * 298;
+            signed tmp2 = (y2 - _c16) * _y + 128;
             signed b2 = (tmp2 + u_b) / 256;
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
@@ -591,34 +842,84 @@
     return convertYUV420Planar(src, dst);
 }
 
-/*
- * Pack 10-bit YUV into RGBA_1010102.
- *
- * Media sends 10-bit YUV in a RGBA_1010102 format buffer. SF will handle
- * the conversion to RGB using RenderEngine fallback.
- *
- * We do not perform a YUV->RGB conversion here, however the conversion with
- * BT2020 to Full range is below for reference:
- *
- *   B = 1.168  *(Y - 64) + 2.148  *(U - 512)
- *   G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
- *   R = 1.168  *(Y - 64) + 1.683  *(V - 512)
- *
- *   B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
- *   G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
- *   R = .................... + 1723/1024  *(V - 512)
- *
- *   min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
- *   min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
- *   min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
- *
- *   max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
- *   max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
- *   max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
- *
- *   clip range -1175 .. 2218
- *
- */
+status_t ColorConverter::convertYUVP010(
+        const BitmapParams &src, const BitmapParams &dst) {
+    if (mDstFormat == COLOR_Format32bitABGR2101010) {
+        return convertYUVP010ToRGBA1010102(src, dst);
+    }
+
+    return ERROR_UNSUPPORTED;
+}
+
+status_t ColorConverter::convertYUVP010ToRGBA1010102(
+        const BitmapParams &src, const BitmapParams &dst) {
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 64 : 0;
+
+    uint16_t *kAdjustedClip10bit = initClip10Bit();
+
+//    auto readFromSrc = getReadFromSrc(mSrcFormat);
+    auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip10bit);
+
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+            + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+    uint16_t *src_y = (uint16_t *)((uint8_t *)src.mBits
+            + src.mCropTop * src.mStride + src.mCropLeft * src.mBpp);
+
+    uint16_t *src_uv = (uint16_t *)((uint8_t *)src.mBits
+            + src.mStride * src.mHeight
+            + (src.mCropTop / 2) * src.mStride + src.mCropLeft * src.mBpp);
+
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
+            signed y1, y2, u, v;
+            y1 = (src_y[x] >> 6) - _c16;
+            y2 = (src_y[x + 1] >> 6) - _c16;
+            u = int(src_uv[x] >> 6) - 512;
+            v = int(src_uv[x + 1] >> 6) - 512;
+
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
+
+            signed tmp1 = y1 * _y + 128;
+            signed b1 = (tmp1 + u_b) / 256;
+            signed g1 = (tmp1 + v_g + u_g) / 256;
+            signed r1 = (tmp1 + v_r) / 256;
+
+            signed tmp2 = y2 * _y + 128;
+            signed b2 = (tmp2 + u_b) / 256;
+            signed g2 = (tmp2 + v_g + u_g) / 256;
+            signed r2 = (tmp2 + v_r) / 256;
+
+            bool uncropped = x + 1 < src.cropWidth();
+
+            writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+        }
+
+        src_y += src.mStride / 2;
+
+        if (y & 1) {
+            src_uv += src.mStride / 2;
+        }
+
+        dst_ptr += dst.mStride;
+    }
+
+    return OK;
+}
+
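
convertYUVP010ToRGBA1010102() relies on P010 carrying each 10-bit sample in the upper bits of a little-endian 16-bit word, with an interleaved CbCr plane at half vertical resolution, so a right shift by 6 recovers the sample. A tiny illustrative sketch of that extraction (names are hypothetical):

    #include <cstdint>
    #include <cstddef>

    // Read one luma sample and the co-sited chroma pair from P010 rows.
    static inline void readP010(const uint16_t *row_y, const uint16_t *row_uv,
                                size_t x, int *y, int *u, int *v) {
        *y = row_y[x] >> 6;                           // 0..1023
        *u = (int)(row_uv[x & ~1] >> 6) - 512;        // Cb at the even index
        *v = (int)(row_uv[(x & ~1) + 1] >> 6) - 512;  // Cr at the odd index
    }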
 
 #if !USE_NEON_Y410
 
@@ -821,11 +1122,6 @@
 
 status_t ColorConverter::convertQCOMYUV420SemiPlanar(
         const BitmapParams &src, const BitmapParams &dst) {
-    uint8_t *kAdjustedClip = initClip();
-
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
-
     const uint8_t *src_y =
         (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
@@ -833,67 +1129,25 @@
         (const uint8_t *)src_y + src.mWidth * src.mHeight
         + src.mCropTop * src.mWidth + src.mCropLeft;
 
-    for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - 16;
-            signed y2 = (signed)src_y[x + 1] - 16;
+    /* QCOMYUV420SemiPlanar is NV21, while MediaCodec uses NV12 */
+    return convertYUV420SemiPlanarBase(
+            src, dst, src_y, src_u, src.mWidth /* row_inc */, true /* isNV21 */);
+}
 
-            signed u = (signed)src_u[x & ~1] - 128;
-            signed v = (signed)src_u[(x & ~1) + 1] - 128;
+status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
+        const BitmapParams &src, const BitmapParams &dst) {
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+    const uint8_t *src_u =
+        (const uint8_t *)src_y + src.mWidth * (src.mHeight - src.mCropTop / 2);
 
-            signed tmp1 = y1 * 298;
-            signed b1 = (tmp1 + u_b) / 256;
-            signed g1 = (tmp1 + v_g + u_g) / 256;
-            signed r1 = (tmp1 + v_r) / 256;
-
-            signed tmp2 = y2 * 298;
-            signed b2 = (tmp2 + u_b) / 256;
-            signed g2 = (tmp2 + v_g + u_g) / 256;
-            signed r2 = (tmp2 + v_r) / 256;
-
-            uint32_t rgb1 =
-                ((kAdjustedClip[b1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[r1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[b2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[r2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
-        }
-
-        src_y += src.mWidth;
-
-        if (y & 1) {
-            src_u += src.mWidth;
-        }
-
-        dst_ptr += dst.mWidth;
-    }
-
-    return OK;
+    return convertYUV420SemiPlanarBase(
+            src, dst, src_y, src_u, src.mWidth /* row_inc */);
 }
 
 status_t ColorConverter::convertYUV420SemiPlanar(
         const BitmapParams &src, const BitmapParams &dst) {
-    // XXX Untested
-
-    uint8_t *kAdjustedClip = initClip();
-
-    uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
-            dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
-
     const uint8_t *src_y =
         (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
 
@@ -901,90 +1155,49 @@
         (const uint8_t *)src.mBits + src.mHeight * src.mStride +
         (src.mCropTop / 2) * src.mStride + src.mCropLeft;
 
-    for (size_t y = 0; y < src.cropHeight(); ++y) {
-        for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - 16;
-            signed y2 = (signed)src_y[x + 1] - 16;
-
-            signed v = (signed)src_u[x & ~1] - 128;
-            signed u = (signed)src_u[(x & ~1) + 1] - 128;
-
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
-
-            signed tmp1 = y1 * 298;
-            signed b1 = (tmp1 + u_b) / 256;
-            signed g1 = (tmp1 + v_g + u_g) / 256;
-            signed r1 = (tmp1 + v_r) / 256;
-
-            signed tmp2 = y2 * 298;
-            signed b2 = (tmp2 + u_b) / 256;
-            signed g2 = (tmp2 + v_g + u_g) / 256;
-            signed r2 = (tmp2 + v_r) / 256;
-
-            uint32_t rgb1 =
-                ((kAdjustedClip[b1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[r1] >> 3);
-
-            uint32_t rgb2 =
-                ((kAdjustedClip[b2] >> 3) << 11)
-                | ((kAdjustedClip[g2] >> 2) << 5)
-                | (kAdjustedClip[r2] >> 3);
-
-            if (x + 1 < src.cropWidth()) {
-                *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
-            } else {
-                dst_ptr[x] = rgb1;
-            }
-        }
-
-        src_y += src.mStride;
-
-        if (y & 1) {
-            src_u += src.mStride;
-        }
-
-        dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
-    }
-
-    return OK;
+    return convertYUV420SemiPlanarBase(
+            src, dst, src_y, src_u, src.mStride /* row_inc */);
 }
 
-status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
-        const BitmapParams &src, const BitmapParams &dst) {
+status_t ColorConverter::convertYUV420SemiPlanarBase(
+        const BitmapParams &src, const BitmapParams &dst,
+        const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21) {
+    const struct Coeffs *matrix = getMatrix();
+    if (!matrix) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    signed _b_u = matrix->_b_u;
+    signed _neg_g_u = -matrix->_g_u;
+    signed _neg_g_v = -matrix->_g_v;
+    signed _r_v = matrix->_r_v;
+    signed _y = matrix->_y;
+    signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+
     uint8_t *kAdjustedClip = initClip();
 
-    uint16_t *dst_ptr = (uint16_t *)dst.mBits
-        + dst.mCropTop * dst.mWidth + dst.mCropLeft;
-
-    const uint8_t *src_y =
-        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
-
-    const uint8_t *src_u =
-        (const uint8_t *)src_y + src.mWidth * (src.mHeight - src.mCropTop / 2);
+    uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
+            dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
 
     for (size_t y = 0; y < src.cropHeight(); ++y) {
         for (size_t x = 0; x < src.cropWidth(); x += 2) {
-            signed y1 = (signed)src_y[x] - 16;
-            signed y2 = (signed)src_y[x + 1] - 16;
+            signed y1 = (signed)src_y[x] - _c16;
+            signed y2 = (signed)src_y[x + 1] - _c16;
 
-            signed u = (signed)src_u[x & ~1] - 128;
-            signed v = (signed)src_u[(x & ~1) + 1] - 128;
+            signed u = (signed)src_u[(x & ~1) + isNV21] - 128;
+            signed v = (signed)src_u[(x & ~1) + !isNV21] - 128;
 
-            signed u_b = u * 517;
-            signed u_g = -u * 100;
-            signed v_g = -v * 208;
-            signed v_r = v * 409;
+            signed u_b = u * _b_u;
+            signed u_g = u * _neg_g_u;
+            signed v_g = v * _neg_g_v;
+            signed v_r = v * _r_v;
 
-            signed tmp1 = y1 * 298;
+            signed tmp1 = y1 * _y + 128;
             signed b1 = (tmp1 + u_b) / 256;
             signed g1 = (tmp1 + v_g + u_g) / 256;
             signed r1 = (tmp1 + v_r) / 256;
 
-            signed tmp2 = y2 * 298;
+            signed tmp2 = y2 * _y + 128;
             signed b2 = (tmp2 + u_b) / 256;
             signed g2 = (tmp2 + v_g + u_g) / 256;
             signed r2 = (tmp2 + v_r) / 256;
@@ -1006,31 +1219,40 @@
             }
         }
 
-        src_y += src.mWidth;
+        src_y += row_inc;
 
         if (y & 1) {
-            src_u += src.mWidth;
+            src_u += row_inc;
         }
 
-        dst_ptr += dst.mWidth;
+        dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
     }
 
     return OK;
 }
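
convertYUV420SemiPlanarBase() handles both chroma orders through the isNV21 flag: NV12 interleaves Cb,Cr,Cb,Cr while NV21 (the QCOM layout above) interleaves Cr,Cb, so U is read from the odd index when isNV21 is true. An equivalent explicit form, for illustration only:

    #include <cstdint>
    #include <cstddef>

    static inline void readSemiPlanarChroma(const uint8_t *src_uv, size_t x,
                                            bool isNV21, int *u, int *v) {
        const size_t even = x & ~(size_t)1;
        *u = (int)src_uv[even + (isNV21 ? 1 : 0)] - 128;  // Cb
        *v = (int)src_uv[even + (isNV21 ? 0 : 1)] - 128;  // Cr
    }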
 
 uint8_t *ColorConverter::initClip() {
-    static const signed kClipMin = -278;
-    static const signed kClipMax = 535;
-
     if (mClip == NULL) {
-        mClip = new uint8_t[kClipMax - kClipMin + 1];
+        mClip = new uint8_t[CLIP_RANGE_MAX_8BIT - CLIP_RANGE_MIN_8BIT + 1];
 
-        for (signed i = kClipMin; i <= kClipMax; ++i) {
-            mClip[i - kClipMin] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
+        for (signed i = CLIP_RANGE_MIN_8BIT; i <= CLIP_RANGE_MAX_8BIT; ++i) {
+            mClip[i - CLIP_RANGE_MIN_8BIT] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
         }
     }
 
-    return &mClip[-kClipMin];
+    return &mClip[-CLIP_RANGE_MIN_8BIT];
+}
+
+uint16_t *ColorConverter::initClip10Bit() {
+    if (mClip10Bit == NULL) {
+        mClip10Bit = new uint16_t[CLIP_RANGE_MAX_10BIT - CLIP_RANGE_MIN_10BIT + 1];
+
+        for (signed i = CLIP_RANGE_MIN_10BIT; i <= CLIP_RANGE_MAX_10BIT; ++i) {
+            mClip10Bit[i - CLIP_RANGE_MIN_10BIT] = (i < 0) ? 0 : (i > 1023) ? 1023 : (uint16_t)i;
+        }
+    }
+
+    return &mClip10Bit[-CLIP_RANGE_MIN_10BIT];
 }
 
 }  // namespace android
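
initClip() and initClip10Bit() return a pointer biased by the (negative) minimum of the clip range, so the per-pixel clamp becomes a single table read even for negative intermediate values. A standalone sketch of the same idea, reusing the 8-bit constants above (illustrative only):

    #include <cstdint>
    #include <vector>

    static const int kClipMin = -294;
    static const int kClipMax = 552;

    static uint8_t *buildClip8(std::vector<uint8_t> &storage) {
        storage.resize(kClipMax - kClipMin + 1);
        for (int i = kClipMin; i <= kClipMax; ++i) {
            storage[i - kClipMin] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i;
        }
        return storage.data() - kClipMin;  // index with any value in [kClipMin, kClipMax]
    }

With this, clip[-10] reads back 0 and clip[300] reads back 255, with no branch at the point of use.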
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 04041eb..3509ef8 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -79,7 +79,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.av1.decoder" type="video/av01">
             <Limit name="size" min="96x96" max="1920x1080" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Limit name="blocks-per-second" min="24" max="2073600" />
             <Limit name="bitrate" range="1-120000000" />
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index a4e3425..d7e2d18 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -91,11 +91,11 @@
         <MediaCodec name="c2.android.mpeg4.decoder" type="video/mp4v-es">
             <Alias name="OMX.google.mpeg4.decoder" />
             <!-- profiles and levels:  ProfileSimple : Level3 -->
-            <Limit name="size" min="2x2" max="352x288" />
+            <Limit name="size" min="2x2" max="1920x1920" />
             <Limit name="alignment" value="2x2" />
-            <Limit name="block-size" value="16x16" />
-            <Limit name="blocks-per-second" range="12-11880" />
-            <Limit name="bitrate" range="1-384000" />
+            <Limit name="block-count" range="1-14400" />
+            <Limit name="blocks-per-second" range="1-432000" />
+            <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
         </MediaCodec>
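
The new c2.android.mpeg4.decoder limits are internally consistent: a 1920x1920 frame at the usual 16x16 macroblock size is (1920/16) * (1920/16) = 120 * 120 = 14400 blocks, matching block-count 1-14400, and 14400 blocks at 30 fps is 432000, matching blocks-per-second 1-432000.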
         <MediaCodec name="c2.android.h263.decoder" type="video/3gpp">
@@ -184,7 +184,7 @@
         </MediaCodec>
         <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="!slow-cpu">
             <Limit name="size" min="2x2" max="2048x2048" />
-            <Limit name="alignment" value="2x2" />
+            <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
             <Limit name="block-count" range="1-16384" />
             <Limit name="blocks-per-second" range="1-2073600" />
diff --git a/media/libstagefright/filters/Android.bp b/media/libstagefright/filters/Android.bp
index acc9e87..e6d59ad 100644
--- a/media/libstagefright/filters/Android.bp
+++ b/media/libstagefright/filters/Android.bp
@@ -22,8 +22,12 @@
         "ZeroFilter.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/native/include/media/openmax",
+    export_include_dirs: [
+        "include",
+    ],
+
+    local_include_dirs: [
+        "include/filters",
     ],
 
     cflags: [
diff --git a/media/libstagefright/filters/ColorConvert.h b/media/libstagefright/filters/include/filters/ColorConvert.h
similarity index 100%
rename from media/libstagefright/filters/ColorConvert.h
rename to media/libstagefright/filters/include/filters/ColorConvert.h
diff --git a/media/libstagefright/filters/GraphicBufferListener.h b/media/libstagefright/filters/include/filters/GraphicBufferListener.h
similarity index 100%
rename from media/libstagefright/filters/GraphicBufferListener.h
rename to media/libstagefright/filters/include/filters/GraphicBufferListener.h
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h
similarity index 100%
rename from media/libstagefright/filters/IntrinsicBlurFilter.h
rename to media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h
diff --git a/media/libstagefright/filters/RSFilter.h b/media/libstagefright/filters/include/filters/RSFilter.h
similarity index 100%
rename from media/libstagefright/filters/RSFilter.h
rename to media/libstagefright/filters/include/filters/RSFilter.h
diff --git a/media/libstagefright/filters/SaturationFilter.h b/media/libstagefright/filters/include/filters/SaturationFilter.h
similarity index 100%
rename from media/libstagefright/filters/SaturationFilter.h
rename to media/libstagefright/filters/include/filters/SaturationFilter.h
diff --git a/media/libstagefright/filters/SimpleFilter.h b/media/libstagefright/filters/include/filters/SimpleFilter.h
similarity index 100%
rename from media/libstagefright/filters/SimpleFilter.h
rename to media/libstagefright/filters/include/filters/SimpleFilter.h
diff --git a/media/libstagefright/filters/ZeroFilter.h b/media/libstagefright/filters/include/filters/ZeroFilter.h
similarity index 100%
rename from media/libstagefright/filters/ZeroFilter.h
rename to media/libstagefright/filters/include/filters/ZeroFilter.h
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 665aae1..83fcc01 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -21,6 +21,12 @@
     name: "libstagefright_flacdec",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+
     host_supported: true,
 
     srcs: [
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 0a4e598..4334f1e 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -19,6 +19,8 @@
 #include <utils/Log.h>
 #include <utils/String8.h>
 
+#include <inttypes.h>
+
 #include "ALooperRoster.h"
 
 #include "ADebug.h"
@@ -142,7 +144,7 @@
             sp<AHandler> handler = info.mHandler.promote();
             if (handler != NULL) {
                 handler->mVerboseStats = verboseStats;
-                s.appendFormat(": %u messages processed", handler->mMessageCounter);
+                s.appendFormat(": %" PRIu64 " messages processed", handler->mMessageCounter);
                 if (verboseStats) {
                     for (size_t j = 0; j < handler->mMessages.size(); j++) {
                         char fourcc[15];
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index c2114b3..5c99cc9 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -33,7 +33,7 @@
 
 #include <media/stagefright/foundation/hexdump.h>
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -659,7 +659,7 @@
     return s;
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 // static
 sp<AMessage> AMessage::FromParcel(const Parcel &parcel, size_t maxNestingLevel) {
     int32_t what = parcel.readInt32();
@@ -825,7 +825,7 @@
         }
     }
 }
-#endif  // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif  // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 sp<AMessage> AMessage::changesFrom(const sp<const AMessage> &other, bool deep) const {
     if (other == NULL) {
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index b1ed077..a5e0ff8 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -27,7 +27,7 @@
 #include "ADebug.h"
 #include "AString.h"
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -365,7 +365,7 @@
     return !strcasecmp(mData + mSize - suffixLen, suffix);
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 // static
 AString AString::FromParcel(const Parcel &parcel) {
     size_t size = static_cast<size_t>(parcel.readInt32());
@@ -380,7 +380,7 @@
     }
     return err;
 }
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 AString AStringPrintf(const char *format, ...) {
     va_list ap;
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index dd2c66f..ca17117 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -23,6 +23,11 @@
     vendor_available: true,
     host_supported: true,
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
 }
 
 cc_defaults {
@@ -33,18 +38,13 @@
     },
     host_supported: true,
     double_loadable: true,
-    include_dirs: [
-        "frameworks/av/include",
-        "frameworks/native/include",
-        "frameworks/native/libs/arect/include",
-        "frameworks/native/libs/nativebase/include",
-    ],
 
     local_include_dirs: [
         "include/media/stagefright/foundation",
     ],
 
     header_libs: [
+        "av-headers",
         // this is only needed for the vendor variant that removes libbinder, but vendor
         // target below does not allow adding header_libs.
         "libbinder_headers",
@@ -120,8 +120,6 @@
         },
     },
 
-    clang: true,
-
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -135,12 +133,19 @@
     name: "libstagefright_foundation",
     defaults: ["libstagefright_foundation_defaults"],
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
 }
 
 cc_library_static {
     name: "libstagefright_foundation_without_imemory",
     defaults: ["libstagefright_foundation_defaults"],
     min_sdk_version: "29",
+    apex_available: ["com.android.media"],
+
 
     cflags: [
         "-Wno-multichar",
@@ -165,7 +170,7 @@
 
     shared_libs: [
         "liblog",
-        "libutils",             // for sp<>
+        "libutils", // for sp<>
         // actually invokes this, but called from folks who already load it
         // "libmediandk",
     ],
@@ -193,8 +198,6 @@
         "ColorUtils_fill.cpp",
     ],
 
-    clang: true,
-
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -211,4 +214,3 @@
     ],
 
 }
-
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index fa722b5..6dc8157 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -590,9 +590,10 @@
     uint32_t gfxRange = range;
     uint32_t gfxStandard = standard;
     uint32_t gfxTransfer = transfer;
-    // TRICKY: use & to ensure all three mappings are completed
-    if (!(sGfxRanges.map(range, &gfxRange) & sGfxStandards.map(standard, &gfxStandard)
-            & sGfxTransfers.map(transfer, &gfxTransfer))) {
+    bool mappedRange = sGfxRanges.map(range, &gfxRange);
+    bool mappedStandard = sGfxStandards.map(standard, &gfxStandard);
+    bool mappedTransfer = sGfxTransfers.map(transfer, &gfxTransfer);
+    if (! (mappedRange && mappedStandard && mappedTransfer)) {
         ALOGW("could not safely map platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s) to "
               "graphics dataspace (R:%u S:%u T:%u)",
               range, asString(range), standard, asString(standard), transfer, asString(transfer),
@@ -626,9 +627,10 @@
     CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
     CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
     CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
-    // TRICKY: use & to ensure all three mappings are completed
-    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
-            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+    bool mappedRange = sGfxRanges.map(gfxRange, &cuRange);
+    bool mappedStandard = sGfxStandards.map(gfxStandard, &cuStandard);
+    bool mappedTransfer = sGfxTransfers.map(gfxTransfer, &cuTransfer);
+    if (! (mappedRange && mappedStandard && mappedTransfer)) {
         ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
               "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
               gfxRange, gfxStandard, gfxTransfer,
@@ -781,5 +783,14 @@
     return true;
 }
 
+// static
+bool ColorUtils::isHDRStaticInfoValid(HDRStaticInfo *info) {
+    if (info->sType1.mMaxDisplayLuminance > 0.0f
+        && info->sType1.mMinDisplayLuminance > 0.0f)  return true;
+    if (info->sType1.mMaxContentLightLevel > 0.0f
+        && info->sType1.mMaxFrameAverageLightLevel > 0.0f)  return true;
+    return false;
+}
+
 }  // namespace android
 
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
index ada5d81..5c4ec17 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -60,12 +60,66 @@
 const char *MEDIA_MIMETYPE_AUDIO_AC4 = "audio/ac4";
 const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1 = "audio/mha1";
 const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1 = "audio/mhm1";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3 = "audio/mhm1.03";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4 = "audio/mhm1.04";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3 = "audio/mhm1.0d";
+const char *MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4 = "audio/mhm1.0e";
 const char *MEDIA_MIMETYPE_AUDIO_SCRAMBLED = "audio/scrambled";
 const char *MEDIA_MIMETYPE_AUDIO_ALAC = "audio/alac";
 const char *MEDIA_MIMETYPE_AUDIO_WMA = "audio/x-ms-wma";
 const char *MEDIA_MIMETYPE_AUDIO_MS_ADPCM = "audio/x-adpcm-ms";
 const char *MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM = "audio/x-adpcm-dvi-ima";
-
+const char *MEDIA_MIMETYPE_AUDIO_DTS = "audio/vnd.dts";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_HD = "audio/vnd.dts.hd";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD = "audio/vnd.dts.uhd";
+const char *MEDIA_MIMETYPE_AUDIO_EVRC = "audio/evrc";
+const char *MEDIA_MIMETYPE_AUDIO_EVRCB = "audio/evrcb";
+const char *MEDIA_MIMETYPE_AUDIO_EVRCWB = "audio/evrcwb";
+const char *MEDIA_MIMETYPE_AUDIO_EVRCNW = "audio/evrcnw";
+const char *MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS = "audio/amr-wb+";
+const char *MEDIA_MIMETYPE_AUDIO_APTX = "audio/aptx";
+const char *MEDIA_MIMETYPE_AUDIO_DRA = "audio/vnd.dra";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT = "audio/vnd.dolby.mat";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0 = "audio/vnd.dolby.mat.1.0";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0 = "audio/vnd.dolby.mat.2.0";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1 = "audio/vnd.dolby.mat.2.1";
+const char *MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD = "audio/vnd.dolby.mlp";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_MP4 = "audio/mp4a.40";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_MAIN = "audio/mp4a.40.01";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_LC = "audio/mp4a.40.02";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_SSR = "audio/mp4a.40.03";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_LTP = "audio/mp4a.40.04";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_HE_V1 = "audio/mp4a.40.05";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE = "audio/mp4a.40.06";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ERLC = "audio/mp4a.40.17";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_LD = "audio/mp4a.40.23";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_HE_V2 = "audio/mp4a.40.29";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ELD = "audio/mp4a.40.39";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_XHE = "audio/mp4a.40.42";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADIF = "audio/aac-adif";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN = "audio/aac-adts.01";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC = "audio/aac-adts.02";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR = "audio/aac-adts.03";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP = "audio/aac-adts.04";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1 = "audio/aac-adts.05";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE = "audio/aac-adts.06";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC = "audio/aac-adts.17";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD = "audio/aac-adts.23";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2 = "audio/aac-adts.29";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD = "audio/aac-adts.39";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE = "audio/aac-adts.42";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC = "audio/mp4a-latm.02";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1 = "audio/mp4a-latm.05";
+const char *MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2 = "audio/mp4a-latm.29";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_IEC61937 = "audio/x-iec61937";
+// Note: not in the IANA registry.
+const char *MEDIA_MIMETYPE_AUDIO_IEC60958 = "audio/x-iec60958";
 
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mp4";
 const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/x-wav";
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index 7f48cfd..77913d5 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -28,7 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MetaData.h>
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -48,7 +48,7 @@
 MetaData::~MetaData() {
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 /* static */
 sp<MetaData> MetaData::createFromParcel(const Parcel &parcel) {
 
diff --git a/media/libstagefright/foundation/MetaDataBase.cpp b/media/libstagefright/foundation/MetaDataBase.cpp
index 3f050ea..3370748 100644
--- a/media/libstagefright/foundation/MetaDataBase.cpp
+++ b/media/libstagefright/foundation/MetaDataBase.cpp
@@ -28,7 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MetaDataBase.h>
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -452,7 +452,7 @@
     }
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 status_t MetaDataBase::writeToParcel(Parcel &parcel) {
     status_t ret;
     size_t numItems = mInternalData->mItems.size();
@@ -521,9 +521,64 @@
                 }
                 setData(key, type, blob.data(), size);
                 blob.release();
+            } else if (type == TYPE_C_STRING) {
+                // copy data directly from Parcel storage, then advance position
+                // NB: readInplace() bumps position, it is NOT idempotent.
+                const void *src = parcel.readInplace(size);
+                char *str = (char *) src;
+                if (src == nullptr || size == 0 || str[size-1] != '\0') {
+                    char ccKey[5];
+                    MakeFourCCString(key, ccKey);
+                    if (src == nullptr) {
+                        ALOGW("ignoring key '%s' string with no data (expected %d)", ccKey, size);
+                    } else {
+                        ALOGW("ignoring key '%s': unterminated string of %d bytes", ccKey, size);
+                    }
+                } else {
+                    setData(key, type, src, size);
+                }
             } else {
                 // copy data directly from Parcel storage, then advance position
-                setData(key, type, parcel.readInplace(size), size);
+                // verify that the received size is enough
+                uint32_t needed = 0;
+                switch (type) {
+                    case TYPE_INT32:
+                        needed = sizeof(int32_t);
+                        break;
+                    case TYPE_INT64:
+                        needed = sizeof(int64_t);
+                        break;
+                    case TYPE_FLOAT:
+                        needed = sizeof(float);
+                        break;
+                    case TYPE_POINTER:
+                        // NB: this rejects passing between 32-bit and 64-bit space.
+                        needed = sizeof(void*);
+                        break;
+                    case TYPE_RECT:
+                        needed = sizeof(Rect);
+                        break;
+                    default:
+                        // non-standard entities can be any size >= 0
+                        needed = 0;
+                        break;
+                }
+                const void *src = parcel.readInplace(size);
+                if (src == nullptr || (needed != 0 && size != needed)) {
+                    char ccKey[5];
+                    MakeFourCCString(key, ccKey);
+                    char ccType[5];
+                    MakeFourCCString(type, ccType);
+                    if (src == nullptr) {
+                        ALOGW("ignoring key '%s' type '%s' missing data (expected %d)",
+                              ccKey, ccType, size);
+                    } else {
+                        ALOGW("ignoring key '%s': type '%s' bytes: expected %d != %d received",
+                               ccKey, ccType, needed, size);
+                    }
+                } else {
+                    setData(key, type, src, size);
+                }
             }
          }
 
@@ -532,7 +587,7 @@
     ALOGW("no metadata in parcel");
     return UNKNOWN_ERROR;
 }
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 }  // namespace android
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
index 53d8a9b..337460a 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AHandler.h
@@ -66,7 +66,7 @@
     }
 
     bool mVerboseStats;
-    uint32_t mMessageCounter;
+    uint64_t mMessageCounter;
     KeyedVector<uint32_t, uint32_t> mMessages;
 
     void deliverMessage(const sp<AMessage> &msg);
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index a2b6c4f..72c8074 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -193,6 +193,9 @@
     static void setHDRStaticInfoIntoAMediaFormat(const HDRStaticInfo &info, AMediaFormat *format);
     // (internal) used by the setHDRStaticInfoInfo* routines
     static void fillHdrStaticInfoBuffer( const HDRStaticInfo &info, uint8_t *data);
+
+    // determine whether HDR static info is valid
+    static bool isHDRStaticInfoValid(HDRStaticInfo *info);
 };
 
 inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index f5cecef..fb8c299 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -62,12 +62,59 @@
 extern const char *MEDIA_MIMETYPE_AUDIO_AC4;
 extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1;
 extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4;
 extern const char *MEDIA_MIMETYPE_AUDIO_SCRAMBLED;
 extern const char *MEDIA_MIMETYPE_AUDIO_ALAC;
 extern const char *MEDIA_MIMETYPE_AUDIO_WMA;
 extern const char *MEDIA_MIMETYPE_AUDIO_MS_ADPCM;
 extern const char *MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM;
-
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD;
+extern const char *MEDIA_MIMETYPE_AUDIO_EVRC;
+extern const char *MEDIA_MIMETYPE_AUDIO_EVRCB;
+extern const char *MEDIA_MIMETYPE_AUDIO_EVRCWB;
+extern const char *MEDIA_MIMETYPE_AUDIO_EVRCNW;
+extern const char *MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS;
+extern const char *MEDIA_MIMETYPE_AUDIO_APTX;
+extern const char *MEDIA_MIMETYPE_AUDIO_DRA;
+extern const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT;
+extern const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0;
+extern const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0;
+extern const char *MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1;
+extern const char *MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_MP4;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_MAIN;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_LC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_SSR;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_LTP;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_HE_V1;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ERLC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_LD;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_HE_V2;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ELD;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_XHE;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADIF;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1;
+extern const char *MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2;
+extern const char *MEDIA_MIMETYPE_AUDIO_IEC61937;
+extern const char *MEDIA_MIMETYPE_AUDIO_IEC60958;
 
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG4;
 extern const char *MEDIA_MIMETYPE_CONTAINER_WAV;
diff --git a/media/libstagefright/foundation/tests/AVCUtils/Android.bp b/media/libstagefright/foundation/tests/AVCUtils/Android.bp
index 594da56..ee7db21 100644
--- a/media/libstagefright/foundation/tests/AVCUtils/Android.bp
+++ b/media/libstagefright/foundation/tests/AVCUtils/Android.bp
@@ -43,10 +43,6 @@
         "libstagefright_foundation",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/foundation",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/foundation/tests/Android.bp b/media/libstagefright/foundation/tests/Android.bp
index e50742e..e72ce43 100644
--- a/media/libstagefright/foundation/tests/Android.bp
+++ b/media/libstagefright/foundation/tests/Android.bp
@@ -18,10 +18,6 @@
         "-Wall",
     ],
 
-    include_dirs: [
-        "frameworks/av/include",
-    ],
-
     shared_libs: [
         "liblog",
         "libstagefright_foundation",
diff --git a/media/libstagefright/http/Android.bp b/media/libstagefright/http/Android.bp
index f4d6d99..f25318d 100644
--- a/media/libstagefright/http/Android.bp
+++ b/media/libstagefright/http/Android.bp
@@ -12,10 +12,8 @@
 
     srcs: ["HTTPHelper.cpp"],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/native/include/media/openmax",
-        "frameworks/base/core/jni",
+    header_libs: [
+        "libstagefright_headers",
     ],
 
     shared_libs: [
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index 0b0acbf..7e26bd6 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -28,11 +28,6 @@
         "PlaylistFetcher.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/native/include/media/openmax",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
@@ -65,6 +60,8 @@
 
     header_libs: [
         "libbase_headers",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
     ],
 
     static_libs: [
@@ -74,3 +71,8 @@
     ],
 
 }
+
+cc_library_headers {
+    name: "libstagefright_httplive_headers",
+    export_include_dirs: ["."],
+}
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 3bad015..09ca1c9 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -23,7 +23,7 @@
 #include "M3UParser.h"
 #include "PlaylistFetcher.h"
 
-#include "mpeg2ts/AnotherPacketSource.h"
+#include <mpeg2ts/AnotherPacketSource.h>
 
 #include <cutils/properties.h>
 #include <media/MediaHTTPService.h>
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 7a6d487..ed38a2e 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -24,7 +24,7 @@
 
 #include <utils/String8.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <mpeg2ts/ATSParser.h>
 
 namespace android {
 
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index b23aa8a..b339fd2 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -24,9 +24,9 @@
 #include "HTTPDownloader.h"
 #include "LiveSession.h"
 #include "M3UParser.h"
-#include "include/ID3.h"
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/HlsSampleDecryptor.h"
+#include <ID3.h>
+#include <mpeg2ts/AnotherPacketSource.h>
+#include <mpeg2ts/HlsSampleDecryptor.h>
 
 #include <datasource/DataURISource.h>
 #include <media/stagefright/foundation/ABitReader.h>
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 5d3f9c1..716df63 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -21,7 +21,7 @@
 #include <media/stagefright/foundation/AHandler.h>
 #include <openssl/aes.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <mpeg2ts/ATSParser.h>
 #include "LiveSession.h"
 
 namespace android {
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
new file mode 100644
index 0000000..85fd8b7
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_httplive_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_httplive_license",
+    ],
+}
+
+cc_fuzz {
+    name: "httplive_fuzzer",
+    srcs: [
+        "httplive_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_httplive",
+        "libstagefright_id3",
+        "libstagefright_metadatautils",
+        "libstagefright_mpeg2support",
+        "liblog",
+        "libcutils",
+        "libdatasource",
+        "libmedia",
+        "libstagefright",
+    ],
+    header_libs: [
+        "libbase_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
+    ],
+    shared_libs: [
+        "libcrypto",
+        "libstagefright_foundation",
+        "libhidlbase",
+        "libhidlmemory",
+        "libutils",
+        "android.hidl.allocator@1.0",
+    ],
+    corpus: ["corpus/*"],
+    dictionary: "httplive_fuzzer.dict",
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/httplive/fuzzer/README.md b/media/libstagefright/httplive/fuzzer/README.md
new file mode 100644
index 0000000..3a64ea4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libstagefright_httplive
+
+## Plugin Design Considerations
+The fuzzer plugin for libstagefright_httplive is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but are instead selected based on
+the incoming data. Also, several hand-crafted .m3u8 files are added to the corpus directory to increase code coverage. This ensures more code paths are reached by the fuzzer.
+
+libstagefright_httplive supports the following parameters:
+1. Final Result (parameter name: `finalResult`)
+2. Flags (parameter name: `flags`)
+3. Time Us (parameter name: `timeUs`)
+4. Track Index (parameter name: `trackIndex`)
+5. Index (parameter name: `index`)
+6. Select (parameter name: `select`)
+
+| Parameter | Valid Values | Configured Value |
+| ------------- | ------------- | ----- |
+| `finalResult` | `-34` to `-1` | Value obtained from FuzzedDataProvider |
+| `flags` | `0` to `1` | Value obtained from FuzzedDataProvider |
+| `timeUs` | `0` to `10000000` | Value obtained from FuzzedDataProvider |
+| `trackIndex` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `index` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `select` | `True` or `False` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
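+
+A minimal sketch of how such values are typically derived from the input (the ranges
+mirror the table above; the actual harness, httplive_fuzzer.cpp, is added later in this
+change and remains the authoritative source):
+```
+#include <fuzzer/FuzzedDataProvider.h>
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  // Every parameter is read from the input bytes, so a given input
+  // always yields the same sequence of values (deterministic runs).
+  int32_t finalResult = fdp.ConsumeIntegralInRange<int32_t>(-34, -1);
+  uint32_t flags = fdp.ConsumeIntegralInRange<uint32_t>(0, 1);
+  int64_t timeUs = fdp.ConsumeIntegralInRange<int64_t>(0, 10000000);
+  bool select = fdp.ConsumeBool();
+  (void)finalResult; (void)flags; (void)timeUs; (void)select;
+  return 0;
+}
+```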
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the httplive module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
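+
+For reference, a condensed, hand-annotated view of that entry flow (the complete
+harness is httplive_fuzzer.cpp, added later in this change):
+```
+void HttpLiveFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);  // the same bytes drive every parameter choice
+  createM3U8File(data, size);                 // ...and are written out as the .m3u8 playlist
+  invokeLiveDataSource();                     // exercises LiveDataSource queue/read/EOS paths
+  invokeLiveSession();                        // connects to the file:// playlist and drives LiveSession
+  delete mFDP;
+}
+```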
+
+## Build
+
+This describes the steps to build the httplive_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) httplive_fuzzer
+```
+#### Steps to run
+To run on device
+```
+  $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/lib /data/fuzz/${TARGET_ARCH}/lib
+  $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/corpus
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/httplive/fuzzer/corpus/crypt.key b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
new file mode 100644
index 0000000..f9d5d7f
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
@@ -0,0 +1,2 @@

+ÏŒüÐ5Љ_xïHÎ3
diff --git a/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
new file mode 100644
index 0000000..32b0eac
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-ALLOW-CACHE:YES
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:1
+#EXT-X-KEY:METHOD=AES-128,URI="../../fuzz/arm64/httplive_fuzzer/corpus/crypt.key"
+#EXTINF:10.000,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5.092,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
new file mode 100644
index 0000000..9338e04
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
@@ -0,0 +1,8 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-MEDIA-SEQUENCE:0
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
new file mode 100644
index 0000000..e1eff58
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
new file mode 100644
index 0000000..37a0189
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
@@ -0,0 +1,6 @@
+#EXTM3U
+#EXT-X-INDEPENDENT-SEGMENTS
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=165340,RESOLUTION=256x144,CODECS="mp4a.40.5,avc1.42c00b"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index.m3u8
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=344388,RESOLUTION=426x240,CODECS="mp4a.40.5,avc1.4d4015"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
new file mode 100644
index 0000000..1b7f489
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
new file mode 100644
index 0000000..89ba37c
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
@@ -0,0 +1,15 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=SAMPLE-AES,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
new file mode 100644
index 0000000..2120de4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:11
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-MEDIA-SEQUENCE:0
+#EXT-X-VERSION:4
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:10@0
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:20@10
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:80
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
new file mode 100644
index 0000000..588368a
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0x30303030303030303030303030303030
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
new file mode 100644
index 0000000..b09948e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
@@ -0,0 +1,46 @@
+#EXTM3U
+#EXT-X-VERSION:4
+## Created with Unified Streaming Platform  (version=1.11.3-24438)
+#EXT-X-SESSION-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key"
+
+# AUDIO groups
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-64",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-128",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+
+# SUBTITLES groups
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_eng=1000.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="ru",NAME="Russian",AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_rus=1000.m3u8"
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=494000,CODECS="mp4a.40.2,avc1.42C00D",RESOLUTION=224x100,FRAME-RATE=24,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng=401000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=933000,CODECS="mp4a.40.2,avc1.42C016",RESOLUTION=448x200,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=751000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1198000,CODECS="mp4a.40.2,avc1.4D401F",RESOLUTION=784x350,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1001000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1501000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2469000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=2200000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=1025000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng_1=902000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1368000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=2576x1150,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1161000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1815000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=3360x1500,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1583000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=69000,CODECS="mp4a.40.2",AUDIO="audio-aacl-64",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=64008.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=137000,CODECS="mp4a.40.2",AUDIO="audio-aacl-128",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=128002.m3u8
+
+# keyframes
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=54000,CODECS="avc1.42C00D",RESOLUTION=224x100,URI="keyframes/tears-of-steel-aes-video_eng=401000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=100000,CODECS="avc1.42C016",RESOLUTION=448x200,URI="keyframes/tears-of-steel-aes-video_eng=751000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=133000,CODECS="avc1.4D401F",RESOLUTION=784x350,URI="keyframes/tears-of-steel-aes-video_eng=1001000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=120000,CODECS="hvc1.1.6.L150.90",RESOLUTION=1680x750,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=902000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=154000,CODECS="hvc1.1.6.L150.90",RESOLUTION=2576x1150,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1161000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=210000,CODECS="hvc1.1.6.L150.90",RESOLUTION=3360x1500,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1583000.m3u8"
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
new file mode 100644
index 0000000..353d589
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:5
+
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="English stereo",LANGUAGE="en",AUTOSELECT=YES,URI="../../fuzz/arm64/httplive_fuzzer/index1.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=628000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=320x180,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=928000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=480x270,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index2.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=640x360,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index3.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2528000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=960x540,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
new file mode 100644
index 0000000..eb88422
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
@@ -0,0 +1,17 @@
+#EXTM3U
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="eng",NAME="English",AUTOSELECT=YES,DEFAULT=YES,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="fre",NAME="Français",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="sp",NAME="Espanol",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
new file mode 100644
index 0000000..aa777b3
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <LiveDataSource.h>
+#include <LiveSession.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/mediaplayer_common.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
+#include <string>
+#include <utils/Log.h>
+
+using namespace std;
+using namespace android;
+
+constexpr char kFileNamePrefix[] = "/data/local/tmp/httplive-";
+constexpr char kFileNameSuffix[] = ".m3u8";
+constexpr char kFileUrlPrefix[] = "file://";
+constexpr int64_t kOffSet = 0;
+constexpr int32_t kReadyMarkMs = 5000;
+constexpr int32_t kPrepareMarkMs = 1500;
+constexpr int32_t kErrorNoMax = -1;
+constexpr int32_t kErrorNoMin = -34;
+constexpr int32_t kMaxTimeUs = 1000;
+constexpr int32_t kRandomStringLength = 64;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+constexpr LiveSession::StreamType kValidStreamType[] = {
+    LiveSession::STREAMTYPE_AUDIO, LiveSession::STREAMTYPE_VIDEO,
+    LiveSession::STREAMTYPE_SUBTITLES, LiveSession::STREAMTYPE_METADATA};
+
+constexpr MediaSource::ReadOptions::SeekMode kValidSeekMode[] = {
+    MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+    MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX};
+
+constexpr media_track_type kValidMediaTrackType[] = {
+    MEDIA_TRACK_TYPE_UNKNOWN,  MEDIA_TRACK_TYPE_VIDEO,
+    MEDIA_TRACK_TYPE_AUDIO,    MEDIA_TRACK_TYPE_TIMEDTEXT,
+    MEDIA_TRACK_TYPE_SUBTITLE, MEDIA_TRACK_TYPE_METADATA};
+
+struct TestAHandler : public AHandler {
+public:
+  TestAHandler(std::function<void()> signalEosFunction)
+      : mSignalEosFunction(signalEosFunction) {}
+  virtual ~TestAHandler() {}
+
+protected:
+  void onMessageReceived(const sp<AMessage> &msg) override {
+    int32_t what = -1;
+    msg->findInt32("what", &what);
+    switch (what) {
+    case LiveSession::kWhatError:
+    case LiveSession::kWhatPrepared:
+    case LiveSession::kWhatPreparationFailed: {
+      mSignalEosFunction();
+      break;
+    }
+    }
+    return;
+  }
+
+private:
+  std::function<void()> mSignalEosFunction;
+};
+
+struct TestMediaHTTPConnection : public MediaHTTPConnection {
+public:
+  TestMediaHTTPConnection() {}
+  virtual ~TestMediaHTTPConnection() {}
+
+  virtual bool connect(const char * /*uri*/,
+                       const KeyedVector<String8, String8> * /*headers*/) {
+    return true;
+  }
+
+  virtual void disconnect() { return; }
+
+  virtual ssize_t readAt(off64_t /*offset*/, void * /*data*/, size_t size) {
+    return size;
+  }
+
+  virtual off64_t getSize() { return 0; }
+  virtual status_t getMIMEType(String8 * /*mimeType*/) { return NO_ERROR; }
+  virtual status_t getUri(String8 * /*uri*/) { return NO_ERROR; }
+
+private:
+  DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPConnection);
+};
+
+struct TestMediaHTTPService : public MediaHTTPService {
+public:
+  TestMediaHTTPService() {}
+  ~TestMediaHTTPService(){};
+
+  virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+    mediaHTTPConnection = sp<TestMediaHTTPConnection>::make();
+    return mediaHTTPConnection;
+  }
+
+private:
+  sp<TestMediaHTTPConnection> mediaHTTPConnection = nullptr;
+  DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
+};
+
+class HttpLiveFuzzer {
+public:
+  void process(const uint8_t *data, size_t size);
+  void deInitLiveSession();
+  ~HttpLiveFuzzer() { deInitLiveSession(); }
+
+private:
+  void invokeLiveDataSource();
+  void createM3U8File(const uint8_t *data, size_t size);
+  void initLiveDataSource();
+  void invokeLiveSession();
+  void initLiveSession();
+  void invokeDequeueAccessUnit();
+  void invokeConnectAsync();
+  void invokeSeekTo();
+  void invokeGetConfig();
+  void signalEos();
+  string generateFileName();
+  sp<LiveDataSource> mLiveDataSource = nullptr;
+  sp<LiveSession> mLiveSession = nullptr;
+  sp<ALooper> mLiveLooper = nullptr;
+  sp<TestMediaHTTPService> httpService = nullptr;
+  sp<TestAHandler> mHandler = nullptr;
+  FuzzedDataProvider *mFDP = nullptr;
+  bool mEosReached = false;
+  std::mutex mDownloadCompleteMutex;
+  std::condition_variable mConditionalVariable;
+};
+
+string HttpLiveFuzzer::generateFileName() {
+  return kFileNamePrefix + to_string(getpid()) + kFileNameSuffix;
+}
+
+void HttpLiveFuzzer::createM3U8File(const uint8_t *data, size_t size) {
+  ofstream m3u8File;
+  string currentFileName = generateFileName();
+  m3u8File.open(currentFileName, ios::out | ios::binary);
+  m3u8File.write((char *)data, size);
+  m3u8File.close();
+}
+
+void HttpLiveFuzzer::initLiveDataSource() {
+  mLiveDataSource = sp<LiveDataSource>::make();
+}
+
+void HttpLiveFuzzer::invokeLiveDataSource() {
+  initLiveDataSource();
+  size_t size = mFDP->ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+  sp<ABuffer> buffer = new ABuffer(size);
+  mLiveDataSource->queueBuffer(buffer);
+  uint8_t *data = new uint8_t[size];
+  mLiveDataSource->readAtNonBlocking(kOffSet, data, size);
+  int32_t finalResult = mFDP->ConsumeIntegralInRange(kErrorNoMin, kErrorNoMax);
+  mLiveDataSource->queueEOS(finalResult);
+  mLiveDataSource->reset();
+  mLiveDataSource->countQueuedBuffers();
+  mLiveDataSource->initCheck();
+  delete[] data;
+}
+
+void HttpLiveFuzzer::initLiveSession() {
+  ALooperRoster looperRoster;
+  mHandler =
+      sp<TestAHandler>::make(std::bind(&HttpLiveFuzzer::signalEos, this));
+  mLiveLooper = sp<ALooper>::make();
+  mLiveLooper->setName("http live");
+  mLiveLooper->start();
+  sp<AMessage> notify = sp<AMessage>::make(0, mHandler);
+  httpService = new TestMediaHTTPService();
+  uint32_t flags = mFDP->ConsumeIntegral<uint32_t>();
+  mLiveSession = sp<LiveSession>::make(notify, flags, httpService);
+  mLiveLooper->registerHandler(mLiveSession);
+  looperRoster.registerHandler(mLiveLooper, mHandler);
+}
+
+void HttpLiveFuzzer::invokeDequeueAccessUnit() {
+  LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+  sp<ABuffer> buffer;
+  mLiveSession->dequeueAccessUnit(stream, &buffer);
+}
+
+void HttpLiveFuzzer::invokeSeekTo() {
+  int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(0, kMaxTimeUs);
+  MediaSource::ReadOptions::SeekMode mode =
+      mFDP->PickValueInArray(kValidSeekMode);
+  mLiveSession->seekTo(timeUs, mode);
+}
+
+void HttpLiveFuzzer::invokeGetConfig() {
+  mLiveSession->getTrackCount();
+  size_t trackIndex = mFDP->ConsumeIntegral<size_t>();
+  mLiveSession->getTrackInfo(trackIndex);
+  media_track_type type = mFDP->PickValueInArray(kValidMediaTrackType);
+  mLiveSession->getSelectedTrack(type);
+  sp<MetaData> meta;
+  LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+  mLiveSession->getStreamFormatMeta(stream, &meta);
+  mLiveSession->getKeyForStream(stream);
+  if (stream != LiveSession::STREAMTYPE_SUBTITLES) {
+    mLiveSession->getSourceTypeForStream(stream);
+  }
+}
+
+void HttpLiveFuzzer::invokeConnectAsync() {
+  string currentFileName = generateFileName();
+  string url = kFileUrlPrefix + currentFileName;
+  string str_1 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+  string str_2 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+
+  KeyedVector<String8, String8> headers;
+  headers.add(String8(str_1.c_str()), String8(str_2.c_str()));
+  mLiveSession->connectAsync(url.c_str(), &headers);
+}
+
+void HttpLiveFuzzer::invokeLiveSession() {
+  initLiveSession();
+  BufferingSettings bufferingSettings;
+  bufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+  bufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
+  mLiveSession->setBufferingSettings(bufferingSettings);
+  invokeConnectAsync();
+  std::unique_lock waitForDownloadComplete(mDownloadCompleteMutex);
+  mConditionalVariable.wait(waitForDownloadComplete,
+                            [this] { return mEosReached; });
+  if (mLiveSession->isSeekable()) {
+    invokeSeekTo();
+  }
+  invokeDequeueAccessUnit();
+  size_t index = mFDP->ConsumeIntegral<size_t>();
+  bool select = mFDP->ConsumeBool();
+  mLiveSession->selectTrack(index, select);
+  mLiveSession->hasDynamicDuration();
+  int64_t firstTimeUs =
+      mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+  int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+  int32_t discontinuitySeq = mFDP->ConsumeIntegral<int32_t>();
+  mLiveSession->calculateMediaTimeUs(firstTimeUs, timeUs, discontinuitySeq);
+  invokeGetConfig();
+}
+
+void HttpLiveFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  createM3U8File(data, size);
+  invokeLiveDataSource();
+  invokeLiveSession();
+  delete mFDP;
+}
+
+void HttpLiveFuzzer::deInitLiveSession() {
+  if (mLiveSession != nullptr) {
+    mLiveSession->disconnect();
+    mLiveLooper->unregisterHandler(mLiveSession->id());
+    mLiveLooper->stop();
+  }
+  mLiveSession.clear();
+  mLiveLooper.clear();
+}
+
+void HttpLiveFuzzer::signalEos() {
+  {
+    std::lock_guard<std::mutex> waitForDownloadComplete(mDownloadCompleteMutex);
+    // Set the flag under the same mutex the waiter in invokeLiveSession() holds,
+    // so the predicate check and this write cannot race.
+    mEosReached = true;
+  }
+  mConditionalVariable.notify_one();
+  return;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  HttpLiveFuzzer httpliveFuzzer;
+  httpliveFuzzer.process(data, size);
+  return 0;
+}
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
new file mode 100644
index 0000000..703cc7e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
@@ -0,0 +1,15 @@
+#m3u8-Tags
+kw1="#EXTM3U"
+kw2="#EXT-X-VERSION:"
+kw3="#EXT-X-TARGETDURATION:"
+kw4="#EXT-X-PLAYLIST-TYPE:"
+kw5="#EXTINF:"
+kw6="#EXT-X-ENDLIST"
+kw7="#EXT-X-MEDIA-SEQUENCE:"
+kw8="#EXT-X-KEY:METHOD=NONE"
+kw9="#EXT-X-DISCONTINUITY:"
+kw10="#EXT-X-DISCONTINUITY-SEQUENCE:0"
+kw11="#EXT-X-STREAM-INF:BANDWIDTH="
+kw12="#EXT-X-STREAM-INF:CODECS="
+kw13="#EXT-X-BYTERANGE:"
+kw14="#EXT-X-MEDIA"
diff --git a/media/libstagefright/id3/Android.bp b/media/libstagefright/id3/Android.bp
index 3f5ba47..bea3e34 100644
--- a/media/libstagefright/id3/Android.bp
+++ b/media/libstagefright/id3/Android.bp
@@ -20,6 +20,11 @@
 cc_library_static {
     name: "libstagefright_id3",
     min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+
 
     srcs: ["ID3.cpp"],
 
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/libstagefright/id3/TEST_MAPPING
index d82d26e..6106908 100644
--- a/media/libstagefright/id3/TEST_MAPPING
+++ b/media/libstagefright/id3/TEST_MAPPING
@@ -9,14 +9,15 @@
 
   "presubmit-large": [
     // this doesn't seem to run any tests.
-    // but: cts-tradefed run -m CtsMediaTestCases -t android.media.cts.MediaMetadataRetrieverTest
+    // but: cts-tradefed run -m CtsMediaMiscTestCases -t \
+    // android.media.misc.cts.MediaMetadataRetrieverTest
     // does run the 32 and 64 bit tests, but not the instant tests
     // but all I know is that with 'atest', it's not running
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaMiscTestCases",
       "options": [
           {
-            "include-filter": "android.media.cts.MediaMetadataRetrieverTest"
+            "include-filter": "android.media.misc.cts.MediaMetadataRetrieverTest"
           }
       ]
     }
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index d59e4f5..e417324 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -50,7 +50,8 @@
     sp<IMemory> extractFrame(FrameRect *rect = NULL);
 
     static sp<IMemory> getMetadataOnly(
-            const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail = false);
+            const sp<MetaData> &trackMeta, int colorFormat,
+            bool thumbnail = false, uint32_t bitDepth = 0);
 
 protected:
     virtual ~FrameDecoder();
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index c84cc10..5a21755 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -71,6 +71,9 @@
     virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
     virtual void initiateStart();
     virtual void initiateShutdown(bool keepComponentAllocated = false);
+    virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+    virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+    virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
 
     status_t queryCapabilities(
             const char* owner, const char* name,
@@ -519,6 +522,7 @@
     status_t setLatency(uint32_t latency);
     status_t getLatency(uint32_t *latency);
     status_t setTunnelPeek(int32_t tunnelPeek);
+    status_t setTunnelPeekLegacy(int32_t isLegacy);
     status_t setAudioPresentation(int32_t presentationId, int32_t programId);
     status_t setOperatingRate(float rateFloat, bool isVideo);
     status_t getIntraRefreshPeriod(uint32_t *intraRefreshPeriod);
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 43d50f1..5e84977 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -35,7 +35,9 @@
 
 class AudioRecord;
 
-struct AudioSource : public MediaSource, public MediaBufferObserver {
+struct AudioSource : public MediaSource,
+                     public MediaBufferObserver,
+                     public AudioRecord::IAudioRecordCallback {
     // Note that the "channels" parameter _is_ the number of channels,
     // _not_ a bitmask of audio_channels_t constants.
     AudioSource(
@@ -74,7 +76,6 @@
             MediaBufferBase **buffer, const ReadOptions *options = NULL);
     virtual status_t setStopTimeUs(int64_t stopTimeUs);
 
-    status_t dataCallback(const AudioRecord::Buffer& buffer);
     virtual void signalBufferReturned(MediaBufferBase *buffer);
 
     status_t setInputDevice(audio_port_handle_t deviceId);
@@ -142,6 +143,10 @@
     void waitOutstandingEncodingFrames_l();
     status_t reset();
 
+    // IAudioRecordCallback implementation
+    size_t onMoreData(const AudioRecord::Buffer&) override;
+    void onOverrun() override;
+
     AudioSource(const AudioSource &);
     AudioSource &operator=(const AudioSource &);
 
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 75b0d8e..7a05f00 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -47,14 +47,20 @@
             size_t dstCropLeft, size_t dstCropTop,
             size_t dstCropRight, size_t dstCropBottom);
 
+    struct Coeffs; // matrix coefficients
+
 private:
     struct ColorSpace {
         uint32_t mStandard;
         uint32_t mRange;
         uint32_t mTransfer;
 
-        bool isBt709();
-        bool isJpeg();
+        bool isBt2020() const;
+
+        // libyuv helper methods
+        bool isH420() const;
+        bool isI420() const;
+        bool isJ420() const;
     };
 
     struct BitmapParams {
@@ -78,8 +84,13 @@
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
     ColorSpace mSrcColorSpace;
     uint8_t *mClip;
+    uint16_t *mClip10Bit;
 
     uint8_t *initClip();
+    uint16_t *initClip10Bit();
+
+    // returns the YUV2RGB matrix coefficients according to the color aspects and bit depth
+    const struct Coeffs *getMatrix() const;
 
     status_t convertCbYCrY(
             const BitmapParams &src, const BitmapParams &dst);
@@ -108,9 +119,19 @@
     status_t convertYUV420SemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    status_t convertYUV420SemiPlanarBase(
+            const BitmapParams &src, const BitmapParams &dst,
+            const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21 = false);
+
     status_t convertTIYUV420PackedSemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    status_t convertYUVP010(
+                const BitmapParams &src, const BitmapParams &dst);
+
+    status_t convertYUVP010ToRGBA1010102(
+                const BitmapParams &src, const BitmapParams &dst);
+
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
 };
diff --git a/media/libstagefright/include/media/stagefright/MediaBuffer.h b/media/libstagefright/include/media/stagefright/MediaBuffer.h
index 2c03f27..f070aac 100644
--- a/media/libstagefright/include/media/stagefright/MediaBuffer.h
+++ b/media/libstagefright/include/media/stagefright/MediaBuffer.h
@@ -105,7 +105,6 @@
         if (mMemory.get() == nullptr || mMemory->unsecurePointer() == nullptr) return 0;
         int32_t remoteRefcount =
                 reinterpret_cast<SharedControl *>(mMemory->unsecurePointer())->getRemoteRefcount();
-        // Sanity check so that remoteRefCount() is non-negative.
         return remoteRefcount >= 0 ? remoteRefcount : 0; // do not allow corrupted data.
 #else
         return 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index d372140..1d2d711 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -25,6 +25,7 @@
 #include <media/hardware/CryptoAPI.h>
 #include <media/MediaCodecInfo.h>
 #include <media/MediaMetrics.h>
+#include <media/MediaProfiles.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <utils/Vector.h>
@@ -70,6 +71,13 @@
 using aidl::android::media::MediaResourceParcel;
 
 struct MediaCodec : public AHandler {
+    enum Domain {
+        DOMAIN_UNKNOWN = 0,
+        DOMAIN_VIDEO = 1,
+        DOMAIN_AUDIO = 2,
+        DOMAIN_IMAGE = 3
+    };
+
     enum ConfigureFlags {
         CONFIGURE_FLAG_ENCODE           = 1,
         CONFIGURE_FLAG_USE_BLOCK_MODEL  = 2,
@@ -390,6 +398,7 @@
     // <all states>     -> EnabledNoBuffer  when flush
     // <all states>     -> EnabledNoBuffer  when stop then configure then start
     enum struct TunnelPeekState {
+        kLegacyMode,
         kDisabledNoBuffer,
         kEnabledNoBuffer,
         kDisabledQueued,
@@ -401,7 +410,6 @@
     struct ResourceManagerServiceProxy;
 
     State mState;
-    uid_t mUid;
     bool mReleasedByResourceManager;
     sp<ALooper> mLooper;
     sp<ALooper> mCodecLooper;
@@ -438,13 +446,20 @@
 
     sp<ResourceManagerServiceProxy> mResourceManagerProxy;
 
-    bool mIsVideo;
+    Domain mDomain;
     AString mLogSessionId;
-    int32_t mVideoWidth;
-    int32_t mVideoHeight;
+    int32_t mWidth;
+    int32_t mHeight;
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
+    int32_t mConfigColorTransfer;
+    bool mHDRStaticInfo;
+    bool mHDR10PlusInfo;
+    void updateHDRFormatMetric();
+    hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
+            const AString &mediaType);
+
     // initial create parameters
     AString mInitName;
 
@@ -496,7 +511,7 @@
 
     std::shared_ptr<BufferChannelBase> mBufferChannel;
 
-    PlaybackDurationAccumulator * mPlaybackDurationAccumulator;
+    std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
     bool mIsSurfaceToScreen;
 
     MediaCodec(
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index c2c79dd..78792c5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -364,7 +364,7 @@
 inline static const char *asString_AV1Profile(int32_t i, const char *def = "??") {
     switch (i) {
         case AV1ProfileMain8:           return "Main8";
-        case AV1ProfileMain10:          return "Main10HDR";
+        case AV1ProfileMain10:          return "Main10";
         case AV1ProfileMain10HDR10:     return "Main10HDR10";
         case AV1ProfileMain10HDR10Plus: return "Main10HDRPlus";
         default:                        return def;
@@ -540,6 +540,9 @@
 constexpr int32_t DolbyVisionLevelUhd30   = 0x40;
 constexpr int32_t DolbyVisionLevelUhd48   = 0x80;
 constexpr int32_t DolbyVisionLevelUhd60   = 0x100;
+constexpr int32_t DolbyVisionLevelUhd120  = 0x200;
+constexpr int32_t DolbyVisionLevel8k30    = 0x400;
+constexpr int32_t DolbyVisionLevel8k60    = 0x800;
 
 inline static const char *asString_DolbyVisionLevel(int32_t i, const char *def = "??") {
     switch (i) {
@@ -552,6 +555,9 @@
         case DolbyVisionLevelUhd30: return "Uhd30";
         case DolbyVisionLevelUhd48: return "Uhd48";
         case DolbyVisionLevelUhd60: return "Uhd60";
+        case DolbyVisionLevelUhd120: return "Uhd120";
+        case DolbyVisionLevel8k30:  return "8k30";
+        case DolbyVisionLevel8k60:  return "8k60";
         default:                    return def;
     }
 }
@@ -586,9 +592,11 @@
 constexpr int32_t COLOR_Format24bitBGR888             = 12;
 constexpr int32_t COLOR_Format24bitRGB888             = 11;
 constexpr int32_t COLOR_Format25bitARGB1888           = 14;
+constexpr int32_t COLOR_Format32bitABGR2101010        = 0x7F00AAA2;
 constexpr int32_t COLOR_Format32bitABGR8888           = 0x7F00A000;
 constexpr int32_t COLOR_Format32bitARGB8888           = 16;
 constexpr int32_t COLOR_Format32bitBGRA8888           = 15;
+constexpr int32_t COLOR_Format64bitABGRFloat          = 0x7F000F16;
 constexpr int32_t COLOR_Format8bitRGB332              = 2;
 constexpr int32_t COLOR_FormatCbYCrY                  = 27;
 constexpr int32_t COLOR_FormatCrYCbY                  = 28;
@@ -642,9 +650,11 @@
         case COLOR_Format24bitBGR888:               return "24bitBGR888";
         case COLOR_Format24bitRGB888:               return "24bitRGB888";
         case COLOR_Format25bitARGB1888:             return "25bitARGB1888";
+        case COLOR_Format32bitABGR2101010:          return "32bitABGR2101010";
         case COLOR_Format32bitABGR8888:             return "32bitABGR8888";
         case COLOR_Format32bitARGB8888:             return "32bitARGB8888";
         case COLOR_Format32bitBGRA8888:             return "32bitBGRA8888";
+        case COLOR_Format64bitABGRFloat:            return "64bitABGRFloat";
         case COLOR_Format8bitRGB332:                return "8bitRGB332";
         case COLOR_FormatCbYCrY:                    return "CbYCrY";
         case COLOR_FormatCrYCbY:                    return "CrYCbY";
@@ -677,6 +687,7 @@
         case COLOR_FormatYUV422SemiPlanar:          return "YUV422SemiPlanar";
         case COLOR_FormatYUV444Flexible:            return "YUV444Flexible";
         case COLOR_FormatYUV444Interleaved:         return "YUV444Interleaved";
+        case COLOR_FormatYUVP010:                   return "YUVP010";
         case COLOR_QCOM_FormatYUV420SemiPlanar:     return "QCOM_YUV420SemiPlanar";
         case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
         default:                                    return def;
@@ -684,6 +695,7 @@
 }
 
 constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
+constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
 constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
 constexpr char FEATURE_PartialFrame[] = "partial-frame";
 constexpr char FEATURE_QpBounds[] = "qp-bounds";
@@ -737,6 +749,14 @@
 constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
 constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
 
+constexpr int32_t PICTURE_TYPE_I = 1;
+constexpr int32_t PICTURE_TYPE_P = 2;
+constexpr int32_t PICTURE_TYPE_B = 3;
+constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
+
+constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
+constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
+
 constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
 constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
 constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
@@ -796,6 +816,7 @@
 constexpr char KEY_OPERATING_RATE[] = "operating-rate";
 constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
 constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
+constexpr char KEY_PICTURE_TYPE[] = "picture-type";
 constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
 constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
 constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
@@ -812,6 +833,8 @@
 constexpr char KEY_TILE_HEIGHT[] = "tile-height";
 constexpr char KEY_TILE_WIDTH[] = "tile-width";
 constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
+constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
 constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
 constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
 constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
diff --git a/media/libstagefright/MediaCodecListOverrides.h b/media/libstagefright/include/media/stagefright/MediaCodecListOverrides.h
similarity index 100%
rename from media/libstagefright/MediaCodecListOverrides.h
rename to media/libstagefright/include/media/stagefright/MediaCodecListOverrides.h
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index d1df2ca..b91c850 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -163,11 +163,28 @@
             || (ERROR_DRM_VENDOR_MIN <= err && err <= ERROR_DRM_VENDOR_MAX);
 }
 
-static inline std::string StrCryptoError(status_t err) {
 #define STATUS_CASE(STATUS) \
     case STATUS:            \
         return #STATUS
 
+static inline std::string StrMediaError(status_t err) {
+    switch(err) {
+        STATUS_CASE(ERROR_ALREADY_CONNECTED);
+        STATUS_CASE(ERROR_NOT_CONNECTED);
+        STATUS_CASE(ERROR_UNKNOWN_HOST);
+        STATUS_CASE(ERROR_CANNOT_CONNECT);
+        STATUS_CASE(ERROR_IO);
+        STATUS_CASE(ERROR_CONNECTION_LOST);
+        STATUS_CASE(ERROR_MALFORMED);
+        STATUS_CASE(ERROR_OUT_OF_RANGE);
+        STATUS_CASE(ERROR_BUFFER_TOO_SMALL);
+        STATUS_CASE(ERROR_UNSUPPORTED);
+        STATUS_CASE(ERROR_END_OF_STREAM);
+    }
+    return statusToString(err);
+}
+
+static inline std::string StrCryptoError(status_t err) {
     switch (err) {
         STATUS_CASE(ERROR_DRM_UNKNOWN);
         STATUS_CASE(ERROR_DRM_NO_LICENSE);
@@ -209,10 +226,10 @@
         STATUS_CASE(ERROR_DRM_STORAGE_READ);
         STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
         STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
-#undef STATUS_CASE
     }
     return statusToString(err);
 }
+#undef STATUS_CASE
 
 }  // namespace android
 
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index c80012e..88c1f3f 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -60,6 +60,8 @@
     kKeyAVCC              = 'avcc',  // raw data
     kKeyHVCC              = 'hvcc',  // raw data
     kKeyDVCC              = 'dvcc',  // raw data
+    kKeyDVVC              = 'dvvc',  // raw data
+    kKeyDVWC              = 'dvwc',  // raw data
     kKeyAV1C              = 'av1c',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
     kKeyThumbnailAV1C     = 'tav1',  // raw data
@@ -283,6 +285,8 @@
     kTypeHVCC        = 'hvcc',
     kTypeAV1C        = 'av1c',
     kTypeDVCC        = 'dvcc',
+    kTypeDVVC        = 'dvvc',
+    kTypeDVWC        = 'dvwc',
     kTypeD263        = 'd263',
     kTypeHCOS        = 'hcos',
 };
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfo.h b/media/libstagefright/include/media/stagefright/ProcessInfo.h
index b8a3c10..06b9c92 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfo.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfo.h
@@ -30,7 +30,8 @@
     ProcessInfo();
 
     virtual bool getPriority(int pid, int* priority);
-    virtual bool isValidPid(int pid);
+    virtual bool isPidTrusted(int pid);
+    virtual bool isPidUidTrusted(int pid, int uid);
     virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
     virtual void removeProcessInfoOverride(int pid);
 
diff --git a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
index 9260181..b7fc858 100644
--- a/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
+++ b/media/libstagefright/include/media/stagefright/ProcessInfoInterface.h
@@ -23,7 +23,8 @@
 
 struct ProcessInfoInterface : public RefBase {
     virtual bool getPriority(int pid, int* priority) = 0;
-    virtual bool isValidPid(int pid) = 0;
+    virtual bool isPidTrusted(int pid) = 0;
+    virtual bool isPidUidTrusted(int pid, int uid) = 0;
     virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
     virtual void removeProcessInfoOverride(int pid);
 
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
index fcfcbec..a4f512a 100644
--- a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
+++ b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
@@ -21,7 +21,9 @@
 
 namespace android {
 
-class ISurfaceComposer;
+namespace gui {
+    class ISurfaceComposer;
+}
 
 struct VideoFrameScheduler : public VideoFrameSchedulerBase {
     VideoFrameScheduler();
@@ -32,7 +34,7 @@
 
 private:
     void updateVsync() override;
-    sp<ISurfaceComposer> mComposer;
+    sp<gui::ISurfaceComposer> mComposer;
 };
 
 }  // namespace android
diff --git a/media/libstagefright/mpeg2ts/Android.bp b/media/libstagefright/mpeg2ts/Android.bp
index a970224..283df1e 100644
--- a/media/libstagefright/mpeg2ts/Android.bp
+++ b/media/libstagefright/mpeg2ts/Android.bp
@@ -27,11 +27,6 @@
         "ESQueue.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/native/include/media/openmax",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
@@ -59,16 +54,21 @@
         "libstagefright_foundation_headers",
     ],
 
-    export_include_dirs: ["."],
+    export_include_dirs: ["include"],
+
+    local_include_dirs: ["include/mpeg2ts"],
 
     whole_static_libs: [
         "libstagefright_metadatautils",
     ],
 
+}
+
+cc_defaults {
+    name: "libstagefright_mpeg2support_sdk_defaults",
+
     min_sdk_version: "29",
-
     host_supported: true,
-
     target: {
         darwin: {
             enabled: false,
@@ -76,11 +76,19 @@
     },
 }
 
+cc_library_headers {
+    name: "libstagefright_mpeg2support_headers",
+    defaults: [
+        "libstagefright_mpeg2support_sdk_defaults",
+    ],
+    export_include_dirs: ["include"],
+}
 
 cc_library_static {
     name: "libstagefright_mpeg2support",
     defaults: [
         "libstagefright_mpeg2support_defaults",
+        "libstagefright_mpeg2support_sdk_defaults",
     ],
     cflags: [
         "-DENABLE_CRYPTO",
@@ -97,6 +105,7 @@
     name: "libstagefright_mpeg2support_nocrypto",
     defaults: [
         "libstagefright_mpeg2support_defaults",
+        "libstagefright_mpeg2support_sdk_defaults",
     ],
     apex_available: [
         "com.android.media",
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/include/mpeg2ts/ATSParser.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/ATSParser.h
rename to media/libstagefright/mpeg2ts/include/mpeg2ts/ATSParser.h
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/include/mpeg2ts/AnotherPacketSource.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/AnotherPacketSource.h
rename to media/libstagefright/mpeg2ts/include/mpeg2ts/AnotherPacketSource.h
diff --git a/media/libstagefright/mpeg2ts/CasManager.h b/media/libstagefright/mpeg2ts/include/mpeg2ts/CasManager.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/CasManager.h
rename to media/libstagefright/mpeg2ts/include/mpeg2ts/CasManager.h
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/include/mpeg2ts/ESQueue.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/ESQueue.h
rename to media/libstagefright/mpeg2ts/include/mpeg2ts/ESQueue.h
diff --git a/media/libstagefright/mpeg2ts/HlsSampleDecryptor.h b/media/libstagefright/mpeg2ts/include/mpeg2ts/HlsSampleDecryptor.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/HlsSampleDecryptor.h
rename to media/libstagefright/mpeg2ts/include/mpeg2ts/HlsSampleDecryptor.h
diff --git a/media/libstagefright/mpeg2ts/SampleDecryptor.h b/media/libstagefright/mpeg2ts/include/mpeg2ts/SampleDecryptor.h
similarity index 100%
rename from media/libstagefright/mpeg2ts/SampleDecryptor.h
rename to media/libstagefright/mpeg2ts/include/mpeg2ts/SampleDecryptor.h
diff --git a/media/libstagefright/mpeg2ts/test/Android.bp b/media/libstagefright/mpeg2ts/test/Android.bp
index 464b039..34a8d3e 100644
--- a/media/libstagefright/mpeg2ts/test/Android.bp
+++ b/media/libstagefright/mpeg2ts/test/Android.bp
@@ -57,11 +57,6 @@
         "libstagefright_mpeg2support",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/extractors/",
-        "frameworks/av/media/libstagefright/",
-    ],
-
     header_libs: [
         "libmedia_headers",
         "libaudioclient_headers",
diff --git a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
index 79c233b..9e24a99 100644
--- a/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
+++ b/media/libstagefright/mpeg2ts/test/Mpeg2tsUnitTest.cpp
@@ -26,9 +26,8 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataBase.h>
 #include <media/stagefright/foundation/AUtils.h>
-
-#include "mpeg2ts/ATSParser.h"
-#include "mpeg2ts/AnotherPacketSource.h"
+#include <mpeg2ts/AnotherPacketSource.h>
+#include <mpeg2ts/ATSParser.h>
 
 #include "Mpeg2tsUnitTestEnvironment.h"
 
diff --git a/media/libstagefright/omx/OMXStore.cpp b/media/libstagefright/omx/OMXStore.cpp
index e8fee42..4827d9e 100644
--- a/media/libstagefright/omx/OMXStore.cpp
+++ b/media/libstagefright/omx/OMXStore.cpp
@@ -28,6 +28,8 @@
 #include <dlfcn.h>
 #include <fcntl.h>
 
+#include <sstream>
+
 namespace android {
 
 OMXStore::OMXStore() {
@@ -99,9 +101,35 @@
     }
 }
 
+static int getFirstApiLevel() {
+    int boardApiLevel = android::base::GetIntProperty("ro.board.first_api_level", 0);
+    if (boardApiLevel != 0) {
+        return boardApiLevel;
+    }
+
+    return android::base::GetIntProperty("ro.product.first_api_level", __ANDROID_API_T__);
+}
+
+static bool isTV() {
+    static const bool kIsTv = []() {
+        std::string characteristics = android::base::GetProperty("ro.build.characteristics", "");
+        std::stringstream ss(characteristics);
+        for (std::string item; std::getline(ss, item, ','); ) {
+            if (item == "tv") {
+                return true;
+            }
+        }
+        return false;
+    }();
+    return kIsTv;
+}
+
 void OMXStore::addPlugin(OMXPluginBase *plugin) {
     Mutex::Autolock autoLock(mLock);
 
+    bool typeTV = isTV();
+    int firstApiLevel = getFirstApiLevel();
+
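+    // Devices whose first API level is Android T (33) or later get no OMX audio or video
+    // codecs at all; non-TV devices that shipped with Android S (31) or later already drop
+    // the OMX video codecs. Components advertising a matching role are skipped below.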
     OMX_U32 index = 0;
 
     char name[128];
@@ -110,6 +138,32 @@
                     name, sizeof(name), index++)) == OMX_ErrorNone) {
         String8 name8(name);
 
+        Vector<String8> roles;
+        OMX_ERRORTYPE err = plugin->getRolesOfComponent(name, &roles);
+        if (err == OMX_ErrorNone) {
+            bool skip = false;
+            for (String8 role : roles) {
+                if (role.find("video_decoder") != -1 || role.find("video_encoder") != -1) {
+                    if (firstApiLevel >= __ANDROID_API_T__) {
+                        skip = true;
+                        break;
+                    } else if (!typeTV && firstApiLevel >= __ANDROID_API_S__) {
+                        skip = true;
+                        break;
+                    }
+                }
+                if (role.find("audio_decoder") != -1 || role.find("audio_encoder") != -1) {
+                    if (firstApiLevel >= __ANDROID_API_T__) {
+                        skip = true;
+                        break;
+                    }
+                }
+            }
+            if (skip) {
+                continue;
+            }
+        }
+
         if (mPluginByComponentName.indexOfKey(name8) >= 0) {
             ALOGE("A component of name '%s' already exists, ignoring this one.",
                  name8.string());
diff --git a/media/libstagefright/omx/tests/Android.bp b/media/libstagefright/omx/tests/Android.bp
index 83cc80e..159fd19 100644
--- a/media/libstagefright/omx/tests/Android.bp
+++ b/media/libstagefright/omx/tests/Android.bp
@@ -43,7 +43,6 @@
     header_libs: [
         "libbase_headers",
         "libmediametrics_headers",
-        "media_ndk_headers",
     ],
 
     cflags: [
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 9a7bad9..3c00a1c 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -87,7 +87,7 @@
         enabled: true,
     },
     double_loadable: true,
-    clang: true,
+
     cflags: [
         "-fvisibility=hidden",
         "-Werror=format",
diff --git a/media/libstagefright/rtsp/AAMRAssembler.cpp b/media/libstagefright/rtsp/AAMRAssembler.cpp
index bb2a238..e773031 100644
--- a/media/libstagefright/rtsp/AAMRAssembler.cpp
+++ b/media/libstagefright/rtsp/AAMRAssembler.cpp
@@ -18,9 +18,8 @@
 #define LOG_TAG "AAMRAssembler"
 #include <utils/Log.h>
 
-#include "AAMRAssembler.h"
-
-#include "ARTPSource.h"
+#include <media/stagefright/rtsp/AAMRAssembler.h>
+#include <media/stagefright/rtsp/ARTPSource.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 30cdbc9..2f516d5 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -18,9 +18,9 @@
 #define LOG_TAG "AAVCAssembler"
 #include <utils/Log.h>
 
-#include "AAVCAssembler.h"
+#include <media/stagefright/rtsp/AAVCAssembler.h>
 
-#include "ARTPSource.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/AH263Assembler.cpp b/media/libstagefright/rtsp/AH263Assembler.cpp
index 3436e95..584b4de 100644
--- a/media/libstagefright/rtsp/AH263Assembler.cpp
+++ b/media/libstagefright/rtsp/AH263Assembler.cpp
@@ -17,9 +17,9 @@
 #define LOG_TAG "AH263Assembler"
 #include <utils/Log.h>
 
-#include "AH263Assembler.h"
+#include <media/stagefright/rtsp/AH263Assembler.h>
 
-#include "ARTPSource.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index b240339..bb42d1f 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -18,14 +18,14 @@
 #define LOG_TAG "AHEVCAssembler"
 #include <utils/Log.h>
 
-#include "AHEVCAssembler.h"
+#include <media/stagefright/rtsp/AHEVCAssembler.h>
 
-#include "ARTPSource.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
 
+#include <HevcUtils.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
-#include <include/HevcUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
 
 #include <stdint.h>
diff --git a/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp b/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
index 0988774..2101de1 100644
--- a/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
@@ -18,10 +18,10 @@
 #define LOG_TAG "AMPEG2TSAssembler"
 #include <utils/Log.h>
 
-#include "AMPEG2TSAssembler.h"
+#include <media/stagefright/rtsp/AMPEG2TSAssembler.h>
 
-#include "ARTPSource.h"
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index 4302aee..0fc03ae 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -17,9 +17,9 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AMPEG4AudioAssembler"
 
-#include "AMPEG4AudioAssembler.h"
+#include <media/stagefright/rtsp/AMPEG4AudioAssembler.h>
 
-#include "ARTPSource.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
 
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/foundation/ABitReader.h>
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 7bd33c1..1c8eef5 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -18,10 +18,10 @@
 #define LOG_TAG "AMPEG4ElementaryAssembler"
 #include <utils/Log.h>
 
-#include "AMPEG4ElementaryAssembler.h"
+#include <media/stagefright/rtsp/AMPEG4ElementaryAssembler.h>
 
-#include "ARTPSource.h"
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
@@ -204,7 +204,7 @@
 };
 
 bool AMPEG4ElementaryAssembler::initCheck() {
-    if(mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0) {
+    if(mIsGeneric && (mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0)) {
         android_errorWriteLog(0x534e4554, "124777537");
         return false;
     }
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 169df46..db63183 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -18,10 +18,9 @@
 #define LOG_TAG "APacketSource"
 #include <utils/Log.h>
 
-#include "APacketSource.h"
-
-#include "ARawAudioAssembler.h"
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/APacketSource.h>
+#include <media/stagefright/rtsp/ARawAudioAssembler.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <ctype.h>
 
diff --git a/media/libstagefright/rtsp/ARTPAssembler.cpp b/media/libstagefright/rtsp/ARTPAssembler.cpp
index 52aa3a0..b9869de 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.cpp
+++ b/media/libstagefright/rtsp/ARTPAssembler.cpp
@@ -15,7 +15,7 @@
  */
 
 #define LOG_TAG "ARTPAssembler"
-#include "ARTPAssembler.h"
+#include <media/stagefright/rtsp/ARTPAssembler.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 83291f3..a61f48f 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -18,9 +18,10 @@
 #define LOG_TAG "ARTPConnection"
 #include <utils/Log.h>
 
-#include "ARTPConnection.h"
-#include "ARTPSource.h"
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/ARTPAssembler.h>
+#include <media/stagefright/rtsp/ARTPConnection.h>
+#include <media/stagefright/rtsp/ARTPSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -41,6 +42,10 @@
     return data[0] << 8 | data[1];
 }
 
+static uint32_t u24at(const uint8_t *data) {
+    return u16at(data) << 16 | data[2];
+}
+
 static uint32_t u32at(const uint8_t *data) {
     return u16at(data) << 16 | u16at(&data[2]);
 }
@@ -877,11 +882,15 @@
         switch (data[1]) {
             case 200:
             {
-                parseSR(s, data, headerLength);
+                parseSenderReport(s, data, headerLength);
                 break;
             }
 
             case 201:  // RR
+            {
+                parseReceiverReport(s, data, headerLength);
+                break;
+            }
             case 202:  // SDES
             case 204:  // APP
                 break;
@@ -940,18 +949,44 @@
     return OK;
 }
 
-status_t ARTPConnection::parseSR(
+status_t ARTPConnection::parseSenderReport(
         StreamInfo *s, const uint8_t *data, size_t size) {
-    size_t RC = data[0] & 0x1f;
-
-    if (size < (7 + RC * 6) * 4) {
-        // Packet too short for the minimal SR header.
+    ALOG_ASSERT(size >= 1, "parseSenderReport: invalid packet size.");
+    size_t receptionReportCount = data[0] & 0x1f;
+    if (size < (7 + (receptionReportCount * 6)) * 4) {
+        // Packet too short for the minimal sender report header.
         return -1;
     }
 
-    uint32_t id = u32at(&data[4]);
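+    // RFC 3550 sender report layout (byte offsets): sender SSRC at 4, 64-bit NTP timestamp
+    // at 8, RTP timestamp at 16, sender's packet count at 20, sender's octet count at 24,
+    // followed by one 24-byte reception report block per entry counted in RC.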
+    int64_t recvTimeUs = ALooper::GetNowUs();
+    uint32_t senderId = u32at(&data[4]);
     uint64_t ntpTime = u64at(&data[8]);
     uint32_t rtpTime = u32at(&data[16]);
+    uint32_t pktCount = u32at(&data[20]);
+    uint32_t octCount = u32at(&data[24]);
+
+    ALOGD("SR received: ssrc=0x%08x, rtpTime%u == ntpTime %llu, pkt=%u, oct=%u",
+            senderId, rtpTime, (unsigned long long)ntpTime, pktCount, octCount);
+
+    sp<ARTPSource> source = findSource(s, senderId);
+    source->timeUpdate(recvTimeUs, rtpTime, ntpTime);
+
+    for (int32_t i = 0; i < receptionReportCount; i++) {
+        int32_t offset = 28 + (i * 24);
+        parseReceptionReportBlock(s, recvTimeUs, senderId, data + offset, size - offset);
+    }
+
+    return 0;
+}
+
+status_t ARTPConnection::parseReceiverReport(
+        StreamInfo *s, const uint8_t *data, size_t size) {
+    ALOG_ASSERT(size >= 1, "parseReceiverReport: invalid packet size.");
+    size_t receptionReportCount = data[0] & 0x1f;
+    if (size < (2 + (receptionReportCount * 6)) * 4) {
+        // Packet too short for the minimal receiver report header.
+        return -1;
+    }
 
 #if 0
     ALOGI("XXX timeUpdate: ssrc=0x%08x, rtpTime %u == ntpTime %.3f",
@@ -959,10 +994,40 @@
          rtpTime,
          (ntpTime >> 32) + (double)(ntpTime & 0xffffffff) / (1ll << 32));
 #endif
+    int64_t recvTimeUs = ALooper::GetNowUs();
+    uint32_t senderId = u32at(&data[4]);
 
-    sp<ARTPSource> source = findSource(s, id);
+    for (int i = 0; i < receptionReportCount; i++) {
+        int32_t offset = 8 + (i * 24);
+        parseReceptionReportBlock(s, recvTimeUs, senderId, data + offset, size - offset);
+    }
 
-    source->timeUpdate(rtpTime, ntpTime);
+    return 0;
+}
+
+status_t ARTPConnection::parseReceptionReportBlock(
+        StreamInfo *s, int64_t recvTimeUs, uint32_t senderId, const uint8_t *data, size_t size) {
+    ALOG_ASSERT(size >= 24, "parseReceptionReportBlock: invalid packet size.");
+    if (size < 24) {
+        // remaining size is smaller than reception report block size.
+        return -1;
+    }
+
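+    // RFC 3550 report block layout (byte offsets): reportee SSRC at 0, fraction lost at 4,
+    // 24-bit cumulative packets lost at 5, extended highest sequence number at 8,
+    // interarrival jitter at 12, last SR timestamp (LSR) at 16, delay since last SR (DLSR) at 20.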
+    uint32_t rbId = u32at(&data[0]);
+    uint32_t fLost = data[4];
+    int32_t cumLost = u24at(&data[5]);
+    uint32_t ehSeq = u32at(&data[8]);
+    uint32_t jitter = u32at(&data[12]);
+    uint32_t lsr = u32at(&data[16]);
+    uint32_t dlsr = u32at(&data[20]);
+
+    ALOGD("Reception Report Block: t:%llu sid:%u rid:%u fl:%u cl:%u hs:%u jt:%u lsr:%u dlsr:%u",
+            (unsigned long long)recvTimeUs, senderId, rbId, fLost, cumLost,
+            ehSeq, jitter, lsr, dlsr);
+    sp<ARTPSource> source = findSource(s, senderId);
+    sp<ReceptionReportBlock> rrb = new ReceptionReportBlock(
+            rbId, fLost, cumLost, ehSeq, jitter, lsr, dlsr);
+    source->processReceptionReportBlock(recvTimeUs, senderId, rrb);
 
     return 0;
 }
diff --git a/media/libstagefright/rtsp/ARTPSession.cpp b/media/libstagefright/rtsp/ARTPSession.cpp
index e5acb06..dae46f9 100644
--- a/media/libstagefright/rtsp/ARTPSession.cpp
+++ b/media/libstagefright/rtsp/ARTPSession.cpp
@@ -18,7 +18,10 @@
 #define LOG_TAG "ARTPSession"
 #include <utils/Log.h>
 
-#include "ARTPSession.h"
+#include <media/stagefright/rtsp/APacketSource.h>
+#include <media/stagefright/rtsp/ARTPConnection.h>
+#include <media/stagefright/rtsp/ARTPSession.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -29,9 +32,6 @@
 #include <arpa/inet.h>
 #include <sys/socket.h>
 
-#include "APacketSource.h"
-#include "ARTPConnection.h"
-#include "ASessionDescription.h"
 
 namespace android {
 
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 38a370b..717d8af 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -18,17 +18,17 @@
 #define LOG_TAG "ARTPSource"
 #include <utils/Log.h>
 
-#include "ARTPSource.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
 
-#include "AAMRAssembler.h"
-#include "AAVCAssembler.h"
-#include "AHEVCAssembler.h"
-#include "AH263Assembler.h"
-#include "AMPEG2TSAssembler.h"
-#include "AMPEG4AudioAssembler.h"
-#include "AMPEG4ElementaryAssembler.h"
-#include "ARawAudioAssembler.h"
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/AAMRAssembler.h>
+#include <media/stagefright/rtsp/AAVCAssembler.h>
+#include <media/stagefright/rtsp/AHEVCAssembler.h>
+#include <media/stagefright/rtsp/AH263Assembler.h>
+#include <media/stagefright/rtsp/AMPEG2TSAssembler.h>
+#include <media/stagefright/rtsp/AMPEG4AudioAssembler.h>
+#include <media/stagefright/rtsp/AMPEG4ElementaryAssembler.h>
+#include <media/stagefright/rtsp/ARawAudioAssembler.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -132,10 +132,10 @@
     }
 }
 
-void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
+void ARTPSource::timeUpdate(int64_t recvTimeUs, uint32_t rtpTime, uint64_t ntpTime) {
     mLastSrRtpTime = rtpTime;
     mLastSrNtpTime = ntpTime;
-    mLastSrUpdateTimeUs = ALooper::GetNowUs();
+    mLastSrUpdateTimeUs = recvTimeUs;
 
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("time-update", true);
@@ -143,7 +143,30 @@
     notify->setInt64("ntp-time", ntpTime);
     notify->setInt32("rtcp-event", 1);
     notify->setInt32("payload-type", RTCP_SR);
-    notify->setInt64("recv-time-us", mLastSrUpdateTimeUs);
+    notify->setInt64("recv-time-us", recvTimeUs);
+    notify->post();
+}
+
+void ARTPSource::processReceptionReportBlock(
+        int64_t recvTimeUs, uint32_t senderId, sp<ReceptionReportBlock> rrb) {
+    mLastRrUpdateTimeUs = recvTimeUs;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("rtcp-event", 1);
+    // A Reception Report Block (RRB) can be included in both a Sender Report and a
+    // Receiver Report, and in either case it carries the same packet-reception statistics.
+    // We therefore report an RRB as an RR event, since for our purposes there is no
+    // meaningful difference between the block and a standalone Receiver Report.
+    notify->setInt32("payload-type", RTCP_RR);
+    notify->setInt64("recv-time-us", recvTimeUs);
+    notify->setInt32("rtcp-rr-ssrc", senderId);
+    notify->setInt32("rtcp-rrb-ssrc", rrb->ssrc);
+    notify->setInt32("rtcp-rrb-fraction", rrb->fraction);
+    notify->setInt32("rtcp-rrb-lost", rrb->lost);
+    notify->setInt32("rtcp-rrb-lastSeq", rrb->lastSeq);
+    notify->setInt32("rtcp-rrb-jitter", rrb->jitter);
+    notify->setInt32("rtcp-rrb-lsr", rrb->lsr);
+    notify->setInt32("rtcp-rrb-dlsr", rrb->dlsr);
     notify->post();
 }
 
@@ -453,7 +476,8 @@
     data[18] = (mHighestSeqNumber >> 8) & 0xff;
     data[19] = mHighestSeqNumber & 0xff;
 
-    uint32_t jitterTime = 0;
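+    // RTCP carries interarrival jitter in RTP timestamp units, so convert the millisecond
+    // estimate to clock ticks using the stream's clock rate.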
+    uint32_t jitterTimeMs = (uint32_t)getInterArrivalJitterTimeMs();
+    uint32_t jitterTime = jitterTimeMs * mClockRate / 1000;
     data[20] = jitterTime >> 24;    // Interarrival jitter
     data[21] = (jitterTime >> 16) & 0xff;
     data[22] = (jitterTime >> 8) & 0xff;
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 11c7aeb..8990f0c 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "ARTPWriter"
 #include <utils/Log.h>
 
-#include "ARTPWriter.h"
+#include <media/stagefright/rtsp/ARTPWriter.h>
 
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index c33bf3f..aab63a8 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -18,8 +18,8 @@
 #define LOG_TAG "ARTSPConnection"
 #include <utils/Log.h>
 
-#include "ARTSPConnection.h"
-#include "NetworkUtils.h"
+#include <media/stagefright/rtsp/ARTSPConnection.h>
+#include <media/stagefright/rtsp/NetworkUtils.h>
 
 #include <datasource/HTTPBase.h>
 #include <media/stagefright/foundation/ABuffer.h>
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.cpp b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
index 167f7a4..9210af3 100644
--- a/media/libstagefright/rtsp/ARawAudioAssembler.cpp
+++ b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
@@ -18,10 +18,10 @@
 #define LOG_TAG "ARawAudioAssembler"
 #include <utils/Log.h>
 
-#include "ARawAudioAssembler.h"
+#include <media/stagefright/rtsp/ARawAudioAssembler.h>
 
-#include "ARTPSource.h"
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/ARTPSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 5b5b4b1..217eca7 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "ASessionDescription"
 #include <utils/Log.h>
 
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AString.h>
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index 34d1788..5b63cc2 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -47,10 +47,9 @@
         "libmedia",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/native/include/media/openmax",
-        "frameworks/native/include/android",
+    header_libs: [
+        "libstagefright_headers",
+        "libstagefright_rtsp_headers",
     ],
 
     arch: {
@@ -73,6 +72,18 @@
     },
 }
 
+cc_library_headers {
+    name: "libstagefright_rtsp_headers",
+    export_include_dirs: ["include"],
+    vendor_available: true,
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
 cc_library_static {
     name: "libstagefright_rtsp",
 
@@ -90,6 +101,10 @@
 
 //###############################################################################
 
+// 2022-01-13: this is stale; it's been disabled for 5 years.
+// the test references UDPPusher and ARTPSession, neither of which is built into
+// libstagefright_rtsp
+//
 cc_test {
     name: "rtp_test",
     gtest: false,
@@ -108,10 +123,9 @@
 
     static_libs: ["libstagefright_rtsp"],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/av/cmds/stagefright",
-        "frameworks/native/include/media/openmax",
+    header_libs: [
+        "libstagefright_headers",
+        "libstagefright_rtsp_headers",
     ],
 
     cflags: [
diff --git a/media/libstagefright/rtsp/JitterCalculator.cpp b/media/libstagefright/rtsp/JitterCalculator.cpp
index 7e60be2..93afe9c 100644
--- a/media/libstagefright/rtsp/JitterCalculator.cpp
+++ b/media/libstagefright/rtsp/JitterCalculator.cpp
@@ -17,7 +17,7 @@
 #define LOG_TAG "JitterCalc"
 #include <utils/Log.h>
 
-#include "JitterCalculator.h"
+#include <media/stagefright/rtsp/JitterCalculator.h>
 
 #include <stdlib.h>
 
diff --git a/media/libstagefright/rtsp/MyTransmitter.h b/media/libstagefright/rtsp/MyTransmitter.h
deleted file mode 100644
index bf44aff..0000000
--- a/media/libstagefright/rtsp/MyTransmitter.h
+++ /dev/null
@@ -1,984 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MY_TRANSMITTER_H_
-
-#define MY_TRANSMITTER_H_
-
-#include "ARTPConnection.h"
-
-#include <arpa/inet.h>
-#include <sys/socket.h>
-
-#include <openssl/md5.h>
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/base64.h>
-#include <media/stagefright/foundation/hexdump.h>
-
-#ifdef ANDROID
-#include "VideoSource.h"
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaCodecSource.h>
-#endif
-
-namespace android {
-
-#define TRACK_SUFFIX    "trackid=1"
-#define PT              96
-#define PT_STR          "96"
-
-#define USERNAME        "bcast"
-#define PASSWORD        "test"
-
-static int uniformRand(int limit) {
-    return ((double)rand() * limit) / RAND_MAX;
-}
-
-static bool GetAttribute(const char *s, const char *key, AString *value) {
-    value->clear();
-
-    size_t keyLen = strlen(key);
-
-    for (;;) {
-        const char *colonPos = strchr(s, ';');
-
-        size_t len =
-            (colonPos == NULL) ? strlen(s) : colonPos - s;
-
-        if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
-            value->setTo(&s[keyLen + 1], len - keyLen - 1);
-            return true;
-        }
-
-        if (colonPos == NULL) {
-            return false;
-        }
-
-        s = colonPos + 1;
-    }
-}
-
-struct MyTransmitter : public AHandler {
-    MyTransmitter(const char *url, const sp<ALooper> &looper)
-        : mServerURL(url),
-          mLooper(looper),
-          mConn(new ARTSPConnection),
-          mConnected(false),
-          mAuthType(NONE),
-          mRTPSocket(-1),
-          mRTCPSocket(-1),
-          mSourceID(rand()),
-          mSeqNo(uniformRand(65536)),
-          mRTPTimeBase(rand()),
-          mNumSamplesSent(0),
-          mNumRTPSent(0),
-          mNumRTPOctetsSent(0),
-          mLastRTPTime(0),
-          mLastNTPTime(0) {
-        mStreamURL = mServerURL;
-        mStreamURL.append("/bazong.sdp");
-
-        mTrackURL = mStreamURL;
-        mTrackURL.append("/");
-        mTrackURL.append(TRACK_SUFFIX);
-
-        mLooper->registerHandler(this);
-        mLooper->registerHandler(mConn);
-
-        sp<AMessage> reply = new AMessage('conn', this);
-        mConn->connect(mServerURL.c_str(), reply);
-
-#ifdef ANDROID
-        int width = 640;
-        int height = 480;
-
-        sp<MediaSource> source = new VideoSource(width, height);
-
-        sp<AMessage> encMeta = new AMessage;
-        encMeta->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
-        encMeta->setInt32("width", width);
-        encMeta->setInt32("height", height);
-        encMeta->setInt32("frame-rate", 30);
-        encMeta->setInt32("bitrate", 256000);
-        encMeta->setInt32("i-frame-interval", 10);
-
-        sp<ALooper> encLooper = new ALooper;
-        encLooper->setName("rtsp_transmitter");
-        encLooper->start();
-
-        mEncoder = MediaCodecSource::Create(encLooper, encMeta, source);
-
-        mEncoder->start();
-
-        MediaBuffer *buffer;
-        CHECK_EQ(mEncoder->read(&buffer), (status_t)OK);
-        CHECK(buffer != NULL);
-
-        makeH264SPropParamSets(buffer);
-
-        buffer->release();
-        buffer = NULL;
-#endif
-    }
-
-    uint64_t ntpTime() {
-        struct timeval tv;
-        gettimeofday(&tv, NULL);
-
-        uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
-
-        nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-        uint64_t hi = nowUs / 1000000ll;
-        uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll;
-
-        return (hi << 32) | lo;
-    }
-
-    void issueAnnounce() {
-        AString sdp;
-        sdp = "v=0\r\n";
-
-        sdp.append("o=- ");
-
-        uint64_t ntp = ntpTime();
-        sdp.append(ntp);
-        sdp.append(" ");
-        sdp.append(ntp);
-        sdp.append(" IN IP4 127.0.0.0\r\n");
-
-        sdp.append(
-              "s=Sample\r\n"
-              "i=Playing around with ANNOUNCE\r\n"
-              "c=IN IP4 ");
-
-        struct in_addr addr;
-        addr.s_addr = htonl(mServerIP);
-
-        sdp.append(inet_ntoa(addr));
-
-        sdp.append(
-              "\r\n"
-              "t=0 0\r\n"
-              "a=range:npt=now-\r\n");
-
-#ifdef ANDROID
-        sp<MetaData> meta = mEncoder->getFormat();
-        int32_t width, height;
-        CHECK(meta->findInt32(kKeyWidth, &width));
-        CHECK(meta->findInt32(kKeyHeight, &height));
-
-        sdp.append(
-              "m=video 0 RTP/AVP " PT_STR "\r\n"
-              "b=AS 320000\r\n"
-              "a=rtpmap:" PT_STR " H264/90000\r\n");
-
-        sdp.append("a=cliprect 0,0,");
-        sdp.append(height);
-        sdp.append(",");
-        sdp.append(width);
-        sdp.append("\r\n");
-
-        sdp.append(
-              "a=framesize:" PT_STR " ");
-        sdp.append(width);
-        sdp.append("-");
-        sdp.append(height);
-        sdp.append("\r\n");
-
-        sdp.append(
-              "a=fmtp:" PT_STR " profile-level-id=42C015;sprop-parameter-sets=");
-
-        sdp.append(mSeqParamSet);
-        sdp.append(",");
-        sdp.append(mPicParamSet);
-        sdp.append(";packetization-mode=1\r\n");
-#else
-        sdp.append(
-                "m=audio 0 RTP/AVP " PT_STR "\r\n"
-                "a=rtpmap:" PT_STR " L8/8000/1\r\n");
-#endif
-
-        sdp.append("a=control:" TRACK_SUFFIX "\r\n");
-
-        AString request;
-        request.append("ANNOUNCE ");
-        request.append(mStreamURL);
-        request.append(" RTSP/1.0\r\n");
-
-        addAuthentication(&request, "ANNOUNCE", mStreamURL.c_str());
-
-        request.append("Content-Type: application/sdp\r\n");
-        request.append("Content-Length: ");
-        request.append(sdp.size());
-        request.append("\r\n");
-
-        request.append("\r\n");
-        request.append(sdp);
-
-        sp<AMessage> reply = new AMessage('anno', this);
-        mConn->sendRequest(request.c_str(), reply);
-    }
-
-    void H(const AString &s, AString *out) {
-        out->clear();
-
-        MD5_CTX m;
-        MD5_Init(&m);
-        MD5_Update(&m, s.c_str(), s.size());
-
-        uint8_t key[16];
-        MD5_Final(key, &m);
-
-        for (size_t i = 0; i < 16; ++i) {
-            char nibble = key[i] >> 4;
-            if (nibble <= 9) {
-                nibble += '0';
-            } else {
-                nibble += 'a' - 10;
-            }
-            out->append(&nibble, 1);
-
-            nibble = key[i] & 0x0f;
-            if (nibble <= 9) {
-                nibble += '0';
-            } else {
-                nibble += 'a' - 10;
-            }
-            out->append(&nibble, 1);
-        }
-    }
-
-    void authenticate(const sp<ARTSPResponse> &response) {
-        ssize_t i = response->mHeaders.indexOfKey("www-authenticate");
-        CHECK_GE(i, 0);
-
-        AString value = response->mHeaders.valueAt(i);
-
-        if (!strncmp(value.c_str(), "Basic", 5)) {
-            mAuthType = BASIC;
-        } else {
-            CHECK(!strncmp(value.c_str(), "Digest", 6));
-            mAuthType = DIGEST;
-
-            i = value.find("nonce=");
-            CHECK_GE(i, 0);
-            CHECK_EQ(value.c_str()[i + 6], '\"');
-            ssize_t j = value.find("\"", i + 7);
-            CHECK_GE(j, 0);
-
-            mNonce.setTo(value, i + 7, j - i - 7);
-        }
-
-        issueAnnounce();
-    }
-
-    void addAuthentication(
-            AString *request, const char *method, const char *url) {
-        if (mAuthType == NONE) {
-            return;
-        }
-
-        if (mAuthType == BASIC) {
-            request->append("Authorization: Basic YmNhc3Q6dGVzdAo=\r\n");
-            return;
-        }
-
-        CHECK_EQ((int)mAuthType, (int)DIGEST);
-
-        AString A1;
-        A1.append(USERNAME);
-        A1.append(":");
-        A1.append("Streaming Server");
-        A1.append(":");
-        A1.append(PASSWORD);
-
-        AString A2;
-        A2.append(method);
-        A2.append(":");
-        A2.append(url);
-
-        AString HA1, HA2;
-        H(A1, &HA1);
-        H(A2, &HA2);
-
-        AString tmp;
-        tmp.append(HA1);
-        tmp.append(":");
-        tmp.append(mNonce);
-        tmp.append(":");
-        tmp.append(HA2);
-
-        AString digest;
-        H(tmp, &digest);
-
-        request->append("Authorization: Digest ");
-        request->append("nonce=\"");
-        request->append(mNonce);
-        request->append("\", ");
-        request->append("username=\"" USERNAME "\", ");
-        request->append("uri=\"");
-        request->append(url);
-        request->append("\", ");
-        request->append("response=\"");
-        request->append(digest);
-        request->append("\"");
-        request->append("\r\n");
-    }
-
-    virtual void onMessageReceived(const sp<AMessage> &msg) {
-        switch (msg->what()) {
-            case 'conn':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "connection request completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                if (result != OK) {
-                    (new AMessage('quit', this))->post();
-                    break;
-                }
-
-                mConnected = true;
-
-                CHECK(msg->findInt32("server-ip", (int32_t *)&mServerIP));
-
-                issueAnnounce();
-                break;
-            }
-
-            case 'anno':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "ANNOUNCE completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                sp<RefBase> obj;
-                CHECK(msg->findObject("response", &obj));
-                sp<ARTSPResponse> response;
-
-                if (result == OK) {
-                    response = static_cast<ARTSPResponse *>(obj.get());
-                    CHECK(response != NULL);
-
-                    if (response->mStatusCode == 401) {
-                        if (mAuthType != NONE) {
-                            LOG(INFO) << "FAILED to authenticate";
-                            (new AMessage('quit', this))->post();
-                            break;
-                        }
-
-                        authenticate(response);
-                        break;
-                    }
-                }
-
-                if (result != OK || response->mStatusCode != 200) {
-                    (new AMessage('quit', this))->post();
-                    break;
-                }
-
-                unsigned rtpPort;
-                ARTPConnection::MakePortPair(&mRTPSocket, &mRTCPSocket, &rtpPort);
-
-                // (new AMessage('poll', this))->post();
-
-                AString request;
-                request.append("SETUP ");
-                request.append(mTrackURL);
-                request.append(" RTSP/1.0\r\n");
-
-                addAuthentication(&request, "SETUP", mTrackURL.c_str());
-
-                request.append("Transport: RTP/AVP;unicast;client_port=");
-                request.append(rtpPort);
-                request.append("-");
-                request.append(rtpPort + 1);
-                request.append(";mode=record\r\n");
-                request.append("\r\n");
-
-                sp<AMessage> reply = new AMessage('setu', this);
-                mConn->sendRequest(request.c_str(), reply);
-                break;
-            }
-
-#if 0
-            case 'poll':
-            {
-                fd_set rs;
-                FD_ZERO(&rs);
-                FD_SET(mRTCPSocket, &rs);
-
-                struct timeval tv;
-                tv.tv_sec = 0;
-                tv.tv_usec = 0;
-
-                int res = select(mRTCPSocket + 1, &rs, NULL, NULL, &tv);
-
-                if (res == 1) {
-                    sp<ABuffer> buffer = new ABuffer(65536);
-                    ssize_t n = recv(mRTCPSocket, buffer->data(), buffer->size(), 0);
-
-                    if (n <= 0) {
-                        LOG(ERROR) << "recv returned " << n;
-                    } else {
-                        LOG(INFO) << "recv returned " << n << " bytes of data.";
-
-                        hexdump(buffer->data(), n);
-                    }
-                }
-
-                msg->post(50000);
-                break;
-            }
-#endif
-
-            case 'setu':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "SETUP completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                sp<RefBase> obj;
-                CHECK(msg->findObject("response", &obj));
-                sp<ARTSPResponse> response;
-
-                if (result == OK) {
-                    response = static_cast<ARTSPResponse *>(obj.get());
-                    CHECK(response != NULL);
-                }
-
-                if (result != OK || response->mStatusCode != 200) {
-                    (new AMessage('quit', this))->post();
-                    break;
-                }
-
-                ssize_t i = response->mHeaders.indexOfKey("session");
-                CHECK_GE(i, 0);
-                mSessionID = response->mHeaders.valueAt(i);
-                i = mSessionID.find(";");
-                if (i >= 0) {
-                    // Remove options, i.e. ";timeout=90"
-                    mSessionID.erase(i, mSessionID.size() - i);
-                }
-
-                i = response->mHeaders.indexOfKey("transport");
-                CHECK_GE(i, 0);
-                AString transport = response->mHeaders.valueAt(i);
-
-                LOG(INFO) << "transport = '" << transport << "'";
-
-                AString value;
-                CHECK(GetAttribute(transport.c_str(), "server_port", &value));
-
-                unsigned rtpPort, rtcpPort;
-                CHECK_EQ(sscanf(value.c_str(), "%u-%u", &rtpPort, &rtcpPort), 2);
-
-                CHECK(GetAttribute(transport.c_str(), "source", &value));
-
-                memset(mRemoteAddr.sin_zero, 0, sizeof(mRemoteAddr.sin_zero));
-                mRemoteAddr.sin_family = AF_INET;
-                mRemoteAddr.sin_addr.s_addr = inet_addr(value.c_str());
-                mRemoteAddr.sin_port = htons(rtpPort);
-
-                mRemoteRTCPAddr = mRemoteAddr;
-                mRemoteRTCPAddr.sin_port = htons(rtpPort + 1);
-
-                CHECK_EQ(0, connect(mRTPSocket,
-                                    (const struct sockaddr *)&mRemoteAddr,
-                                    sizeof(mRemoteAddr)));
-
-                CHECK_EQ(0, connect(mRTCPSocket,
-                                    (const struct sockaddr *)&mRemoteRTCPAddr,
-                                    sizeof(mRemoteRTCPAddr)));
-
-                uint32_t x = ntohl(mRemoteAddr.sin_addr.s_addr);
-                LOG(INFO) << "sending data to "
-                     << (x >> 24)
-                     << "."
-                     << ((x >> 16) & 0xff)
-                     << "."
-                     << ((x >> 8) & 0xff)
-                     << "."
-                     << (x & 0xff)
-                     << ":"
-                     << rtpPort;
-
-                AString request;
-                request.append("RECORD ");
-                request.append(mStreamURL);
-                request.append(" RTSP/1.0\r\n");
-
-                addAuthentication(&request, "RECORD", mStreamURL.c_str());
-
-                request.append("Session: ");
-                request.append(mSessionID);
-                request.append("\r\n");
-                request.append("\r\n");
-
-                sp<AMessage> reply = new AMessage('reco', this);
-                mConn->sendRequest(request.c_str(), reply);
-                break;
-            }
-
-            case 'reco':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "RECORD completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                sp<RefBase> obj;
-                CHECK(msg->findObject("response", &obj));
-                sp<ARTSPResponse> response;
-
-                if (result == OK) {
-                    response = static_cast<ARTSPResponse *>(obj.get());
-                    CHECK(response != NULL);
-                }
-
-                if (result != OK) {
-                    (new AMessage('quit', this))->post();
-                    break;
-                }
-
-                (new AMessage('more', this))->post();
-                (new AMessage('sr  ', this))->post();
-                (new AMessage('aliv', this))->post(30000000ll);
-                break;
-            }
-
-            case 'aliv':
-            {
-                if (!mConnected) {
-                    break;
-                }
-
-                AString request;
-                request.append("OPTIONS ");
-                request.append(mStreamURL);
-                request.append(" RTSP/1.0\r\n");
-
-                addAuthentication(&request, "RECORD", mStreamURL.c_str());
-
-                request.append("Session: ");
-                request.append(mSessionID);
-                request.append("\r\n");
-                request.append("\r\n");
-
-                sp<AMessage> reply = new AMessage('opts', this);
-                mConn->sendRequest(request.c_str(), reply);
-                break;
-            }
-
-            case 'opts':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "OPTIONS completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                if (!mConnected) {
-                    break;
-                }
-
-                (new AMessage('aliv', this))->post(30000000ll);
-                break;
-            }
-
-            case 'more':
-            {
-                if (!mConnected) {
-                    break;
-                }
-
-                sp<ABuffer> buffer = new ABuffer(65536);
-                uint8_t *data = buffer->data();
-                data[0] = 0x80;
-                data[1] = (1 << 7) | PT;  // M-bit
-                data[2] = (mSeqNo >> 8) & 0xff;
-                data[3] = mSeqNo & 0xff;
-                data[8] = mSourceID >> 24;
-                data[9] = (mSourceID >> 16) & 0xff;
-                data[10] = (mSourceID >> 8) & 0xff;
-                data[11] = mSourceID & 0xff;
-
-#ifdef ANDROID
-                MediaBuffer *mediaBuf = NULL;
-                for (;;) {
-                    CHECK_EQ(mEncoder->read(&mediaBuf), (status_t)OK);
-                    if (mediaBuf->range_length() > 0) {
-                        break;
-                    }
-                    mediaBuf->release();
-                    mediaBuf = NULL;
-                }
-
-                int64_t timeUs;
-                CHECK(mediaBuf->meta_data()->findInt64(kKeyTime, &timeUs));
-
-                uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
-
-                const uint8_t *mediaData =
-                    (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
-
-                CHECK(!memcmp("\x00\x00\x00\x01", mediaData, 4));
-
-                CHECK_LE(mediaBuf->range_length() - 4 + 12, buffer->size());
-
-                memcpy(&data[12],
-                       mediaData + 4, mediaBuf->range_length() - 4);
-
-                buffer->setRange(0, mediaBuf->range_length() - 4 + 12);
-
-                mediaBuf->release();
-                mediaBuf = NULL;
-#else
-                uint32_t rtpTime = mRTPTimeBase + mNumRTPSent * 128;
-                memset(&data[12], 0, 128);
-                buffer->setRange(0, 12 + 128);
-#endif
-
-                data[4] = rtpTime >> 24;
-                data[5] = (rtpTime >> 16) & 0xff;
-                data[6] = (rtpTime >> 8) & 0xff;
-                data[7] = rtpTime & 0xff;
-
-                ssize_t n = send(
-                        mRTPSocket, data, buffer->size(), 0);
-                if (n < 0) {
-                    LOG(ERROR) << "send failed (" << strerror(errno) << ")";
-                }
-                CHECK_EQ(n, (ssize_t)buffer->size());
-
-                ++mSeqNo;
-
-                ++mNumRTPSent;
-                mNumRTPOctetsSent += buffer->size() - 12;
-
-                mLastRTPTime = rtpTime;
-                mLastNTPTime = ntpTime();
-
-#ifdef ANDROID
-                if (mNumRTPSent < 60 * 25) {  // 60 secs worth
-                    msg->post(40000);
-#else
-                if (mNumRTPOctetsSent < 8000 * 60) {
-                    msg->post(1000000ll * 128 / 8000);
-#endif
-                } else {
-                    LOG(INFO) << "That's enough, pausing.";
-
-                    AString request;
-                    request.append("PAUSE ");
-                    request.append(mStreamURL);
-                    request.append(" RTSP/1.0\r\n");
-
-                    addAuthentication(&request, "PAUSE", mStreamURL.c_str());
-
-                    request.append("Session: ");
-                    request.append(mSessionID);
-                    request.append("\r\n");
-                    request.append("\r\n");
-
-                    sp<AMessage> reply = new AMessage('paus', this);
-                    mConn->sendRequest(request.c_str(), reply);
-                }
-                break;
-            }
-
-            case 'sr  ':
-            {
-                if (!mConnected) {
-                    break;
-                }
-
-                sp<ABuffer> buffer = new ABuffer(65536);
-                buffer->setRange(0, 0);
-
-                addSR(buffer);
-                addSDES(buffer);
-
-                uint8_t *data = buffer->data();
-                ssize_t n = send(
-                        mRTCPSocket, data, buffer->size(), 0);
-                CHECK_EQ(n, (ssize_t)buffer->size());
-
-                msg->post(3000000);
-                break;
-            }
-
-            case 'paus':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "PAUSE completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                sp<RefBase> obj;
-                CHECK(msg->findObject("response", &obj));
-                sp<ARTSPResponse> response;
-
-                AString request;
-                request.append("TEARDOWN ");
-                request.append(mStreamURL);
-                request.append(" RTSP/1.0\r\n");
-
-                addAuthentication(&request, "TEARDOWN", mStreamURL.c_str());
-
-                request.append("Session: ");
-                request.append(mSessionID);
-                request.append("\r\n");
-                request.append("\r\n");
-
-                sp<AMessage> reply = new AMessage('tear', this);
-                mConn->sendRequest(request.c_str(), reply);
-                break;
-            }
-
-            case 'tear':
-            {
-                int32_t result;
-                CHECK(msg->findInt32("result", &result));
-
-                LOG(INFO) << "TEARDOWN completed with result "
-                     << result << " (" << strerror(-result) << ")";
-
-                sp<RefBase> obj;
-                CHECK(msg->findObject("response", &obj));
-                sp<ARTSPResponse> response;
-
-                if (result == OK) {
-                    response = static_cast<ARTSPResponse *>(obj.get());
-                    CHECK(response != NULL);
-                }
-
-                (new AMessage('quit', this))->post();
-                break;
-            }
-
-            case 'disc':
-            {
-                LOG(INFO) << "disconnect completed";
-
-                mConnected = false;
-                (new AMessage('quit', this))->post();
-                break;
-            }
-
-            case 'quit':
-            {
-                if (mConnected) {
-                    mConn->disconnect(new AMessage('disc', this));
-                    break;
-                }
-
-                if (mRTPSocket >= 0) {
-                    close(mRTPSocket);
-                    mRTPSocket = -1;
-                }
-
-                if (mRTCPSocket >= 0) {
-                    close(mRTCPSocket);
-                    mRTCPSocket = -1;
-                }
-
-#ifdef ANDROID
-                mEncoder->stop();
-                mEncoder.clear();
-#endif
-
-                mLooper->stop();
-                break;
-            }
-
-            default:
-                TRESPASS();
-        }
-    }
-
-protected:
-    virtual ~MyTransmitter() {
-    }
-
-private:
-    enum AuthType {
-        NONE,
-        BASIC,
-        DIGEST
-    };
-
-    AString mServerURL;
-    AString mTrackURL;
-    AString mStreamURL;
-
-    sp<ALooper> mLooper;
-    sp<ARTSPConnection> mConn;
-    bool mConnected;
-    uint32_t mServerIP;
-    AuthType mAuthType;
-    AString mNonce;
-    AString mSessionID;
-    int mRTPSocket, mRTCPSocket;
-    uint32_t mSourceID;
-    uint32_t mSeqNo;
-    uint32_t mRTPTimeBase;
-    struct sockaddr_in mRemoteAddr;
-    struct sockaddr_in mRemoteRTCPAddr;
-    size_t mNumSamplesSent;
-    uint32_t mNumRTPSent;
-    uint32_t mNumRTPOctetsSent;
-    uint32_t mLastRTPTime;
-    uint64_t mLastNTPTime;
-
-#ifdef ANDROID
-    sp<MediaSource> mEncoder;
-    AString mSeqParamSet;
-    AString mPicParamSet;
-
-    void makeH264SPropParamSets(MediaBuffer *buffer) {
-        static const char kStartCode[] = "\x00\x00\x00\x01";
-
-        const uint8_t *data =
-            (const uint8_t *)buffer->data() + buffer->range_offset();
-        size_t size = buffer->range_length();
-
-        CHECK_GE(size, 0u);
-        CHECK(!memcmp(kStartCode, data, 4));
-
-        data += 4;
-        size -= 4;
-
-        size_t startCodePos = 0;
-        while (startCodePos + 3 < size
-                && memcmp(kStartCode, &data[startCodePos], 4)) {
-            ++startCodePos;
-        }
-
-        CHECK_LT(startCodePos + 3, size);
-
-        encodeBase64(data, startCodePos, &mSeqParamSet);
-
-        encodeBase64(&data[startCodePos + 4], size - startCodePos - 4,
-                     &mPicParamSet);
-    }
-#endif
-
-    void addSR(const sp<ABuffer> &buffer) {
-        uint8_t *data = buffer->data() + buffer->size();
-
-        data[0] = 0x80 | 0;
-        data[1] = 200;  // SR
-        data[2] = 0;
-        data[3] = 6;
-        data[4] = mSourceID >> 24;
-        data[5] = (mSourceID >> 16) & 0xff;
-        data[6] = (mSourceID >> 8) & 0xff;
-        data[7] = mSourceID & 0xff;
-
-        data[8] = mLastNTPTime >> (64 - 8);
-        data[9] = (mLastNTPTime >> (64 - 16)) & 0xff;
-        data[10] = (mLastNTPTime >> (64 - 24)) & 0xff;
-        data[11] = (mLastNTPTime >> 32) & 0xff;
-        data[12] = (mLastNTPTime >> 24) & 0xff;
-        data[13] = (mLastNTPTime >> 16) & 0xff;
-        data[14] = (mLastNTPTime >> 8) & 0xff;
-        data[15] = mLastNTPTime & 0xff;
-
-        data[16] = (mLastRTPTime >> 24) & 0xff;
-        data[17] = (mLastRTPTime >> 16) & 0xff;
-        data[18] = (mLastRTPTime >> 8) & 0xff;
-        data[19] = mLastRTPTime & 0xff;
-
-        data[20] = mNumRTPSent >> 24;
-        data[21] = (mNumRTPSent >> 16) & 0xff;
-        data[22] = (mNumRTPSent >> 8) & 0xff;
-        data[23] = mNumRTPSent & 0xff;
-
-        data[24] = mNumRTPOctetsSent >> 24;
-        data[25] = (mNumRTPOctetsSent >> 16) & 0xff;
-        data[26] = (mNumRTPOctetsSent >> 8) & 0xff;
-        data[27] = mNumRTPOctetsSent & 0xff;
-
-        buffer->setRange(buffer->offset(), buffer->size() + 28);
-    }
-
-    void addSDES(const sp<ABuffer> &buffer) {
-        uint8_t *data = buffer->data() + buffer->size();
-        data[0] = 0x80 | 1;
-        data[1] = 202;  // SDES
-        data[4] = mSourceID >> 24;
-        data[5] = (mSourceID >> 16) & 0xff;
-        data[6] = (mSourceID >> 8) & 0xff;
-        data[7] = mSourceID & 0xff;
-
-        size_t offset = 8;
-
-        data[offset++] = 1;  // CNAME
-
-        static const char *kCNAME = "andih@laptop";
-        data[offset++] = strlen(kCNAME);
-
-        memcpy(&data[offset], kCNAME, strlen(kCNAME));
-        offset += strlen(kCNAME);
-
-        data[offset++] = 7;  // NOTE
-
-        static const char *kNOTE = "Hell's frozen over.";
-        data[offset++] = strlen(kNOTE);
-
-        memcpy(&data[offset], kNOTE, strlen(kNOTE));
-        offset += strlen(kNOTE);
-
-        data[offset++] = 0;
-
-        if ((offset % 4) > 0) {
-            size_t count = 4 - (offset % 4);
-            switch (count) {
-                case 3:
-                    data[offset++] = 0;
-                case 2:
-                    data[offset++] = 0;
-                case 1:
-                    data[offset++] = 0;
-            }
-        }
-
-        size_t numWords = (offset / 4) - 1;
-        data[2] = numWords >> 8;
-        data[3] = numWords & 0xff;
-
-        buffer->setRange(buffer->offset(), buffer->size() + offset);
-    }
-
-    DISALLOW_EVIL_CONSTRUCTORS(MyTransmitter);
-};
-
-}  // namespace android
-
-#endif  // MY_TRANSMITTER_H_
diff --git a/media/libstagefright/rtsp/NetworkUtils.cpp b/media/libstagefright/rtsp/NetworkUtils.cpp
index c053be8..e8ec64d 100644
--- a/media/libstagefright/rtsp/NetworkUtils.cpp
+++ b/media/libstagefright/rtsp/NetworkUtils.cpp
@@ -20,7 +20,7 @@
 #define LOG_TAG "NetworkUtils"
 #include <utils/Log.h>
 
-#include "NetworkUtils.h"
+#include <media/stagefright/rtsp/NetworkUtils.h>
 #include <cutils/qtaguid.h>
 #include <NetdClient.h>
 
diff --git a/media/libstagefright/rtsp/NetworkUtilsForAppProc.cpp b/media/libstagefright/rtsp/NetworkUtilsForAppProc.cpp
index 662159c..30fc38a 100644
--- a/media/libstagefright/rtsp/NetworkUtilsForAppProc.cpp
+++ b/media/libstagefright/rtsp/NetworkUtilsForAppProc.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "NetworkUtils"
 #include <utils/Log.h>
 
-#include "NetworkUtils.h"
+#include <media/stagefright/rtsp/NetworkUtils.h>
 
 // NetworkUtils implementation for application process.
 namespace android {
diff --git a/media/libstagefright/rtsp/QualManager.cpp b/media/libstagefright/rtsp/QualManager.cpp
index 37aa326..f1f8222 100644
--- a/media/libstagefright/rtsp/QualManager.cpp
+++ b/media/libstagefright/rtsp/QualManager.cpp
@@ -21,7 +21,7 @@
 #include <sys/prctl.h>
 #include <utils/Log.h>
 
-#include "QualManager.h"
+#include <media/stagefright/rtsp/QualManager.h>
 
 namespace android {
 
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
index e236267..8cd33cf 100644
--- a/media/libstagefright/rtsp/SDPLoader.cpp
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -18,9 +18,10 @@
 #define LOG_TAG "SDPLoader"
 #include <utils/Log.h>
 
-#include "include/SDPLoader.h"
+// #include "include/SDPLoader.h"
+#include <media/stagefright/rtsp/SDPLoader.h>
 
-#include "ASessionDescription.h"
+#include <media/stagefright/rtsp/ASessionDescription.h>
 
 #include <datasource/MediaHTTP.h>
 #include <media/MediaHTTPConnection.h>
diff --git a/media/libstagefright/rtsp/UDPPusher.cpp b/media/libstagefright/rtsp/UDPPusher.cpp
index 5c685a1..4e812f5 100644
--- a/media/libstagefright/rtsp/UDPPusher.cpp
+++ b/media/libstagefright/rtsp/UDPPusher.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "UDPPusher"
 #include <utils/Log.h>
 
-#include "UDPPusher.h"
+#include <media/stagefright/rtsp/UDPPusher.h>
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/rtsp/AAMRAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAMRAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AAMRAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AAMRAssembler.h
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AAVCAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
diff --git a/media/libstagefright/rtsp/AH263Assembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AH263Assembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AH263Assembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AH263Assembler.h
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AHEVCAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
diff --git a/media/libstagefright/rtsp/AMPEG2TSAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AMPEG2TSAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AMPEG2TSAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AMPEG2TSAssembler.h
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AMPEG4AudioAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AMPEG4AudioAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AMPEG4AudioAssembler.h
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AMPEG4ElementaryAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/AMPEG4ElementaryAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/AMPEG4ElementaryAssembler.h
diff --git a/media/libstagefright/rtsp/APacketSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/APacketSource.h
similarity index 100%
rename from media/libstagefright/rtsp/APacketSource.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/APacketSource.h
diff --git a/media/libstagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
similarity index 81%
rename from media/libstagefright/rtsp/ARTPAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index f959c40..39161b6 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -105,6 +105,27 @@
             (long long)rtp, (long long)play, (long long)exp, isExp);
 }
 
+struct ReceptionReportBlock : public RefBase {
+    uint32_t ssrc;       // ssrc of data source being reported
+    uint32_t fraction;   // fraction lost since last SR/RR
+    int32_t lost;        // cumul. no. pkts lost (signed!)
+    uint32_t lastSeq;    // extended last seq. no. received
+    uint32_t jitter;     // interarrival jitter
+    uint32_t lsr;        // last SR packet from this source
+    uint32_t dlsr;       // delay since last SR packet
+
+    ReceptionReportBlock(uint32_t ssrc, uint32_t fraction, int32_t lost, uint32_t lastSeq,
+            uint32_t jitter, uint32_t lsr, uint32_t dlsr) {
+        this->ssrc = ssrc;
+        this->fraction = fraction;
+        this->lost = lost;
+        this->lastSeq = lastSeq;
+        this->jitter = jitter;
+        this->lsr = lsr;
+        this->dlsr = dlsr;
+    }
+};
+
 }  // namespace android
 
 #endif  // A_RTP_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
similarity index 92%
rename from media/libstagefright/rtsp/ARTPConnection.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 36cca31..73d2866 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -110,7 +110,10 @@
     status_t parseRTP(StreamInfo *info, const sp<ABuffer> &buffer);
     status_t parseRTPExt(StreamInfo *s, const uint8_t *extData, size_t extLen, int32_t *cvoDegrees);
     status_t parseRTCP(StreamInfo *info, const sp<ABuffer> &buffer);
-    status_t parseSR(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseSenderReport(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseReceiverReport(StreamInfo *info, const uint8_t *data, size_t size);
+    status_t parseReceptionReportBlock(StreamInfo *info,
+            int64_t recvTimeUs, uint32_t senderId, const uint8_t *data, size_t size);
     status_t parseTSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parsePSFB(StreamInfo *info, const uint8_t *data, size_t size);
     status_t parseBYE(StreamInfo *info, const uint8_t *data, size_t size);
diff --git a/media/libstagefright/rtsp/ARTPSession.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSession.h
similarity index 100%
rename from media/libstagefright/rtsp/ARTPSession.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSession.h
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
similarity index 94%
rename from media/libstagefright/rtsp/ARTPSource.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index 4984e91..e9b4942 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -36,6 +36,7 @@
 struct ABuffer;
 struct AMessage;
 struct ARTPAssembler;
+struct ReceptionReportBlock;
 struct ASessionDescription;
 
 struct ARTPSource : public RefBase {
@@ -59,8 +60,10 @@
 
     void processRTPPacket(const sp<ABuffer> &buffer);
     void processRTPPacket();
+    void processReceptionReportBlock(
+            int64_t recvTimeUs, uint32_t senderId, sp<ReceptionReportBlock> rrb);
     void timeReset();
-    void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
+    void timeUpdate(int64_t recvTimeUs, uint32_t rtpTime, uint64_t ntpTime);
     void byeReceived();
 
     List<sp<ABuffer> > *queue() { return &mQueue; }
@@ -135,6 +138,8 @@
     uint64_t mLastSrNtpTime;
     int64_t mLastSrUpdateTimeUs;
 
+    int64_t mLastRrUpdateTimeUs;
+
     bool mIsFirstRtpRtcpGap;
     double mAvgRtpRtcpGapMs;
     double mAvgUnderlineDelayMs;
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
similarity index 100%
rename from media/libstagefright/rtsp/ARTPWriter.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTSPConnection.h
similarity index 100%
rename from media/libstagefright/rtsp/ARTSPConnection.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTSPConnection.h
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARawAudioAssembler.h
similarity index 100%
rename from media/libstagefright/rtsp/ARawAudioAssembler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ARawAudioAssembler.h
diff --git a/media/libstagefright/rtsp/ASessionDescription.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ASessionDescription.h
similarity index 100%
rename from media/libstagefright/rtsp/ASessionDescription.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/ASessionDescription.h
diff --git a/media/libstagefright/rtsp/JitterCalculator.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/JitterCalculator.h
similarity index 100%
rename from media/libstagefright/rtsp/JitterCalculator.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/JitterCalculator.h
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/MyHandler.h
similarity index 100%
rename from media/libstagefright/rtsp/MyHandler.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/MyHandler.h
diff --git a/media/libstagefright/rtsp/NetworkUtils.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/NetworkUtils.h
similarity index 100%
rename from media/libstagefright/rtsp/NetworkUtils.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/NetworkUtils.h
diff --git a/media/libstagefright/rtsp/QualManager.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/QualManager.h
similarity index 100%
rename from media/libstagefright/rtsp/QualManager.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/QualManager.h
diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/SDPLoader.h
similarity index 100%
rename from media/libstagefright/include/SDPLoader.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/SDPLoader.h
diff --git a/media/libstagefright/rtsp/TrafficRecorder.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/TrafficRecorder.h
similarity index 100%
rename from media/libstagefright/rtsp/TrafficRecorder.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/TrafficRecorder.h
diff --git a/media/libstagefright/rtsp/UDPPusher.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/UDPPusher.h
similarity index 100%
rename from media/libstagefright/rtsp/UDPPusher.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/UDPPusher.h
diff --git a/media/libstagefright/rtsp/VideoSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/VideoSource.h
similarity index 100%
rename from media/libstagefright/rtsp/VideoSource.h
rename to media/libstagefright/rtsp/include/media/stagefright/rtsp/VideoSource.h
diff --git a/media/libstagefright/rtsp/rtp_test.cpp b/media/libstagefright/rtsp/rtp_test.cpp
index 4590699..a8cd7e4 100644
--- a/media/libstagefright/rtsp/rtp_test.cpp
+++ b/media/libstagefright/rtsp/rtp_test.cpp
@@ -27,9 +27,9 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/SimpleDecodingSource.h>
 
-#include "ARTPSession.h"
-#include "ASessionDescription.h"
-#include "UDPPusher.h"
+#include <media/stagefright/rtsp/ARTPSession.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
+#include <media/stagefright/rtsp/UDPPusher.h>
 
 using namespace android;
 
@@ -182,7 +182,7 @@
     CHECK_EQ(decoder->start(), (status_t)OK);
 
     for (;;) {
-        MediaBuffer *buffer;
+        MediaBufferBase *buffer;
         status_t err = decoder->read(&buffer);
 
         if (err != OK) {
@@ -201,7 +201,7 @@
 #if 1
         if (buffer->range_length() != 0) {
             int64_t timeUs;
-            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
+            CHECK(buffer->meta_data().findInt64(kKeyTime, &timeUs));
 
             printf("decoder returned frame of size %zu at time %.2f secs\n",
                    buffer->range_length(), timeUs / 1E6);
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index a799a13..e6b67ce 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -32,11 +32,6 @@
         "liblog",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-        "frameworks/av/media/libstagefright/include",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
index 91bf385..7a0ba52 100644
--- a/media/libstagefright/tests/HEVC/Android.bp
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -44,10 +44,6 @@
         "libstagefright_foundation",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
index 324a042..c43e1f8 100644
--- a/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
+++ b/media/libstagefright/tests/HEVC/HEVCUtilsUnitTest.cpp
@@ -21,7 +21,7 @@
 #include <fstream>
 
 #include <media/stagefright/foundation/ABitReader.h>
-#include "include/HevcUtils.h"
+#include <HevcUtils.h>
 
 #include "HEVCUtilsTestEnvironment.h"
 
diff --git a/media/libstagefright/tests/MediaCodecListOverrides_test.cpp b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
index 0c22a42..20737db 100644
--- a/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
+++ b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
@@ -20,11 +20,10 @@
 
 #include <gtest/gtest.h>
 
-#include "MediaCodecListOverrides.h"
-
 #include <media/MediaCodecInfo.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaCodecListOverrides.h>
 
 #include <vector>
 
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index 13d5b89..a067284 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -51,10 +51,6 @@
         "libstagefright_foundation",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-    ],
-
     // TODO: (b/150181583)
     compile_multilib: "first",
 
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 0097830..2bcfd67 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,9 +32,6 @@
         "liblog",
         "media_permission-aidl-cpp",
     ],
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-    ],
 }
 
 cc_fuzz {
@@ -74,6 +71,7 @@
     srcs: [
         "FrameDecoderFuzzer.cpp",
     ],
+    corpus: ["corpus/*"],
     defaults: ["libstagefright_fuzzer_defaults"],
 }
 
@@ -85,10 +83,10 @@
     ],
     dictionary: "dictionaries/formats.dict",
     defaults: ["libstagefright_fuzzer_defaults"],
+    header_libs: [
+        "libstagefright_webm_headers",
+    ],
     static_libs: [
-        "libstagefright_webm",
         "libdatasource",
-        "libstagefright_esds",
-        "libogg",
     ],
 }
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index c251479..4218d2d 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include "include/FrameDecoder.h"
+#include <FrameDecoder.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/IMediaSource.h>
 #include <media/stagefright/MetaData.h>
@@ -46,12 +46,15 @@
     }
 
     while (fdp.remaining_bytes()) {
-        switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 3)) {
-            case 0:
-                decoder->init(/*frameTimeUs*/ fdp.ConsumeIntegral<int64_t>(),
-                              /*option*/ fdp.ConsumeIntegral<int>(),
-                              /*colorFormat*/ fdp.ConsumeIntegral<int>());
+        uint8_t switchCase = fdp.ConsumeIntegralInRange<uint8_t>(0, 3);
+        switch (switchCase) {
+            case 0: {
+                int64_t frameTimeUs = fdp.ConsumeIntegral<int64_t>();
+                int option = fdp.ConsumeIntegral<int>();
+                int colorFormat = fdp.ConsumeIntegral<int>();
+                decoder->init(frameTimeUs, option, colorFormat);
                 break;
+            }
             case 1:
                 decoder->extractFrame();
                 break;
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 810ae95..d94c8ff 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -23,7 +23,8 @@
 #include <media/stagefright/OggWriter.h>
 
 #include "MediaMimeTypes.h"
-#include "webm/WebmWriter.h"
+
+#include <webm/WebmWriter.h>
 
 namespace android {
 std::string genMimeType(FuzzedDataProvider *dataProvider) {
@@ -121,4 +122,4 @@
     }
     return writer;
 }
-}  // namespace android
\ No newline at end of file
+}  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/corpus/color_format_rgb_565.dat b/media/libstagefright/tests/fuzzers/corpus/color_format_rgb_565.dat
new file mode 100644
index 0000000..698e21d
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/color_format_rgb_565.dat
Binary files differ
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index ac1e9b1..a8e64b6 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -393,3 +393,51 @@
     std::this_thread::sleep_for(std::chrono::milliseconds(100));
     looper->stop();
 }
+
+TEST(MediaCodecTest, DeadWhileStoppingError) {
+    // Test scenario:
+    //
+    // 1) Client thread calls stop(); MediaCodec looper thread calls
+    //    initiateShutdown(); shutdown is being handled at the component thread.
+    // 2) An error occurs while handling initiateShutdown().
+    // 3) MediaCodec looper thread handles the error.
+    // 4) Codec service dies after the error is handled.
+    // 5) MediaCodec looper thread handles the death.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateShutdown(_))
+                .WillByDefault([mockCodec](bool) {
+                    // 2)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                    // 4)
+                    mockCodec->callback()->onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+                    // Codec service has died, no callback.
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+    codec->stop();
+    // sleep here so that the looper thread can handle the error
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    looper->stop();
+}
diff --git a/media/libstagefright/tests/writer/Android.bp b/media/libstagefright/tests/writer/Android.bp
index 38d5ecc..49fb569 100644
--- a/media/libstagefright/tests/writer/Android.bp
+++ b/media/libstagefright/tests/writer/Android.bp
@@ -52,10 +52,6 @@
         "libogg",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
-    ],
-
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/timedtext/Android.bp b/media/libstagefright/timedtext/Android.bp
index 6590ef7..619e06b 100644
--- a/media/libstagefright/timedtext/Android.bp
+++ b/media/libstagefright/timedtext/Android.bp
@@ -35,8 +35,16 @@
         cfi: true,
     },
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
+    export_include_dirs: [
+        "include",
+    ],
+
+    local_include_dirs: [
+        "include/timedtext",
+    ],
+
+    header_libs: [
+        "libstagefright_headers",
     ],
 
     shared_libs: ["libmedia"],
diff --git a/media/libstagefright/timedtext/TextDescriptions.h b/media/libstagefright/timedtext/include/timedtext/TextDescriptions.h
similarity index 100%
rename from media/libstagefright/timedtext/TextDescriptions.h
rename to media/libstagefright/timedtext/include/timedtext/TextDescriptions.h
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index 0b632bf..ae97c50 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -36,17 +36,17 @@
 
     static_libs: [
         "libstagefright_timedtext",
-        "libstagefright_foundation",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
+    header_libs: [
+        "libstagefright_headers",
     ],
 
     shared_libs: [
         "liblog",
         "libmedia",
         "libbinder",
+        "libstagefright_foundation",
     ],
 
     cflags: [
diff --git a/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
index d85ae39..f934b54 100644
--- a/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
+++ b/media/libstagefright/timedtext/test/TimedTextUnitTest.cpp
@@ -27,7 +27,7 @@
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/ByteUtils.h>
 
-#include "timedtext/TextDescriptions.h"
+#include <timedtext/TextDescriptions.h>
 
 #include "TimedTextTestEnvironment.h"
 
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 32a22ba..6ed3e0e 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -33,7 +33,11 @@
         "WebmWriter.cpp",
     ],
 
-    include_dirs: ["frameworks/av/include"],
+    local_include_dirs: [
+        "include/webm",
+    ],
+
+    export_include_dirs: ["include"],
 
     shared_libs: [
         "libdatasource",
@@ -44,7 +48,20 @@
     ],
 
     header_libs: [
+        "av-headers",
         "libmedia_headers",
-        "media_ndk_headers",
     ],
 }
+
+
+cc_library_headers {
+    name: "libstagefright_webm_headers",
+    export_include_dirs: ["include"],
+
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libstagefright/webm/EbmlUtil.h b/media/libstagefright/webm/include/webm/EbmlUtil.h
similarity index 100%
rename from media/libstagefright/webm/EbmlUtil.h
rename to media/libstagefright/webm/include/webm/EbmlUtil.h
diff --git a/media/libstagefright/webm/LinkedBlockingQueue.h b/media/libstagefright/webm/include/webm/LinkedBlockingQueue.h
similarity index 100%
rename from media/libstagefright/webm/LinkedBlockingQueue.h
rename to media/libstagefright/webm/include/webm/LinkedBlockingQueue.h
diff --git a/media/libstagefright/webm/WebmConstants.h b/media/libstagefright/webm/include/webm/WebmConstants.h
similarity index 100%
rename from media/libstagefright/webm/WebmConstants.h
rename to media/libstagefright/webm/include/webm/WebmConstants.h
diff --git a/media/libstagefright/webm/WebmElement.h b/media/libstagefright/webm/include/webm/WebmElement.h
similarity index 100%
rename from media/libstagefright/webm/WebmElement.h
rename to media/libstagefright/webm/include/webm/WebmElement.h
diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/include/webm/WebmFrame.h
similarity index 100%
rename from media/libstagefright/webm/WebmFrame.h
rename to media/libstagefright/webm/include/webm/WebmFrame.h
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/include/webm/WebmFrameThread.h
similarity index 100%
rename from media/libstagefright/webm/WebmFrameThread.h
rename to media/libstagefright/webm/include/webm/WebmFrameThread.h
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/include/webm/WebmWriter.h
similarity index 100%
rename from media/libstagefright/webm/WebmWriter.h
rename to media/libstagefright/webm/include/webm/WebmWriter.h
diff --git a/media/libstagefright/webm/tests/Android.bp b/media/libstagefright/webm/tests/Android.bp
index 4443766..629ee47 100644
--- a/media/libstagefright/webm/tests/Android.bp
+++ b/media/libstagefright/webm/tests/Android.bp
@@ -31,8 +31,8 @@
         "WebmFrameThreadUnitTest.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
+    header_libs: [
+        "libstagefright_headers",
     ],
 
     static_libs: [
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index a33b888..b81f27e 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -119,7 +119,7 @@
         "libstagefright_webm",
         "libdatasource",
     ],
-    include_dirs: [
-        "frameworks/av/media/libstagefright",
+    header_libs: [
+        "libstagefright_headers",
     ],
 }
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index ee7af70..b97f347 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -174,10 +174,13 @@
             params.sampleRate = 16000;
         } else {
             params.sampleRate = max(1, params.sampleRate);
+            params.channelCount = max(0, params.channelCount);
         }
         format->setInt32("channel-count", params.channelCount);
         format->setInt32("sample-rate", params.sampleRate);
     } else if (!strncmp(params.mime, "video/", 6)) {
+        params.width = max(1, params.width);
+        params.height = max(1, params.height);
         format->setInt32("width", params.width);
         format->setInt32("height", params.height);
     }
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 055dd80..afc873c 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -41,8 +41,6 @@
         "-Wall",
     ],
 
-    clang: true,
-
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 411c206..5506a73 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -39,7 +39,7 @@
         darwin: {
             enabled: false,
         },
-        linux_glibc: {
+        glibc: {
             cflags: [
                 "-Dsigev_notify_thread_id=_sigev_un._tid",
             ],
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index e25658f..edddaa4 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -33,7 +33,7 @@
 
     shared_libs: [
         "android.hardware.media.omx@1.0",
-        "libandroidicu",
+        "libicu",
         "libfmq",
         "libbinder",
         "libhidlbase",
@@ -47,11 +47,6 @@
         "libregistermsext",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libmediaplayerservice",
-        "frameworks/av/services/mediaresourcemanager",
-    ],
-
     // By default mediaserver runs in 32-bit to save memory, except
     // on 64-bit-only lunch targets.
     // ****************************************************************
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 58e2d2a..026847a 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -25,9 +25,8 @@
 #include <utils/Log.h>
 #include "RegisterExtensions.h"
 
-// from LOCAL_C_INCLUDES
-#include "MediaPlayerService.h"
-#include "ResourceManagerService.h"
+#include <MediaPlayerService.h>
+#include <ResourceManagerService.h>
 
 using namespace android;
 
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 1928ba8..54d3d4a 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,6 +1,5 @@
 set noparent
 
-marcone@google.com
 jsharkey@android.com
 jameswei@google.com
 rmojumder@google.com
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 8d527e9..ddc71db 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -63,6 +63,7 @@
 }
 
 // for use with header_libs
+// supports use as <media/NdkMediaExtractor.h> not <NdkMediaExtractor.h>
 cc_library_headers {
     name: "media_ndk_headers",
     vendor_available: true,
@@ -103,8 +104,6 @@
     ],
 
     include_dirs: [
-        "frameworks/base/core/jni",
-        "frameworks/native/include/media/openmax",
         "system/media/camera/include",
     ],
 
@@ -145,6 +144,7 @@
         "libgui",
         "libui",
         "libmediandk_utils",
+        "android.hardware.drm-V1-ndk",
     ],
 
     export_header_lib_headers: ["jni_headers"],
@@ -167,7 +167,7 @@
     stubs: {
         symbol_file: "libmediandk.map.txt",
         versions: ["29"],
-    },
+    }
 }
 
 cc_library {
@@ -177,9 +177,8 @@
         "NdkMediaDataSourceCallbacks.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
-        "frameworks/av/media/ndk/include",
+    local_include_dirs: [
+        "include",
     ],
 
     export_include_dirs: [
@@ -193,7 +192,8 @@
     ],
 
     header_libs: [
+        "libstagefright_headers",
         "libmedia_headers",
+        "libstagefright_headers",
     ],
 
     shared_libs: [
@@ -223,6 +224,7 @@
         "libcutils",
         "android.hardware.graphics.bufferqueue@1.0",
     ],
+
     header_libs: [
         "libstagefright_foundation_headers",
     ],
@@ -230,9 +232,6 @@
     cflags: [
         "-D__ANDROID_VNDK__",
     ],
-    include_dirs: [
-        "frameworks/av/media/ndk/",
-    ],
 }
 
 cc_library_static {
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 5fdd45a..12a0d53 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -25,7 +25,6 @@
 #include <android_media_Utils.h>
 #include <private/android/AHardwareBufferHelpers.h>
 #include <utils/Log.h>
-#include "hardware/camera3.h"
 
 using namespace android;
 
@@ -375,8 +374,8 @@
     uint8_t* jpegBuffer = mLockedBuffer->data;
 
     // First check for JPEG transport header at the end of the buffer
-    uint8_t* header = jpegBuffer + (width - sizeof(struct camera3_jpeg_blob));
-    struct camera3_jpeg_blob* blob = (struct camera3_jpeg_blob*)(header);
+    uint8_t* header = jpegBuffer + (width - sizeof(struct camera3_jpeg_blob_v2));
+    struct camera3_jpeg_blob_v2* blob = (struct camera3_jpeg_blob_v2*)(header);
     if (blob->jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
         size = blob->jpeg_size;
         ALOGV("%s: Jpeg size = %d", __FUNCTION__, size);
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 1ae2b44..38e422d 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -15,6 +15,8 @@
  */
 
 #include <inttypes.h>
+#include <mutex>
+#include <set>
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NdkMediaCodec"
@@ -29,6 +31,7 @@
 #include <gui/Surface.h>
 
 #include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 
 #include <media/stagefright/PersistentSurface.h>
@@ -41,6 +44,7 @@
 
 
 static media_status_t translate_error(status_t err) {
+
     if (err == OK) {
         return AMEDIA_OK;
     } else if (err == -EAGAIN) {
@@ -50,7 +54,18 @@
     } else if (err == DEAD_OBJECT) {
         return AMEDIACODEC_ERROR_RECLAIMED;
     }
-    ALOGE("sf error code: %d", err);
+
+    {
+        // minimize log flooding. Some CTS behavior made this noisy and apps could do the same.
+        static std::set<status_t> untranslated;
+        static std::mutex mutex;
+        std::lock_guard lg(mutex);
+
+        if (untranslated.find(err) == untranslated.end()) {
+            ALOGE("untranslated sf error code: %d", err);
+            untranslated.insert(err);
+        }
+    }
     return AMEDIA_ERROR_UNKNOWN;
 }
 
@@ -59,6 +74,7 @@
     kWhatAsyncNotify,
     kWhatRequestActivityNotifications,
     kWhatStopActivityNotifications,
+    kWhatFrameRenderedNotify,
 };
 
 struct AMediaCodecPersistentSurface : public Surface {
@@ -98,6 +114,11 @@
     mutable Mutex mAsyncCallbackLock;
     AMediaCodecOnAsyncNotifyCallback mAsyncCallback;
     void *mAsyncCallbackUserData;
+
+    sp<AMessage> mFrameRenderedNotify;
+    mutable Mutex mFrameRenderedCallbackLock;
+    AMediaCodecOnFrameRendered mFrameRenderedCallback;
+    void *mFrameRenderedCallbackUserData;
 };
 
 CodecHandler::CodecHandler(AMediaCodec *codec) {
@@ -158,8 +179,7 @@
                      }
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
                          mCodec->mAsyncCallback.onAsyncInputAvailable(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -205,8 +225,7 @@
                          (uint32_t)flags};
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
                          mCodec->mAsyncCallback.onAsyncOutputAvailable(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -234,8 +253,7 @@
                      AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(&copy);
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
                          mCodec->mAsyncCallback.onAsyncFormatChanged(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -259,12 +277,11 @@
                          break;
                      }
                      msg->findString("detail", &detail);
-                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
-                           err, actionCode, detail.c_str());
+                     ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)",
+                           err, StrMediaError(err).c_str(), actionCode, detail.c_str());
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncError != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncError != NULL) {
                          mCodec->mAsyncCallback.onAsyncError(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -298,6 +315,43 @@
             break;
         }
 
+        case kWhatFrameRenderedNotify:
+        {
+            sp<AMessage> data;
+            if (!msg->findMessage("data", &data)) {
+                ALOGE("kWhatFrameRenderedNotify: data is expected.");
+                break;
+            }
+
+            AMessage::Type type;
+            int64_t mediaTimeUs, systemNano;
+            size_t index = 0;
+
+            // TODO: this depends on the key layout of MediaCodec::CreateFramesRenderedMessage.
+            for (size_t ix = 0; ix < data->countEntries(); ix++) {
+                AString name = data->getEntryNameAt(ix, &type);
+                if (name.startsWith(AStringPrintf("%zu-media-time-us", index).c_str())) {
+                    AMessage::ItemData item = data->getEntryAt(ix);
+                    item.find(&mediaTimeUs);
+                } else if (name.startsWith(AStringPrintf("%zu-system-nano", index).c_str())) {
+                    AMessage::ItemData item = data->getEntryAt(ix);
+                    item.find(&systemNano);
+
+                    Mutex::Autolock _l(mCodec->mFrameRenderedCallbackLock);
+                    if (mCodec->mFrameRenderedCallback != NULL) {
+                        mCodec->mFrameRenderedCallback(
+                                mCodec,
+                                mCodec->mFrameRenderedCallbackUserData,
+                                mediaTimeUs,
+                                systemNano);
+                    }
+
+                    index++;
+                }
+            }
+            break;
+        }
+
         default:
             ALOGE("shouldn't be here");
             break;
@@ -452,17 +506,19 @@
         uint32_t flags) {
     sp<AMessage> nativeFormat;
     AMediaFormat_getFormat(format, &nativeFormat);
-    ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
+    // Create our own shallow copy so we are not affected by later changes the app makes to the format.
+    sp<AMessage> dupNativeFormat = nativeFormat->dup();
+    ALOGV("configure with format: %s", dupNativeFormat->debugString(0).c_str());
     sp<Surface> surface = NULL;
     if (window != NULL) {
         surface = (Surface*) window;
     }
 
-    status_t err = mData->mCodec->configure(nativeFormat, surface,
+    status_t err = mData->mCodec->configure(dupNativeFormat, surface,
             crypto ? crypto->mCrypto : NULL, flags);
     if (err != OK) {
         ALOGE("configure: err(%d), failed with format: %s",
-              err, nativeFormat->debugString(0).c_str());
+              err, dupNativeFormat->debugString(0).c_str());
     }
     return translate_error(err);
 }
@@ -472,22 +528,46 @@
         AMediaCodec *mData,
         AMediaCodecOnAsyncNotifyCallback callback,
         void *userdata) {
-    if (mData->mAsyncNotify == NULL && userdata != NULL) {
-        mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
-        status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
-        if (err != OK) {
-            ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
-            return translate_error(err);
-        }
-    }
 
     Mutex::Autolock _l(mData->mAsyncCallbackLock);
+
+    if (mData->mAsyncNotify == NULL) {
+        mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+    }
+
+    // always call, codec may have been reset/re-configured since last call.
+    status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
+    if (err != OK) {
+        ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
+        return translate_error(err);
+    }
+
     mData->mAsyncCallback = callback;
     mData->mAsyncCallbackUserData = userdata;
 
     return AMEDIA_OK;
 }
 
+EXPORT
+media_status_t AMediaCodec_setOnFrameRenderedCallback(
+        AMediaCodec *mData,
+        AMediaCodecOnFrameRendered callback,
+        void *userdata) {
+    Mutex::Autolock _l(mData->mFrameRenderedCallbackLock);
+    if (mData->mFrameRenderedNotify == NULL) {
+        mData->mFrameRenderedNotify = new AMessage(kWhatFrameRenderedNotify, mData->mHandler);
+    }
+    status_t err = mData->mCodec->setOnFrameRenderedNotification(mData->mFrameRenderedNotify);
+    if (err != OK) {
+        ALOGE("setOnFrameRenderedNotifyCallback: err(%d), failed to set callback", err);
+        return translate_error(err);
+    }
+
+    mData->mFrameRenderedCallback = callback;
+    mData->mFrameRenderedCallbackUserData = userdata;
+
+    return AMEDIA_OK;
+}
 
 EXPORT
 media_status_t AMediaCodec_releaseCrypto(AMediaCodec *mData) {
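
The kWhatFrameRenderedNotify handler above walks the "data" message looking for indexed key pairs. A minimal sketch of the layout it assumes, derived from its parsing loop (the authoritative producer is MediaCodec::CreateFramesRenderedMessage, which is not part of this change; the values are illustrative):

    #include <media/stagefright/foundation/AMessage.h>

    android::sp<android::AMessage> makeExampleRenderedData() {
        using android::AMessage;
        android::sp<AMessage> data = new AMessage;
        data->setInt64("0-media-time-us", 33333);        // frame 0 presentation time
        data->setInt64("0-system-nano", 1000000000LL);   // frame 0 render time
        data->setInt64("1-media-time-us", 66666);        // frame 1
        data->setInt64("1-system-nano", 1033333333LL);
        return data;
    }
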
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 6e9945d..59c1103 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -97,6 +97,8 @@
     List<idvec_t> mIds;
     KeyedVector<String8, String8> mQueryResults;
     Vector<uint8_t> mKeyRequest;
+    String8 mDefaultUrl;
+    AMediaDrmKeyRequestType mkeyRequestType;
     Vector<uint8_t> mProvisionRequest;
     String8 mProvisionUrl;
     String8 mPropertyString;
@@ -416,6 +418,21 @@
         const AMediaDrmKeyValue *optionalParameters, size_t numOptionalParameters,
         const uint8_t **keyRequest, size_t *keyRequestSize) {
 
+    return AMediaDrm_getKeyRequestWithDefaultUrlAndType(mObj,
+        scope, init, initSize, mimeType, keyType, optionalParameters,
+        numOptionalParameters, keyRequest,
+        keyRequestSize, NULL, NULL);
+}
+
+EXPORT
+media_status_t AMediaDrm_getKeyRequestWithDefaultUrlAndType(AMediaDrm *mObj,
+        const AMediaDrmScope *scope, const uint8_t *init, size_t initSize,
+        const char *mimeType, AMediaDrmKeyType keyType,
+        const AMediaDrmKeyValue *optionalParameters,
+        size_t numOptionalParameters, const uint8_t **keyRequest,
+        size_t *keyRequestSize, const char **defaultUrl,
+        AMediaDrmKeyRequestType *keyRequestType) {
+
     if (!mObj || mObj->mDrm == NULL) {
         return AMEDIA_ERROR_INVALID_OBJECT;
     }
@@ -449,18 +466,43 @@
         mdOptionalParameters.add(String8(optionalParameters[i].mKey),
                 String8(optionalParameters[i].mValue));
     }
-    String8 defaultUrl;
-    DrmPlugin::KeyRequestType keyRequestType;
+
+    DrmPlugin::KeyRequestType requestType;
     mObj->mKeyRequest.clear();
     status_t status = mObj->mDrm->getKeyRequest(*iter, mdInit, String8(mimeType),
-            mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl,
-            &keyRequestType);
+            mdKeyType, mdOptionalParameters, mObj->mKeyRequest, mObj->mDefaultUrl,
+            &requestType);
     if (status != OK) {
         return translateStatus(status);
     } else {
         *keyRequest = mObj->mKeyRequest.array();
         *keyRequestSize = mObj->mKeyRequest.size();
+        if (defaultUrl != NULL)
+            *defaultUrl = mObj->mDefaultUrl.string();
+        switch(requestType) {
+            case DrmPlugin::kKeyRequestType_Initial:
+                mObj->mkeyRequestType = KEY_REQUEST_TYPE_INITIAL;
+                break;
+            case DrmPlugin::kKeyRequestType_Renewal:
+                mObj->mkeyRequestType = KEY_REQUEST_TYPE_RENEWAL;
+                break;
+            case DrmPlugin::kKeyRequestType_Release:
+                mObj->mkeyRequestType = KEY_REQUEST_TYPE_RELEASE;
+                break;
+            case DrmPlugin::kKeyRequestType_None:
+                mObj->mkeyRequestType = KEY_REQUEST_TYPE_NONE;
+                break;
+            case DrmPlugin::kKeyRequestType_Update:
+                mObj->mkeyRequestType = KEY_REQUEST_TYPE_UPDATE;
+                break;
+            default:
+                return AMEDIA_ERROR_UNKNOWN;
+        }
+
+        if (keyRequestType != NULL)
+            *keyRequestType = mObj->mkeyRequestType;
     }
+
     return AMEDIA_OK;
 }
 
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 07fc5de..6d3c348 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -388,9 +388,11 @@
         mode = CryptoPlugin::kMode_AES_CTR;
     }
 
+    sp<ABuffer> clearbuf;
+    sp<ABuffer> cryptedbuf;
     if (sizeof(uint32_t) != sizeof(size_t)) {
-        sp<ABuffer> clearbuf   = U32ArrayToSizeBuf(numSubSamples, (uint32_t *)cleardata);
-        sp<ABuffer> cryptedbuf = U32ArrayToSizeBuf(numSubSamples, (uint32_t *)crypteddata);
+        clearbuf   = U32ArrayToSizeBuf(numSubSamples, (uint32_t *)cleardata);
+        cryptedbuf = U32ArrayToSizeBuf(numSubSamples, (uint32_t *)crypteddata);
         cleardata   = clearbuf    == NULL ? NULL : clearbuf->data();
         crypteddata = crypteddata == NULL ? NULL : cryptedbuf->data();
         if(crypteddata == NULL || cleardata == NULL) {
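
Hoisting clearbuf and cryptedbuf above is a lifetime fix: previously both sp<ABuffer> objects were destroyed at the end of the if block while cleardata and crypteddata still pointed into their storage. A minimal, self-contained sketch of the hazard (hypothetical types, not the NDK code):

    #include <cstddef>
    #include <memory>
    #include <vector>

    int main() {
        const size_t* sizes = nullptr;
        {
            auto owner = std::make_shared<std::vector<size_t>>(3, size_t{42});
            sizes = owner->data();      // borrow from the block-scoped owner
        }                               // owner destroyed here; 'sizes' now dangles
        // Reading sizes[0] at this point would be undefined behavior. Declaring
        // 'owner' in the enclosing scope (as the diff does for clearbuf/cryptedbuf)
        // keeps the storage alive for the rest of the function.
        return 0;
    }
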
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 69ab242..923453a 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -361,6 +361,7 @@
         "mpegh-reference-channel-layout";
 EXPORT const char* AMEDIAFORMAT_KEY_OPERATING_RATE = "operating-rate";
 EXPORT const char* AMEDIAFORMAT_KEY_PCM_ENCODING = "pcm-encoding";
+EXPORT const char* AMEDIAFORMAT_KEY_PICTURE_TYPE = "picture-type";
 EXPORT const char* AMEDIAFORMAT_KEY_PRIORITY = "priority";
 EXPORT const char* AMEDIAFORMAT_KEY_PROFILE = "profile";
 EXPORT const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN = "pcm-big-endian";
@@ -394,6 +395,9 @@
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_ID = "track-id";
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_INDEX = "track-index";
 EXPORT const char* AMEDIAFORMAT_KEY_VALID_SAMPLES = "valid-samples";
+EXPORT const char* AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL =
+        "video-encoding-statistics-level";
+EXPORT const char* AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE = "video-qp-average";
 EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MAX = "video-qp-b-max";
 EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MIN = "video-qp-b-min";
 EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MAX = "video-qp-i-max";
diff --git a/media/ndk/OWNERS b/media/ndk/OWNERS
index 9dc441e..83644f0 100644
--- a/media/ndk/OWNERS
+++ b/media/ndk/OWNERS
@@ -1,3 +1,4 @@
-marcone@google.com
+essick@google.com
+lajos@google.com
 # For AImage/AImageReader
 include platform/frameworks/av:/camera/OWNERS
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 519148e..4938f76 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -39,6 +39,8 @@
 #include <stdint.h>
 #include <sys/cdefs.h>
 
+#include <android/api-level.h>
+
 #include "NdkMediaCrypto.h"
 #include "NdkMediaError.h"
 #include "NdkMediaFormat.h"
@@ -122,6 +124,25 @@
 } AMediaCodecOnAsyncNotifyCallback;
 
 /**
+ * Called when an output frame has rendered on the output surface.
+ *
+ * \param codec       The codec object that generated this notification.
+ * \param userdata    The user data set at AMediaCodec_setOnFrameRenderedCallback.
+ * \param mediaTimeUs The presentation time (media time) of the frame rendered.
+ *                    This is usually the same as specified in
+ *                    AMediaCodec_queueInputBuffer, but some codecs may alter
+ *                    the media time by applying some time-based transformation,
+ *                    such as frame rate conversion. In that case, presentation
+ *                    time corresponds to the actual output frame rendered.
+ * \param systemNano  The system time when the frame was rendered.
+ */
+typedef void (*AMediaCodecOnFrameRendered)(
+        AMediaCodec *codec,
+        void *userdata,
+        int64_t mediaTimeUs,
+        int64_t systemNano);
+
+/**
  * Create codec by name. Use this if you know the exact codec you want to use.
  * When configuring, you will need to specify whether to use the codec as an
  * encoder or decoder.
@@ -416,18 +437,28 @@
 
 /**
  * Set an asynchronous callback for actionable AMediaCodec events.
- * When asynchronous callback is enabled, the client should not call
+ * When asynchronous callback is enabled, it is an error for the client to call
  * AMediaCodec_getInputBuffers(), AMediaCodec_getOutputBuffers(),
  * AMediaCodec_dequeueInputBuffer() or AMediaCodec_dequeueOutputBuffer().
  *
- * Also, AMediaCodec_flush() behaves differently in asynchronous mode.
- * After calling AMediaCodec_flush(), you must call AMediaCodec_start() to
- * "resume" receiving input buffers, even if an input surface was created.
+ * AMediaCodec_flush() behaves differently in asynchronous mode.
+ * After calling AMediaCodec_flush(), the client must call AMediaCodec_start() to
+ * "resume" receiving input buffers. Even if the client does not receive
+ * AMediaCodecOnAsyncInputAvailable callbacks from video encoders configured
+ * with an input surface, the client still needs to call AMediaCodec_start()
+ * to resume the input surface so that it sends buffers to the encoders.
+ *
+ * When called with null callback, this method unregisters any previously set callback.
  *
  * Refer to the definition of AMediaCodecOnAsyncNotifyCallback on how each
  * callback function is called and what are specified.
- * The specified userdata is the pointer used when those callback functions are
- * called.
+ * The specified userdata is opaque data which will be passed along
+ * when the callback functions are called. MediaCodec does not look at or alter the
+ * value of userdata. Often it is a pointer to a client-owned object,
+ * and the client manages the lifecycle of the object in that case.
+ *
+ * Once the callback is unregistered or the codec is reset / released, the
+ * previously registered callback will not be called.
  *
  * All callbacks are fired on one NDK internal thread.
  * AMediaCodec_setAsyncNotifyCallback should not be called on the callback thread.
@@ -441,6 +472,39 @@
         void *userdata) __INTRODUCED_IN(28);
 
 /**
+ * Registers a callback to be invoked when an output frame is rendered on the output surface.
+ *
+ * This method can be called in any codec state, but will only have an effect in the
+ * Executing state for codecs that render buffers to the output surface.
+ *
+ * This callback is for informational purposes only: it provides render timing
+ * samples, which may be significantly delayed and batched. Some frames may have
+ * been rendered even if no callback was generated.
+ *
+ * When called with null callback, this method unregisters any previously set callback.
+ *
+ * Refer to the definition of AMediaCodecOnFrameRendered for how each
+ * callback function is called and which parameters are supplied.
+ * The specified userdata is opaque data which will be passed along
+ * when the callback functions are called. MediaCodec does not look at or alter the
+ * value of userdata. Often it is a pointer to a client-owned object,
+ * and the client manages the lifecycle of the object in that case.
+ *
+ * Once the callback is unregistered or the codec is reset / released, the
+ * previously registered callback will not be called.
+ *
+ * All callbacks are fired on one NDK internal thread.
+ * AMediaCodec_setOnFrameRenderedCallback should not be called on the callback thread.
+ * No heavy duty task should be performed on callback thread.
+ *
+ * Available since Android T.
+ */
+media_status_t AMediaCodec_setOnFrameRenderedCallback(
+        AMediaCodec*,
+        AMediaCodecOnFrameRendered callback,
+        void *userdata) __INTRODUCED_IN(__ANDROID_API_T__);
+
+/**
  * Release the crypto if applicable.
  *
  * Available since API level 28.
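
A hedged usage sketch for the new registration API documented above (the log tag and helper names are illustrative; only the functions introduced by this change plus standard NDK logging are used):

    #include <media/NdkMediaCodec.h>
    #include <android/log.h>

    static void onFrameRendered(AMediaCodec* codec, void* userdata,
                                int64_t mediaTimeUs, int64_t systemNano) {
        (void)codec; (void)userdata;
        __android_log_print(ANDROID_LOG_DEBUG, "RenderDemo",
                            "pts=%lld us rendered at %lld ns",
                            (long long)mediaTimeUs, (long long)systemNano);
    }

    void attachRenderCallback(AMediaCodec* codec) {
        // Passing NULL later unregisters the callback, per the documentation above.
        AMediaCodec_setOnFrameRenderedCallback(codec, onFrameRendered, /*userdata=*/nullptr);
    }
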
diff --git a/media/ndk/include/media/NdkMediaDrm.h b/media/ndk/include/media/NdkMediaDrm.h
index 849a8f9..4eca3d7 100644
--- a/media/ndk/include/media/NdkMediaDrm.h
+++ b/media/ndk/include/media/NdkMediaDrm.h
@@ -112,6 +112,41 @@
 } AMediaDrmKeyType;
 
 /**
+ * Introduced in API 33.
+ */
+typedef enum AMediaDrmKeyRequestType : int32_t {
+    /**
+     * Key request type is initial license request.
+     * An initial license request is necessary to load keys.
+     */
+    KEY_REQUEST_TYPE_INITIAL,
+
+    /**
+     * Key request type is license renewal.
+     * A renewal license request is necessary to prevent the keys from expiring.
+     */
+    KEY_REQUEST_TYPE_RENEWAL,
+
+    /**
+     * Key request type is license release.
+     * A license release request indicates that keys are removed.
+     */
+    KEY_REQUEST_TYPE_RELEASE,
+
+    /**
+     * Keys are already loaded and are available for use. No license request is necessary, and
+     * no key request data is returned.
+     */
+    KEY_REQUEST_TYPE_NONE,
+
+    /**
+     * Keys have been loaded but an additional license request is needed
+     * to update their values.
+     */
+    KEY_REQUEST_TYPE_UPDATE
+} AMediaDrmKeyRequestType;
+
+/**
  *  Data type containing {key, value} pair
  */
 typedef struct AMediaDrmKeyValuePair {
@@ -248,7 +283,10 @@
  * to obtain or release keys used to decrypt encrypted content.
  * AMediaDrm_getKeyRequest is used to obtain an opaque key request byte array that
  * is delivered to the license server.  The opaque key request byte array is
- * returned in KeyRequest.data.
+ * returned in *keyRequest and the number of bytes in the request is
+ * returned in *keyRequestSize.
+ * This API has the same functionality as AMediaDrm_getKeyRequestWithDefaultUrlAndType()
+ * when defaultUrl and keyRequestType are passed in as NULL.
  *
  * After the app has received the key request response from the server,
  * it should deliver to the response to the DRM engine plugin using the method
@@ -280,11 +318,14 @@
  *   by the caller
  *
  * On exit:
+ *   If this returns AMEDIA_OK,
  *   1. The keyRequest pointer will reference the opaque key request data.  It
  *       will reside in memory owned by the AMediaDrm object, and will remain
- *       accessible until the next call to AMediaDrm_getKeyRequest or until the
+ *       accessible until the next call to AMediaDrm_getKeyRequest
+ *       or AMediaDrm_getKeyRequestWithDefaultUrlAndType or until the
  *       MediaDrm object is released.
  *   2. keyRequestSize will be set to the size of the request
+ *   If this does not return AMEDIA_OK, the values of these parameters should not be used.
  *
  * Returns MEDIADRM_NOT_PROVISIONED_ERROR if reprovisioning is needed, due to a
  * problem with the device certificate.
@@ -297,6 +338,72 @@
         const uint8_t **keyRequest, size_t *keyRequestSize) __INTRODUCED_IN(21);
 
 /**
+ * A key request/response exchange occurs between the app and a license server
+ * to obtain or release keys used to decrypt encrypted content.
+ * AMediaDrm_getKeyRequest is used to obtain an opaque key request byte array that
+ * is delivered to the license server.  The opaque key request byte array is
+ * returned in *keyRequest and the number of bytes in the request is
+ * returned in *keyRequestSize.
+ *
+ * After the app has received the key request response from the server,
+ * it should deliver the response to the DRM engine plugin using the method
+ * AMediaDrm_provideKeyResponse.
+ *
+ * scope may be a sessionId or a keySetId, depending on the specified keyType.
+ * When the keyType is KEY_TYPE_STREAMING or KEY_TYPE_OFFLINE, scope should be set
+ * to the sessionId the keys will be provided to.  When the keyType is
+ * KEY_TYPE_RELEASE, scope should be set to the keySetId of the keys being released.
+ * Releasing keys from a device invalidates them for all sessions.
+ *
+ * init is container-specific data; its meaning is interpreted based on the mime type
+ * provided in the mimeType parameter.  It could contain, for example, the content
+ * ID, key ID or other data obtained from the content metadata that is required to
+ * generate the key request. init may be null when keyType is KEY_TYPE_RELEASE.
+ *
+ * initSize is the number of bytes of init data.
+ *
+ * mimeType identifies the mime type of the content.
+ *
+ * keyType specifies the type of the request. The request may be to acquire keys for
+ *   streaming or offline content, or to release previously acquired keys, which are
+ *   identified by a keySetId.
+ *
+ * optionalParameters are included in the key request message to allow a client
+ *   application to provide additional message parameters to the server.
+ *
+ * numOptionalParameters indicates the number of optional parameters provided
+ *   by the caller
+ *
+ * On exit:
+ *   If this returns AMEDIA_OK,
+ *   1. The keyRequest pointer will reference the opaque key request data.  It
+ *       will reside in memory owned by the AMediaDrm object, and will remain
+ *       accessible until the next call to either AMediaDrm_getKeyRequest
+ *       or AMediaDrm_getKeyRequestWithDefaultUrlAndType or until the
+ *       MediaDrm object is released.
+ *   2. keyRequestSize will be set to the size of the request.
+ *   3. defaultUrl will be set to the recommended URL to deliver the key request.
+ *      The defaultUrl pointer will reference a NULL terminated URL string.
+ *      It will be UTF-8 encoded and have the same lifetime as the key request data
+ *      that the keyRequest pointer references. Passing in NULL means you don't need
+ *      it to be reported.
+ *   4. keyRequestType will be set to the key request type. Passing in NULL means
+ *      you don't need it to be reported.
+ *
+ * Returns MEDIADRM_NOT_PROVISIONED_ERROR if reprovisioning is needed, due to a
+ * problem with the device certificate.
+ *
+ * Available since API level 33.
+ */
+media_status_t AMediaDrm_getKeyRequestWithDefaultUrlAndType(AMediaDrm *,
+        const AMediaDrmScope *scope, const uint8_t *init, size_t initSize,
+        const char *mimeType, AMediaDrmKeyType keyType,
+        const AMediaDrmKeyValue *optionalParameters,
+        size_t numOptionalParameters, const uint8_t **keyRequest,
+        size_t *keyRequestSize, const char **defaultUrl,
+        AMediaDrmKeyRequestType *keyRequestType) __INTRODUCED_IN(__ANDROID_API_T__);
+
+/**
  * A key response is received from the license server by the app, then it is
  * provided to the DRM engine plugin using provideKeyResponse.  When the
  * response is for an offline key request, a keySetId is returned that can be
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 2d2fcc0..2195657 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -311,6 +311,10 @@
 extern const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND __INTRODUCED_IN(31);
 
+extern const char* AMEDIAFORMAT_KEY_PICTURE_TYPE __INTRODUCED_IN(33);
+extern const char* AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL __INTRODUCED_IN(33);
+extern const char* AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE __INTRODUCED_IN(33);
+
 extern const char* AMEDIAFORMAT_VIDEO_QP_B_MAX __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_VIDEO_QP_B_MIN __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_VIDEO_QP_I_MAX __INTRODUCED_IN(31);
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 6f275c7..c8faced 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -203,6 +203,7 @@
     AMediaCodec_releaseOutputBuffer;
     AMediaCodec_releaseOutputBufferAtTime;
     AMediaCodec_setAsyncNotifyCallback; # introduced=28
+    AMediaCodec_setOnFrameRenderedCallback; # introduced=Tiramisu
     AMediaCodec_setOutputSurface; # introduced=24
     AMediaCodec_setParameters; # introduced=26
     AMediaCodec_setInputSurface; # introduced=26
@@ -229,6 +230,7 @@
     AMediaDrm_decrypt;
     AMediaDrm_encrypt;
     AMediaDrm_getKeyRequest;
+    AMediaDrm_getKeyRequestWithDefaultUrlAndType; # introduced=Tiramisu
     AMediaDrm_getPropertyByteArray;
     AMediaDrm_getPropertyString;
     AMediaDrm_getProvisionRequest;
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
index b17541d..75d73bf 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -30,7 +30,8 @@
         android:roundIcon="@mipmap/ic_launcher_round"
         android:supportsRtl="true"
         android:theme="@style/AppTheme">
-        <activity android:name="com.android.media.samplevideoencoder.MainActivity">
+        <activity android:name="com.android.media.samplevideoencoder.MainActivity"
+            android:exported="true">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
                 <category android:name="android.intent.category.LAUNCHER" />
@@ -42,4 +43,4 @@
         android:targetPackage="com.android.media.samplevideoencoder"
         android:label="SampleVideoEncoder Test"/>
 
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 2e06da5..4b44dcf 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,7 +69,6 @@
 java_defaults {
     name: "MediaBenchmark-defaults",
 
-    sdk_version: "system_current",
     min_sdk_version: "28",
-    target_sdk_version: "29",
+    target_sdk_version: "30",
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b2aee1a..b222d47 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -17,21 +17,21 @@
 buildscript {
     repositories {
         google()
-        jcenter()
+        mavenCentral()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.5.0'
+        classpath 'com.android.tools.build:gradle:4.2.1'
     }
 }
 
 apply plugin: 'com.android.application'
 
 android {
-    compileSdkVersion 29
+    compileSdkVersion 30
     defaultConfig {
         applicationId "com.android.media.benchmark"
         minSdkVersion 28
-        targetSdkVersion 29
+        targetSdkVersion 30
         versionCode 1
         versionName "1.0"
         testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
@@ -57,20 +57,20 @@
     externalNativeBuild {
         cmake {
             path "src/main/cpp/CMakeLists.txt"
-            version "3.10.2"
+            version "3.18.1"
         }
     }
 }
 
 repositories {
     google()
-    jcenter()
+    mavenCentral()
 }
 
 dependencies {
     implementation fileTree(dir: 'libs', include: ['*.jar'])
-    implementation 'androidx.appcompat:appcompat:1.1.0'
-    testImplementation 'junit:junit:4.12'
-    androidTestImplementation 'androidx.test:runner:1.2.0'
-    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+    implementation 'androidx.appcompat:appcompat:1.3.0'
+    testImplementation 'junit:junit:4.13.2'
+    androidTestImplementation 'androidx.test:runner:1.3.0'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
 }
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
index af92424..0192d68 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -9,7 +9,6 @@
 
 cc_test_library {
     name: "libmediabenchmark_jni",
-    sdk_version: "current",
 
     defaults: [
         "libmediabenchmark_common-defaults",
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index 6b54c6a..7988979 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -55,7 +55,6 @@
 
 cc_defaults {
     name: "libmediabenchmark-defaults",
-    sdk_version: "current",
     stl: "c++_shared",
 
     shared_libs: [
@@ -95,10 +94,6 @@
         "libmediabenchmark_soft_sanitize_all-defaults",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/codec2/hidl/client/include",
-    ],
-
     shared_libs: [
         "libcodec2_client",
         "libmediandk",
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.cpp b/media/tests/benchmark/src/native/extractor/Extractor.cpp
index f0bb3b9..3bdfbad 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.cpp
+++ b/media/tests/benchmark/src/native/extractor/Extractor.cpp
@@ -124,9 +124,7 @@
 
     int64_t sTime = mStats->getCurTime();
     if (mExtractor) {
-        // TODO: (b/140128505) Multiple calls result in DoS.
-        // Uncomment call to AMediaExtractor_delete() once this is resolved
-        // AMediaExtractor_delete(mExtractor);
+        AMediaExtractor_delete(mExtractor);
         mExtractor = nullptr;
     }
     int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index 0fbd20d..9a8caa3 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -33,7 +33,12 @@
 
     srcs: ["ExtractorTest.cpp"],
 
-    static_libs: ["libmediabenchmark_extractor"]
+    static_libs: ["libmediabenchmark_extractor"],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
 }
 
 cc_test {
@@ -50,6 +55,11 @@
         "libmediabenchmark_extractor",
         "libmediabenchmark_decoder",
     ],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
 }
 
 cc_test {
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 81ef02a..3666724 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -21,6 +21,8 @@
 #include <iostream>
 #include <limits>
 
+#include <android/binder_process.h>
+
 #include "BenchmarkTestEnvironment.h"
 #include "Decoder.h"
 
@@ -175,6 +177,7 @@
                                             "c2.android.hevc.decoder", true)));
 
 int main(int argc, char **argv) {
+    ABinderProcess_startThreadPool();
     gEnv = new BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index d14d15b..27ee9ba 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -19,6 +19,8 @@
 
 #include <gtest/gtest.h>
 
+#include <android/binder_process.h>
+
 #include "BenchmarkTestEnvironment.h"
 #include "Extractor.h"
 
@@ -73,6 +75,7 @@
                                                      0)));
 
 int main(int argc, char **argv) {
+    ABinderProcess_startThreadPool();
     gEnv = new BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 73c4e3b..a38ef57 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -28,12 +28,18 @@
         "AImageReaderUtils.cpp",
         "BatteryNotifier.cpp",
         "ISchedulingPolicyService.cpp",
+        "Library.cpp",
         "LimitProcessMemory.cpp",
+        "MediaUtilsDelayed.cpp",
         "MemoryLeakTrackUtil.cpp",
+        "MethodStatistics.cpp",
+        "Process.cpp",
         "ProcessInfo.cpp",
         "SchedulingPolicyService.cpp",
         "ServiceUtilities.cpp",
+        "ThreadSnapshot.cpp",
         "TimeCheck.cpp",
+        "TimerThread.cpp",
     ],
     static_libs: [
         "libc_malloc_debug_backtrace",
@@ -42,7 +48,7 @@
     ],
     shared_libs: [
         "libaudioclient_aidl_conversion",
-        "libaudioutils", // for clock.h
+        "libaudioutils", // for clock.h, Statistics.h
         "libbinder",
         "libcutils",
         "liblog",
@@ -51,6 +57,7 @@
         "libpermission",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
+        "packagemanager_aidl-cpp",
     ],
     export_static_lib_headers: [
         "libbatterystats_aidl",
@@ -73,6 +80,10 @@
         "libpermission",
     ],
 
+    required: [
+        "libmediautils_delayed",  // lazy loaded
+    ],
+
     include_dirs: [
         // For DEBUGGER_SIGNAL
         "system/core/debuggerd/include",
@@ -82,6 +93,23 @@
 }
 
 cc_library {
+    name: "libmediautils_delayed", // match with MEDIAUTILS_DELAYED_LIBRARY_NAME
+    srcs: [
+        "MediaUtilsDelayedLibrary.cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+    shared_libs: [
+        "liblog",
+        "libutils",
+        "libutilscallstack",
+    ],
+}
+
+cc_library {
     name: "libmediautils_vendor",
     vendor_available: true,  // required for platform/hardware/interfaces
     srcs: [
@@ -117,3 +145,15 @@
 
     export_include_dirs: ["include"],
 }
+
+cc_test {
+    name: "libmediautils_test",
+    srcs: [
+        "memory-test.cpp",
+        "TimerThread-test.cpp",
+    ],
+    shared_libs: [
+      "libmediautils",
+      "libutils",
+    ]
+}
diff --git a/media/utils/Library.cpp b/media/utils/Library.cpp
new file mode 100644
index 0000000..c1e22bf
--- /dev/null
+++ b/media/utils/Library.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Library"
+#include <utils/Log.h>
+#include <mediautils/Library.h>
+
+namespace {
+
+std::string dlerrorIfPresent() {
+    const char *dlerr = dlerror();
+    if (dlerr == nullptr) return "dlerror: none";
+    return std::string("dlerror: '").append(dlerr).append("'");
+}
+
+}
+namespace android::mediautils {
+
+std::shared_ptr<void> loadLibrary(const char *libraryName, int flags) {
+    std::shared_ptr<void> library{
+        dlopen(libraryName, flags),
+        [](void *lib) {
+            if (lib != nullptr) {
+                const int ret = dlclose(lib);
+                ALOGW_IF(ret != 0, "%s: dlclose(%p) == %d, %s",
+                        __func__, lib, ret, dlerrorIfPresent().c_str());
+            }
+        }
+    };
+
+    if (!library) {
+        ALOGW("%s: cannot load libraryName %s, %s",
+            __func__, libraryName, dlerrorIfPresent().c_str());
+        return {};
+    }
+    return library;
+}
+
+std::shared_ptr<void> getUntypedObjectFromLibrary(
+        const char *objectName, const std::shared_ptr<void>& library) {
+    if (!library) {
+        ALOGW("%s: null library, cannot load objectName %s", __func__, objectName);
+        return {};
+    }
+    void *ptr = dlsym(library.get(), objectName);
+    if (ptr == nullptr) {
+        ALOGW("%s: cannot load objectName %s, %s",
+                __func__, objectName, dlerrorIfPresent().c_str());
+        return {};
+    }
+
+    // Note: we use the "aliasing" constructor of the std::shared_ptr.
+    //
+    // https://en.cppreference.com/w/cpp/memory/shared_ptr/shared_ptr
+    //
+    return { library, ptr };  // returns shared_ptr to ptr, but ref counted on library.
+}
+
+} // namespace android::mediautils
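
A hedged usage sketch for the two helpers above (the library and symbol names are hypothetical; the real consumer is MediaUtilsDelayed.cpp below):

    #include <dlfcn.h>
    #include <mediautils/Library.h>

    void loadPluginExample() {
        using namespace android::mediautils;
        std::shared_ptr<void> lib = loadLibrary("libexample_plugin.so", RTLD_NOW | RTLD_LOCAL);
        if (!lib) return;                 // failure already logged with dlerror()

        // The returned pointer uses the aliasing constructor, so holding it also
        // keeps the library mapped.
        std::shared_ptr<void> sym = getUntypedObjectFromLibrary("gExampleTable", lib);
        if (!sym) return;
        // Cast sym.get() to the real type of gExampleTable before use.
    }
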
diff --git a/media/utils/MediaUtilsDelayed.cpp b/media/utils/MediaUtilsDelayed.cpp
new file mode 100644
index 0000000..c6c092d
--- /dev/null
+++ b/media/utils/MediaUtilsDelayed.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/Library.h>
+#include <mediautils/MediaUtilsDelayed.h>
+#include "MediaUtilsDelayedLibrary.h"
+
+#define LOG_TAG "MediaUtilsDelayed"
+#include <utils/Log.h>
+#include <memory>
+
+namespace android::mediautils {
+
+namespace {
+//  Specific implementation details for MediaUtils Delayed Library.
+
+// The following use static Meyers-singleton caches instead of relying solely on
+// the refcounted management provided above. This is for speed.
+std::shared_ptr<void> getDelayedLibrary() {
+    static std::shared_ptr<void> library = loadLibrary(MEDIAUTILS_DELAYED_LIBRARY_NAME);
+    return library;
+}
+
+// Get the delayed dispatch table.  This is refcounted and keeps the underlying library alive.
+std::shared_ptr<delayed_library::DelayedDispatchTable> getDelayedDispatchTable() {
+    static auto delayedDispatchTable =
+            getObjectFromLibrary<delayed_library::DelayedDispatchTable>(
+                    MEDIAUTILS_DELAYED_DISPATCH_TABLE_SYMBOL_NAME, getDelayedLibrary());
+    return delayedDispatchTable;
+}
+
+} // namespace
+
+// Public implementations of methods here.
+
+std::string getCallStackStringForTid(pid_t tid) {
+    auto delayedDispatchTable = getDelayedDispatchTable();
+    if (!delayedDispatchTable) return {};  // on failure, return empty string
+    return delayedDispatchTable->getCallStackStringForTid(tid);
+}
+
+} // android::mediautils
diff --git a/media/utils/MediaUtilsDelayedLibrary.cpp b/media/utils/MediaUtilsDelayedLibrary.cpp
new file mode 100644
index 0000000..9054c1a
--- /dev/null
+++ b/media/utils/MediaUtilsDelayedLibrary.cpp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MediaUtilsDelayedLibrary.h"
+#include <utils/CallStack.h>
+
+// Methods that are dynamically linked.
+namespace {
+
+std::string getCallStackStringForTid(pid_t tid) {
+    android::CallStack cs{};
+    cs.update(0 /* ignoreDepth */, tid);
+    return cs.toString().c_str();
+}
+
+} // namespace
+
+// leave global, this is picked up from dynamic linking
+android::mediautils::delayed_library::DelayedDispatchTable gDelayedDispatchTable {
+    getCallStackStringForTid,
+};
diff --git a/media/utils/MediaUtilsDelayedLibrary.h b/media/utils/MediaUtilsDelayedLibrary.h
new file mode 100644
index 0000000..3d72a3a
--- /dev/null
+++ b/media/utils/MediaUtilsDelayedLibrary.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <unistd.h>
+
+// This should not be directly included by clients.
+// Use MediaUtilsDelayed.h instead.
+
+namespace android::mediautils::delayed_library {
+
+// Use a dispatch table to return methods from the delayed library
+struct DelayedDispatchTable {
+    std::string (*getCallStackStringForTid)(pid_t tid);
+};
+
+// Match with Android.bp and MediaUtilsDelayed.cpp.
+#define MEDIAUTILS_DELAYED_LIBRARY_NAME "libmediautils_delayed.so"
+
+// Match with MediaUtilsDelayed.cpp and MediaUtilsDelayedLibrary.cpp
+#define MEDIAUTILS_DELAYED_DISPATCH_TABLE_SYMBOL_NAME "gDelayedDispatchTable"
+
+} // namespace android::mediautils::delayed_library
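
A hedged sketch of how client code uses the delayed facility defined by the files above: only the public wrapper header is included, and the heavyweight libutilscallstack dependency is dlopen'ed on first use through the dispatch table (the log tag and function name below are illustrative):

    #define LOG_TAG "DelayedExample"
    #include <unistd.h>
    #include <mediautils/MediaUtilsDelayed.h>
    #include <utils/Log.h>

    void logMyStack() {
        // Returns an empty string if libmediautils_delayed.so could not be loaded.
        const std::string stack = android::mediautils::getCallStackStringForTid(gettid());
        ALOGD("current stack:\n%s", stack.c_str());
    }
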
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
new file mode 100644
index 0000000..b179b20
--- /dev/null
+++ b/media/utils/MethodStatistics.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/MethodStatistics.h>
+
+namespace android::mediautils {
+
+// Repository for MethodStatistics Objects
+
+std::shared_ptr<std::vector<std::string>>
+getStatisticsClassesForModule(std::string_view moduleName) {
+    static const std::map<std::string, std::shared_ptr<std::vector<std::string>>> m {
+        {
+            METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL,
+            std::shared_ptr<std::vector<std::string>>(
+                new std::vector<std::string>{
+                "DeviceHalHidl",
+                "EffectHalHidl",
+                "StreamInHalHidl",
+                "StreamOutHalHidl",
+              })
+        },
+    };
+    auto it = m.find({moduleName.begin(), moduleName.end()});
+    if (it == m.end()) return {};
+    return it->second;
+}
+
+static void addClassesToMap(const std::shared_ptr<std::vector<std::string>> &classNames,
+        std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> &map) {
+    if (classNames) {
+        for (const auto& className : *classNames) {
+            map.emplace(className, std::make_shared<MethodStatistics<std::string>>());
+        }
+    }
+}
+
+// singleton statistics for DeviceHalHidl StreamOutHalHidl StreamInHalHidl
+std::shared_ptr<MethodStatistics<std::string>>
+getStatisticsForClass(std::string_view className) {
+    static const std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m =
+        // copy elided initialization of map m.
+        [](){
+            std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m;
+            addClassesToMap(
+                    getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL),
+                    m);
+            return m;
+        }();
+
+    auto it = m.find({className.begin(), className.end()});
+    if (it == m.end()) return {};
+    return it->second;
+}
+
+} // android::mediautils
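
A hedged lookup sketch for the repository above (how the returned MethodStatistics object is used is declared in mediautils/MethodStatistics.h, which is not part of this excerpt):

    #include <mediautils/MethodStatistics.h>

    void visitAudioHalStatistics() {
        using namespace android::mediautils;
        const auto classes =
                getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL);
        if (!classes) return;
        for (const auto& className : *classes) {
            auto stats = getStatisticsForClass(className);  // singleton per class
            if (stats) { /* record or report per-method timings through its API */ }
        }
    }
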
diff --git a/media/utils/Process.cpp b/media/utils/Process.cpp
new file mode 100644
index 0000000..8fe8003
--- /dev/null
+++ b/media/utils/Process.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Process"
+#include <utils/Log.h>
+#include <mediautils/Process.h>
+
+#include <android-base/file.h>
+#include <android-base/strings.h>
+#include <cstdlib>
+
+namespace {
+
+void processLine(std::string_view s, std::map<std::string, double>& m) {
+    if (s.empty()) return;
+
+    const size_t colon_pos = s.find(':');
+    if (colon_pos == std::string_view::npos) return;
+
+    const size_t space_pos = s.find(' ');
+    if (space_pos == 0 || space_pos == std::string_view::npos || space_pos > colon_pos) return;
+    std::string key(s.data(), s.data() + space_pos);
+
+    const size_t value_pos = s.find_first_not_of(' ', colon_pos + 1);
+    if (value_pos == std::string_view::npos) return;
+
+    const double value = strtod(s.data() + value_pos, nullptr /* end */);
+    m[std::move(key)] = value;
+}
+
+} // namespace
+
+namespace android::mediautils {
+
+std::string getThreadSchedAsString(pid_t tid) {
+    const pid_t pid = getpid();
+    const std::string path = std::string("/proc/").append(std::to_string(pid))
+            .append("/task/").append(std::to_string(tid)).append("/sched");
+    std::string sched;
+    (void)android::base::ReadFileToString(path.c_str(), &sched);
+    return sched;
+}
+
+std::map<std::string, double> parseThreadSchedString(const std::string& schedString) {
+    std::map<std::string, double> m;
+    if (schedString.empty()) return m;
+    std::vector<std::string> stringlist = android::base::Split(schedString, "\n");
+
+    // Note: only numeric values are stored; the summary line stringlist[0] is skipped.
+    for (size_t i = 2; i < stringlist.size(); ++i) {
+        processLine(stringlist[i], m);
+    }
+    return m;
+}
+
+} // namespace android::mediautils
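
A minimal sketch feeding a hand-written /proc/<pid>/task/<tid>/sched style snippet (contents illustrative only) through the parser above; the first two lines are skipped by design.

    #include <mediautils/Process.h>

    void parseExample() {
        const std::string sched =
                "audioserver (1234, #threads: 40)\n"
                "----------------------------------------------------------\n"
                "se.sum_exec_runtime                          :    123.456789\n"
                "se.nr_migrations                             :             7\n";
        const auto m = android::mediautils::parseThreadSchedString(sched);
        // m.at("se.sum_exec_runtime") == 123.456789, m.at("se.nr_migrations") == 7.0
    }
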
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index e212794..da199c4 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -64,11 +64,27 @@
     return true;
 }
 
-bool ProcessInfo::isValidPid(int pid) {
+bool ProcessInfo::isPidTrusted(int pid) {
+    return isPidUidTrusted(pid, -1);
+}
+
+bool ProcessInfo::isPidUidTrusted(int pid, int uid) {
     int callingPid = IPCThreadState::self()->getCallingPid();
     int callingUid = IPCThreadState::self()->getCallingUid();
-    // Trust it if this is called from the same process otherwise pid has to match the calling pid.
-    return (callingPid == getpid()) || (callingPid == pid) || (callingUid == AID_MEDIA);
+    // Always trust when the caller is acting on their own behalf.
+    if (pid == callingPid && (uid == callingUid || uid == -1)) { // UID can be optional
+        return true;
+    }
+    // Implicitly trust when the caller is our own process.
+    if (callingPid == getpid()) {
+        return true;
+    }
+    // Implicitly trust when a media process is calling.
+    if (callingUid == AID_MEDIA) {
+        return true;
+    }
+    // Otherwise, allow the caller to act as another process when the caller has permissions.
+    return checkCallingPermission(String16("android.permission.MEDIA_RESOURCE_OVERRIDE_PID"));
 }
 
 bool ProcessInfo::overrideProcessInfo(int pid, int procState, int oomScore) {
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 9c7b863..07f4529 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -45,6 +45,7 @@
 static const String16 sAndroidPermissionRecordAudio("android.permission.RECORD_AUDIO");
 static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
 static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
+static const String16 sCallAudioInterception("android.permission.CALL_AUDIO_INTERCEPTION");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
         const std::optional<String16> opPackageName, uid_t uid) {
@@ -71,6 +72,7 @@
   switch (source) {
     case AUDIO_SOURCE_HOTWORD:
       return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
+    case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
     case AUDIO_SOURCE_REMOTE_SUBMIX:
       return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
     case AUDIO_SOURCE_VOICE_DOWNLINK:
@@ -101,7 +103,11 @@
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
     myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
-    myAttributionSource.token = sp<BBinder>::make();
+    if (callerAttributionSource.token != nullptr) {
+        myAttributionSource.token = callerAttributionSource.token;
+    } else {
+        myAttributionSource.token = sp<BBinder>::make();
+    }
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
@@ -175,10 +181,14 @@
 
 bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
     if (isAudioServerOrRootUid(uid)) return true;
     static const String16 sCaptureAudioOutput("android.permission.CAPTURE_AUDIO_OUTPUT");
-    bool ok = PermissionCache::checkPermission(sCaptureAudioOutput, pid, uid);
+    // Use PermissionChecker, which includes some logic for allowing the isolated
+    // HotwordDetectionService to hold certain permissions.
+    permission::PermissionChecker permissionChecker;
+    bool ok = (permissionChecker.checkPermissionForPreflight(
+            sCaptureAudioOutput, attributionSource, String16(),
+            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED);
     if (!ok) ALOGV("Request requires android.permission.CAPTURE_AUDIO_OUTPUT");
     return ok;
 }
@@ -214,6 +224,17 @@
     return ok;
 }
 
+bool accessUltrasoundAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
+    if (isAudioServerOrRootUid(uid)) return true;
+    static const String16 sAccessUltrasound(
+        "android.permission.ACCESS_ULTRASOUND");
+    bool ok = PermissionCache::checkPermission(sAccessUltrasound, pid, uid);
+    if (!ok) ALOGE("Request requires android.permission.ACCESS_ULTRASOUND");
+    return ok;
+}
+
 bool captureHotwordAllowed(const AttributionSourceState& attributionSource) {
     // CAPTURE_AUDIO_HOTWORD permission implies RECORD_AUDIO permission
     bool ok = recordingAllowed(attributionSource);
@@ -304,6 +325,17 @@
     return ok;
 }
 
+bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
+
+    // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
+    bool ok = PermissionCache::checkPermission(sCallAudioInterception, pid, uid);
+    if (!ok) ALOGV("%s(): android.permission.CALL_AUDIO_INTERCEPTION denied for uid %d",
+        __func__, uid);
+    return ok;
+}
+
 AttributionSourceState getCallingAttributionSource() {
     AttributionSourceState attributionSource = AttributionSourceState();
     attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(
diff --git a/media/utils/ThreadSnapshot.cpp b/media/utils/ThreadSnapshot.cpp
new file mode 100644
index 0000000..382738e
--- /dev/null
+++ b/media/utils/ThreadSnapshot.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ThreadSnapshot"
+#include <utils/Log.h>
+#include <utils/Timers.h>
+#include <mediautils/ThreadSnapshot.h>
+
+#include <mediautils/Process.h>
+
+namespace android::mediautils {
+
+pid_t ThreadSnapshot::getTid() const {
+    std::lock_guard lg(mLock);
+    return mState.mTid;
+}
+
+void ThreadSnapshot::setTid(pid_t tid) {
+    std::lock_guard lg(mLock);
+    if (mState.mTid == tid) return;
+    mState.reset(tid);
+}
+
+void ThreadSnapshot::reset() {
+    std::lock_guard lg(mLock);
+    mState.reset(mState.mTid);
+}
+
+void ThreadSnapshot::onBegin() {
+    std::string sched = getThreadSchedAsString(getTid()); // tid could race here,
+                                                          // accept as benign.
+    std::lock_guard lg(mLock);
+    mState.onBegin(std::move(sched));
+}
+
+void ThreadSnapshot::onEnd() {
+    std::lock_guard lg(mLock);
+    mState.onEnd();
+}
+
+std::string ThreadSnapshot::toString() const {
+    // Make a local copy of the stats data under lock.
+    State state;
+    {
+        std::lock_guard lg(mLock);
+        state = mState;
+    }
+    return state.toString();
+}
+
+void ThreadSnapshot::State::reset(pid_t tid) {
+    mTid = tid;
+    mBeginTimeNs = -2;
+    mEndTimeNs = -1;
+    mCumulativeTimeNs = 0;
+    mBeginSched.clear();
+}
+
+void ThreadSnapshot::State::onBegin(std::string sched) {
+    if (mBeginTimeNs < mEndTimeNs) {
+        mBeginTimeNs = systemTime();
+        mBeginSched = std::move(sched);
+    }
+}
+
+void ThreadSnapshot::State::onEnd() {
+    if (mEndTimeNs < mBeginTimeNs) {
+        mEndTimeNs = systemTime();
+        mCumulativeTimeNs += mEndTimeNs - mBeginTimeNs;
+    }
+}
+
+std::string ThreadSnapshot::State::toString() const {
+    if (mBeginTimeNs < 0) return {};  // never begun.
+
+    // compute time intervals.
+    const int64_t nowNs = systemTime();
+    int64_t cumulativeTimeNs = mCumulativeTimeNs;
+    int64_t diffNs = mEndTimeNs - mBeginTimeNs; // if onEnd() isn't matched, diffNs < 0.
+    if (diffNs < 0) {
+        diffNs = nowNs - mBeginTimeNs;
+        cumulativeTimeNs += diffNs;
+    }
+    // normalization for rate variables
+    const double lastRunPerSec =  1e9 / diffNs;
+    const double totalPerSec = 1e9 / cumulativeTimeNs;
+
+    // HANDLE THE SCHEDULER STATISTICS HERE
+    // current and differential statistics for the scheduler.
+    std::string schedNow = getThreadSchedAsString(mTid);
+    const auto schedMapThen = parseThreadSchedString(mBeginSched);
+    const auto schedMapNow = parseThreadSchedString(schedNow);
+    static const char * schedDiffKeyList[] = {
+        "se.sum_exec_runtime",
+        "se.nr_migrations",
+        "se.statistics.wait_sum",
+        "se.statistics.wait_count",
+        "se.statistics.iowait_sum",
+        "se.statistics.iowait_count",
+        "se.statistics.nr_forced_migrations",
+        "nr_involuntary_switches",
+    };
+
+    // compute differential rate statistics.
+    std::string diffString;
+    for (const auto diffKey : schedDiffKeyList) {
+        if (auto itThen = schedMapThen.find(diffKey);
+                itThen != schedMapThen.end()) {
+
+            if (auto itNow = schedMapNow.find(diffKey);
+                    itNow != schedMapNow.end()) {
+                auto diff = itNow->second - itThen->second;
+                diff *= lastRunPerSec;
+                auto total = itNow->second * totalPerSec;
+                diffString.append(diffKey).append("  last-run:")
+                        .append(std::to_string(diff))
+                        .append("  cumulative:")
+                        .append(std::to_string(total))
+                        .append("\n");
+            }
+        }
+    }
+
+    if (!diffString.empty()) {
+        schedNow.append("*** per second stats ***\n").append(diffString);
+    }
+
+    // Return snapshot string.
+    return schedNow;
+}
+
+} // android::mediautils
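
ThreadSnapshot is used by bracketing an active period with onBegin()/onEnd() and later dumping
the accumulated scheduler statistics. A minimal usage sketch, assuming default construction and
a known worker tid (the function and variable names here are illustrative only):

    #include <string>
    #include <unistd.h>
    #include <mediautils/ThreadSnapshot.h>

    std::string profileWorker(pid_t workerTid) {
        android::mediautils::ThreadSnapshot snapshot;  // construction details per ThreadSnapshot.h
        snapshot.setTid(workerTid);  // thread whose /proc sched state is sampled
        snapshot.onBegin();          // capture scheduler state at the start of the active period
        // ... the work of interest runs on workerTid here ...
        snapshot.onEnd();            // close the interval; elapsed time accumulates
        return snapshot.toString();  // current sched state plus per-second differential stats
    }
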
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 819e146..0848eb3 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -16,12 +16,80 @@
 
 #define LOG_TAG "TimeCheck"
 
-#include <utils/Log.h>
-#include <mediautils/TimeCheck.h>
+#include <optional>
+
+#include <android-base/logging.h>
+#include <audio_utils/clock.h>
 #include <mediautils/EventLog.h>
+#include <mediautils/MethodStatistics.h>
+#include <mediautils/TimeCheck.h>
+#include <utils/Log.h>
 #include "debuggerd/handler.h"
 
-namespace android {
+namespace android::mediautils {
+
+/**
+ * Returns the std::string "HH:MM:SS.MSc" from a system_clock time_point.
+ */
+std::string formatTime(std::chrono::system_clock::time_point t) {
+    auto time_string = audio_utils_time_string_from_ns(
+            std::chrono::nanoseconds(t.time_since_epoch()).count());
+
+    // The time string is 19 characters (including null termination).
+    // Example: "03-27 16:47:06.187"
+    //           MM DD HH MM SS MS
+    // We offset by 6 to get HH:MM:SS.MSc
+    //
+    return time_string.time + 6; // offset to remove month/day.
+}
+
+/**
+ * Finds the end of the common time prefix.
+ *
+ * This is used as an option to remove the common time prefix to avoid
+ * unnecessarily duplicated strings.
+ *
+ * \param time1 a time string
+ * \param time2 a time string
+ * \return      the position where the common time prefix ends. For abbreviated
+ *              printing of time2, offset the character pointer by this position.
+ */
+static size_t commonTimePrefixPosition(std::string_view time1, std::string_view time2) {
+    const size_t endPos = std::min(time1.size(), time2.size());
+    size_t i;
+
+    // Find location of the first mismatch between strings
+    for (i = 0; ; ++i) {
+        if (i == endPos) {
+            return i; // strings match completely to the length of one of the strings.
+        }
+        if (time1[i] != time2[i]) {
+            break;
+        }
+        if (time1[i] == '\0') {
+            return i; // "printed" strings match completely.  No need to check further.
+        }
+    }
+
+    // Go backwards until we find a delimiter or space.
+    for (; i > 0
+           && isdigit(time1[i]) // still a number
+           && time1[i - 1] != ' '
+         ; --i) {
+    }
+    return i;
+}
+
+/**
+ * Returns the unique suffix of time2 that isn't present in time1.
+ *
+ * If time2 is identical to time1, then an empty string_view is returned.
+ * This method is used to elide the common prefix when printing times.
+ */
+std::string_view timeSuffix(std::string_view time1, std::string_view time2) {
+    const size_t pos = commonTimePrefixPosition(time1, time2);
+    return time2.substr(pos);
+}
 
 // Audio HAL server pids vector used to generate audio HAL processes tombstone
 // when audioserver watchdog triggers.
@@ -36,7 +104,7 @@
 void TimeCheck::accessAudioHalPids(std::vector<pid_t>* pids, bool update) {
     static constexpr int kNumAudioHalPidsVectors = 3;
     static std::vector<pid_t> audioHalPids[kNumAudioHalPidsVectors];
-    static std::atomic<int> curAudioHalPids = 0;
+    static std::atomic<unsigned> curAudioHalPids = 0;
 
     if (update) {
         audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
@@ -58,84 +126,121 @@
 }
 
 /* static */
-sp<TimeCheck::TimeCheckThread> TimeCheck::getTimeCheckThread()
-{
-    static sp<TimeCheck::TimeCheckThread> sTimeCheckThread = new TimeCheck::TimeCheckThread();
+TimerThread& TimeCheck::getTimeCheckThread() {
+    static TimerThread sTimeCheckThread{};
     return sTimeCheckThread;
 }
 
-TimeCheck::TimeCheck(const char *tag, uint32_t timeoutMs)
-    : mEndTimeNs(getTimeCheckThread()->startMonitoring(tag, timeoutMs))
-{
+/* static */
+std::string TimeCheck::toString() {
+    // note: pending and retired are individually locked for maximum concurrency,
+    // so the snapshot is not instantaneous at a single time.
+    return getTimeCheckThread().toString();
 }
 
+TimeCheck::TimeCheck(std::string tag, OnTimerFunc&& onTimer, uint32_t timeoutMs,
+        bool crashOnTimeout)
+    : mTimeCheckHandler(new TimeCheckHandler{
+            std::move(tag), std::move(onTimer), crashOnTimeout,
+            std::chrono::system_clock::now(), gettid()})
+    , mTimerHandle(timeoutMs == 0
+              ? getTimeCheckThread().trackTask(mTimeCheckHandler->tag)
+              : getTimeCheckThread().scheduleTask(
+                      mTimeCheckHandler->tag,
+                      // Pass in all the arguments by value to this task for safety.
+                      // The thread could call the callback before the constructor is finished.
+                      // The destructor is not blocked on callback.
+                      [ timeCheckHandler = mTimeCheckHandler ] {
+                          timeCheckHandler->onTimeout();
+                      },
+                      std::chrono::milliseconds(timeoutMs))) {}
+
 TimeCheck::~TimeCheck() {
-    getTimeCheckThread()->stopMonitoring(mEndTimeNs);
-}
-
-TimeCheck::TimeCheckThread::~TimeCheckThread()
-{
-    AutoMutex _l(mMutex);
-    requestExit();
-    mMonitorRequests.clear();
-    mCond.signal();
-}
-
-nsecs_t TimeCheck::TimeCheckThread::startMonitoring(const char *tag, uint32_t timeoutMs) {
-    Mutex::Autolock _l(mMutex);
-    nsecs_t endTimeNs = systemTime() + milliseconds(timeoutMs);
-    for (; mMonitorRequests.indexOfKey(endTimeNs) >= 0; ++endTimeNs);
-    mMonitorRequests.add(endTimeNs, tag);
-    mCond.signal();
-    return endTimeNs;
-}
-
-void TimeCheck::TimeCheckThread::stopMonitoring(nsecs_t endTimeNs) {
-    Mutex::Autolock _l(mMutex);
-    mMonitorRequests.removeItem(endTimeNs);
-    mCond.signal();
-}
-
-bool TimeCheck::TimeCheckThread::threadLoop()
-{
-    status_t status = TIMED_OUT;
-    {
-        AutoMutex _l(mMutex);
-
-        if (exitPending()) {
-            return false;
-        }
-
-        nsecs_t endTimeNs = INT64_MAX;
-        const char *tag = "<unspecified>";
-        // KeyedVector mMonitorRequests is ordered so take first entry as next timeout
-        if (mMonitorRequests.size() != 0) {
-            endTimeNs = mMonitorRequests.keyAt(0);
-            tag = mMonitorRequests.valueAt(0);
-        }
-
-        const nsecs_t waitTimeNs = endTimeNs - systemTime();
-        if (waitTimeNs > 0) {
-            status = mCond.waitRelative(mMutex, waitTimeNs);
-        }
-        if (status != NO_ERROR) {
-            // Generate audio HAL processes tombstones and allow time to complete
-            // before forcing restart
-            std::vector<pid_t> pids = getAudioHalPids();
-            if (pids.size() != 0) {
-                for (const auto& pid : pids) {
-                    ALOGI("requesting tombstone for pid: %d", pid);
-                    sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
-                }
-                sleep(1);
-            } else {
-                ALOGI("No HAL process pid available, skipping tombstones");
-            }
-            LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag);
-            LOG_ALWAYS_FATAL("TimeCheck timeout for %s", tag);
-        }
+    if (mTimeCheckHandler) {
+        mTimeCheckHandler->onCancel(mTimerHandle);
     }
-    return true;
 }
 
-}; // namespace android
+void TimeCheck::TimeCheckHandler::onCancel(TimerThread::Handle timerHandle) const
+{
+    if (TimeCheck::getTimeCheckThread().cancelTask(timerHandle) && onTimer) {
+        const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+        onTimer(false /* timeout */,
+                std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                        endTime - startTime).count());
+    }
+}
+
+void TimeCheck::TimeCheckHandler::onTimeout() const
+{
+    const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+    if (onTimer) {
+        onTimer(true /* timeout */,
+                std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                        endTime - startTime).count());
+    }
+
+    if (!crashOnTimeout) return;
+
+    // Generate the TimerThread summary string early before sending signals to the
+    // HAL processes which can affect thread behavior.
+    const std::string summary = getTimeCheckThread().toString(4 /* retiredCount */);
+
+    // Generate audio HAL processes tombstones and allow time to complete
+    // before forcing restart
+    std::vector<pid_t> pids = TimeCheck::getAudioHalPids();
+    std::string halPids = "HAL pids [ ";
+    if (pids.size() != 0) {
+        for (const auto& pid : pids) {
+            ALOGI("requesting tombstone for pid: %d", pid);
+            halPids.append(std::to_string(pid)).append(" ");
+            sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
+        }
+        sleep(1);
+    } else {
+        ALOGI("No HAL process pid available, skipping tombstones");
+    }
+    halPids.append("]");
+
+    LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag.c_str());
+
+    // Create abort message string - caution: this can be very large.
+    const std::string abortMessage = std::string("TimeCheck timeout for ")
+            .append(tag)
+            .append(" scheduled ").append(formatTime(startTime))
+            .append(" on thread ").append(std::to_string(tid)).append("\n")
+            .append(halPids).append("\n")
+            .append(summary);
+
+    // Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
+    // Log message text may be truncated to less than an
+    // implementation-specific limit (1023 bytes).
+    //
+    // Here, we send the string through android-base/logging.h LOG()
+    // to avoid the size limitation. LOG(FATAL) does an abort whereas
+    // LOG(FATAL_WITHOUT_ABORT) does not abort.
+
+    LOG(FATAL) << abortMessage;
+}
+
+// Automatically create a TimeCheck class for a class and method.
+// This is used for Audio HIDL support.
+mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
+        std::string_view className, std::string_view methodName) {
+    std::shared_ptr<MethodStatistics<std::string>> statistics =
+            mediautils::getStatisticsForClass(className);
+    if (!statistics) return {}; // empty TimeCheck.
+    return mediautils::TimeCheck(
+            std::string(className).append("::").append(methodName),
+            [ clazz = std::string(className), method = std::string(methodName),
+              stats = std::move(statistics) ]
+            (bool timeout, float elapsedMs) {
+                    if (timeout) {
+                        ; // ignored, there is no timeout value.
+                    } else {
+                        stats->event(method, elapsedMs);
+                    }
+            }, 0 /* timeoutMs */);
+}
+
+}  // namespace android::mediautils
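
TimeCheck is now a RAII watchdog: the constructor schedules a task on the shared TimerThread and
the destructor cancels it, invoking onTimer with the elapsed time in either case. A minimal usage
sketch, assuming crashOnTimeout takes a default value (as the three-argument call in
TimeCheckFuzz.cpp suggests) and using an illustrative tag:

    #include <mediautils/TimeCheck.h>

    void monitoredCall() {
        android::mediautils::TimeCheck timeCheck(
                "IAudioFlinger::createTrack",  // illustrative tag
                [](bool timeout, float elapsedMs) {
                    // timeout == true means the deadline fired; otherwise elapsedMs is the
                    // duration measured on the destructor's cancel path.
                    (void)timeout; (void)elapsedMs;
                },
                3000 /* timeoutMs */);
        // ... the monitored work runs here; normal scope exit cancels the timer.
    }

For the time-formatting helpers above, timeSuffix("16:47:06.187", "16:47:08.302") returns
":08.302": the shared hour and minute prefix is elided when the deadline is printed next to the
scheduled time.
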
diff --git a/media/utils/TimerThread-test.cpp b/media/utils/TimerThread-test.cpp
new file mode 100644
index 0000000..93cd64c
--- /dev/null
+++ b/media/utils/TimerThread-test.cpp
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <chrono>
+#include <thread>
+#include <gtest/gtest.h>
+#include <mediautils/TimerThread.h>
+
+using namespace std::chrono_literals;
+using namespace android::mediautils;
+
+namespace {
+
+constexpr auto kJitter = 10ms;
+
+// Each task written by *ToString() will start with a left brace.
+constexpr char REQUEST_START = '{';
+
+inline size_t countChars(std::string_view s, char c) {
+    return std::count(s.begin(), s.end(), c);
+}
+
+TEST(TimerThread, Basic) {
+    std::atomic<bool> taskRan = false;
+    TimerThread thread;
+    thread.scheduleTask("Basic", [&taskRan] { taskRan = true; }, 100ms);
+    std::this_thread::sleep_for(100ms - kJitter);
+    ASSERT_FALSE(taskRan);
+    std::this_thread::sleep_for(2 * kJitter);
+    ASSERT_TRUE(taskRan);
+    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+TEST(TimerThread, Cancel) {
+    std::atomic<bool> taskRan = false;
+    TimerThread thread;
+    TimerThread::Handle handle =
+            thread.scheduleTask("Cancel", [&taskRan] { taskRan = true; }, 100ms);
+    std::this_thread::sleep_for(100ms - kJitter);
+    ASSERT_FALSE(taskRan);
+    ASSERT_TRUE(thread.cancelTask(handle));
+    std::this_thread::sleep_for(2 * kJitter);
+    ASSERT_FALSE(taskRan);
+    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+TEST(TimerThread, CancelAfterRun) {
+    std::atomic<bool> taskRan = false;
+    TimerThread thread;
+    TimerThread::Handle handle =
+            thread.scheduleTask("CancelAfterRun", [&taskRan] { taskRan = true; }, 100ms);
+    std::this_thread::sleep_for(100ms + kJitter);
+    ASSERT_TRUE(taskRan);
+    ASSERT_FALSE(thread.cancelTask(handle));
+    ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+TEST(TimerThread, MultipleTasks) {
+    std::array<std::atomic<bool>, 6> taskRan{};
+    TimerThread thread;
+
+    auto startTime = std::chrono::steady_clock::now();
+
+    thread.scheduleTask("0", [&taskRan] { taskRan[0] = true; }, 300ms);
+    thread.scheduleTask("1", [&taskRan] { taskRan[1] = true; }, 100ms);
+    thread.scheduleTask("2", [&taskRan] { taskRan[2] = true; }, 200ms);
+    thread.scheduleTask("3", [&taskRan] { taskRan[3] = true; }, 400ms);
+    auto handle4 = thread.scheduleTask("4", [&taskRan] { taskRan[4] = true; }, 200ms);
+    thread.scheduleTask("5", [&taskRan] { taskRan[5] = true; }, 200ms);
+
+    // 6 tasks pending
+    ASSERT_EQ(6, countChars(thread.pendingToString(), REQUEST_START));
+    // 0 tasks completed
+    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+
+    // Task 1 should trigger around 100ms.
+    std::this_thread::sleep_until(startTime + 100ms - kJitter);
+    ASSERT_FALSE(taskRan[0]);
+    ASSERT_FALSE(taskRan[1]);
+    ASSERT_FALSE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_FALSE(taskRan[5]);
+
+    std::this_thread::sleep_until(startTime + 100ms + kJitter);
+    ASSERT_FALSE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_FALSE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_FALSE(taskRan[5]);
+
+    // Cancel task 4 before it gets a chance to run.
+    thread.cancelTask(handle4);
+
+    // Tasks 2 and 5 should trigger around 200ms.
+    std::this_thread::sleep_until(startTime + 200ms - kJitter);
+    ASSERT_FALSE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_FALSE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_FALSE(taskRan[5]);
+
+    std::this_thread::sleep_until(startTime + 200ms + kJitter);
+    ASSERT_FALSE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_TRUE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_TRUE(taskRan[5]);
+
+    // Task 0 should trigger around 300ms.
+    std::this_thread::sleep_until(startTime + 300ms - kJitter);
+    ASSERT_FALSE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_TRUE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_TRUE(taskRan[5]);
+
+    std::this_thread::sleep_until(startTime + 300ms + kJitter);
+    ASSERT_TRUE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_TRUE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_TRUE(taskRan[5]);
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks ran and 1 cancelled
+    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+
+    // Task 3 should trigger around 400ms.
+    std::this_thread::sleep_until(startTime + 400ms - kJitter);
+    ASSERT_TRUE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_TRUE(taskRan[2]);
+    ASSERT_FALSE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_TRUE(taskRan[5]);
+
+    // 4 tasks ran and 1 cancelled
+    ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+
+    std::this_thread::sleep_until(startTime + 400ms + kJitter);
+    ASSERT_TRUE(taskRan[0]);
+    ASSERT_TRUE(taskRan[1]);
+    ASSERT_TRUE(taskRan[2]);
+    ASSERT_TRUE(taskRan[3]);
+    ASSERT_FALSE(taskRan[4]);
+    ASSERT_TRUE(taskRan[5]);
+
+    // 0 tasks pending
+    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+    // 5 tasks ran and 1 cancelled
+    ASSERT_EQ(5 + 1, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+TEST(TimerThread, TrackedTasks) {
+    TimerThread thread;
+
+    auto handle0 = thread.trackTask("0");
+    auto handle1 = thread.trackTask("1");
+    auto handle2 = thread.trackTask("2");
+
+    // 3 tasks pending
+    ASSERT_EQ(3, countChars(thread.pendingToString(), REQUEST_START));
+    // 0 tasks retired
+    ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle0));
+    ASSERT_TRUE(thread.cancelTask(handle1));
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+    // handle1 is stale, cancel returns false.
+    ASSERT_FALSE(thread.cancelTask(handle1));
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+    // Add another tracked task.
+    auto handle3 = thread.trackTask("3");
+
+    // 2 tasks pending
+    ASSERT_EQ(2, countChars(thread.pendingToString(), REQUEST_START));
+    // 2 tasks retired
+    ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle2));
+
+    // 1 task pending
+    ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+    // 3 tasks retired
+    ASSERT_EQ(3, countChars(thread.retiredToString(), REQUEST_START));
+
+    ASSERT_TRUE(thread.cancelTask(handle3));
+
+    // 0 tasks pending
+    ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+    // 4 tasks retired
+    ASSERT_EQ(4, countChars(thread.retiredToString(), REQUEST_START));
+}
+
+}  // namespace
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
new file mode 100644
index 0000000..6de6b13
--- /dev/null
+++ b/media/utils/TimerThread.cpp
@@ -0,0 +1,359 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TimerThread"
+
+#include <optional>
+#include <sstream>
+#include <unistd.h>
+#include <vector>
+
+#include <mediautils/MediaUtilsDelayed.h>
+#include <mediautils/TimerThread.h>
+#include <utils/ThreadDefs.h>
+
+namespace android::mediautils {
+
+extern std::string formatTime(std::chrono::system_clock::time_point t);
+extern std::string_view timeSuffix(std::string_view time1, std::string_view time2);
+
+TimerThread::Handle TimerThread::scheduleTask(
+        std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout) {
+    const auto now = std::chrono::system_clock::now();
+    std::shared_ptr<const Request> request{
+            new Request{ now, now + timeout, gettid(), std::move(tag) }};
+    return mMonitorThread.add(std::move(request), std::move(func), timeout);
+}
+
+TimerThread::Handle TimerThread::trackTask(std::string tag) {
+    const auto now = std::chrono::system_clock::now();
+    std::shared_ptr<const Request> request{
+            new Request{ now, now, gettid(), std::move(tag) }};
+    return mNoTimeoutMap.add(std::move(request));
+}
+
+bool TimerThread::cancelTask(Handle handle) {
+    std::shared_ptr<const Request> request = mNoTimeoutMap.isValidHandle(handle) ?
+             mNoTimeoutMap.remove(handle) : mMonitorThread.remove(handle);
+    if (!request) return false;
+    mRetiredQueue.add(std::move(request));
+    return true;
+}
+
+std::string TimerThread::toString(size_t retiredCount) const {
+    // Note: These request queues are snapshotted very close together but
+    // not at "identical" times as we don't use a class-wide lock.
+
+    std::vector<std::shared_ptr<const Request>> timeoutRequests;
+    std::vector<std::shared_ptr<const Request>> retiredRequests;
+    mTimeoutQueue.copyRequests(timeoutRequests);
+    mRetiredQueue.copyRequests(retiredRequests, retiredCount);
+    std::vector<std::shared_ptr<const Request>> pendingRequests =
+        getPendingRequests();
+
+    struct Analysis analysis = analyzeTimeout(timeoutRequests, pendingRequests);
+    std::string analysisSummary;
+    if (!analysis.summary.empty()) {
+        analysisSummary = std::string("\nanalysis [ ").append(analysis.summary).append(" ]");
+    }
+    std::string timeoutStack;
+    if (analysis.timeoutTid != -1) {
+        timeoutStack = std::string("\ntimeout(")
+                .append(std::to_string(analysis.timeoutTid)).append(") callstack [\n")
+                .append(getCallStackStringForTid(analysis.timeoutTid)).append("]");
+    }
+    std::string blockedStack;
+    if (analysis.HALBlockedTid != -1) {
+        blockedStack = std::string("\nblocked(")
+                .append(std::to_string(analysis.HALBlockedTid)).append(")  callstack [\n")
+                .append(getCallStackStringForTid(analysis.HALBlockedTid)).append("]");
+    }
+
+    return std::string("now ")
+            .append(formatTime(std::chrono::system_clock::now()))
+            .append(analysisSummary)
+            .append("\ntimeout [ ")
+            .append(requestsToString(timeoutRequests))
+            .append(" ]\npending [ ")
+            .append(requestsToString(pendingRequests))
+            .append(" ]\nretired [ ")
+            .append(requestsToString(retiredRequests))
+            .append(" ]")
+            .append(timeoutStack)
+            .append(blockedStack);
+}
+
+// A HAL method is where the substring "Hidl" is in the class name.
+// The tag should look like: ... Hidl ... :: ...
+// When the audio HAL is updated to AIDL perhaps we will use instead
+// a global directory of HAL classes.
+//
+// See MethodStatistics.cpp:
+// mediautils::getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL)
+//
+/* static */
+bool TimerThread::isRequestFromHal(const std::shared_ptr<const Request>& request) {
+    const size_t hidlPos = request->tag.find("Hidl");
+    if (hidlPos == std::string::npos) return false;
+    // there should be a separator after "Hidl", which indicates it was part of the class name.
+    const size_t separatorPos = request->tag.find("::", hidlPos);
+    return separatorPos != std::string::npos;
+}
+
+/* static */
+struct TimerThread::Analysis TimerThread::analyzeTimeout(
+    const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
+    const std::vector<std::shared_ptr<const Request>>& pendingRequests) {
+
+    if (timeoutRequests.empty() || pendingRequests.empty()) return {}; // nothing to say.
+
+    // for now look at last timeout (in our case, the only timeout)
+    const std::shared_ptr<const Request> timeout = timeoutRequests.back();
+
+    // pending Requests that are problematic.
+    std::vector<std::shared_ptr<const Request>> pendingExact;
+    std::vector<std::shared_ptr<const Request>> pendingPossible;
+
+    // We look at pending requests that were scheduled no later than kDuration
+    // after the timeout request. This prevents false matches with calls
+    // that naturally block for a short period of time
+    // such as HAL write() and read().
+    //
+    auto constexpr kDuration = std::chrono::milliseconds(1000);
+    for (const auto& pending : pendingRequests) {
+        // If the pending tid is the same as timeout tid, problem identified.
+        if (pending->tid == timeout->tid) {
+            pendingExact.emplace_back(pending);
+            continue;
+        }
+
+        // if the pending request is scheduled within the time limit
+        if (pending->scheduled - timeout->scheduled < kDuration) {
+            pendingPossible.emplace_back(pending);
+        }
+    }
+
+    struct Analysis analysis{};
+
+    analysis.timeoutTid = timeout->tid;
+    std::string& summary = analysis.summary;
+    if (!pendingExact.empty()) {
+        const auto& request = pendingExact.front();
+        const bool hal = isRequestFromHal(request);
+
+        if (hal) {
+            summary = std::string("Blocked directly due to HAL call: ")
+                .append(request->toString());
+        }
+    }
+    if (summary.empty() && !pendingPossible.empty()) {
+        for (const auto& request : pendingPossible) {
+            const bool hal = isRequestFromHal(request);
+            if (hal) {
+                // The first blocked call is the most likely one.
+                // Recent calls might be temporarily blocked
+                // calls such as write() or read() depending on kDuration.
+                summary = std::string("Blocked possibly due to HAL call: ")
+                    .append(request->toString());
+                analysis.HALBlockedTid = request->tid;
+            }
+       }
+    }
+    return analysis;
+}
+
+std::vector<std::shared_ptr<const TimerThread::Request>> TimerThread::getPendingRequests() const {
+    constexpr size_t kEstimatedPendingRequests = 8;  // approx 128 byte alloc.
+    std::vector<std::shared_ptr<const Request>> pendingRequests;
+    pendingRequests.reserve(kEstimatedPendingRequests); // preallocate vector out of lock.
+
+    // following are internally locked calls, which add to our local pendingRequests.
+    mMonitorThread.copyRequests(pendingRequests);
+    mNoTimeoutMap.copyRequests(pendingRequests);
+
+    // Sort in order of scheduled time.
+    std::sort(pendingRequests.begin(), pendingRequests.end(),
+        [](const std::shared_ptr<const Request>& r1,
+           const std::shared_ptr<const Request>& r2) {
+               return r1->scheduled < r2->scheduled;
+           });
+    return pendingRequests;
+}
+
+std::string TimerThread::pendingToString() const {
+    return requestsToString(getPendingRequests());
+}
+
+std::string TimerThread::retiredToString(size_t n) const {
+    std::vector<std::shared_ptr<const Request>> retiredRequests;
+    mRetiredQueue.copyRequests(retiredRequests, n);
+
+    // Dump to string
+    return requestsToString(retiredRequests);
+}
+
+std::string TimerThread::timeoutToString(size_t n) const {
+    std::vector<std::shared_ptr<const Request>> timeoutRequests;
+    mTimeoutQueue.copyRequests(timeoutRequests, n);
+
+    // Dump to string
+    return requestsToString(timeoutRequests);
+}
+
+std::string TimerThread::Request::toString() const {
+    const auto scheduledString = formatTime(scheduled);
+    const auto deadlineString = formatTime(deadline);
+    return std::string(tag)
+        .append(" scheduled ").append(scheduledString)
+        .append(" deadline ").append(timeSuffix(scheduledString, deadlineString))
+        .append(" tid ").append(std::to_string(tid));
+}
+
+void TimerThread::RequestQueue::add(std::shared_ptr<const Request> request) {
+    std::lock_guard lg(mRQMutex);
+    mRequestQueue.emplace_back(std::chrono::system_clock::now(), std::move(request));
+    if (mRequestQueue.size() > mRequestQueueMax) {
+        mRequestQueue.pop_front();
+    }
+}
+
+void TimerThread::RequestQueue::copyRequests(
+        std::vector<std::shared_ptr<const Request>>& requests, size_t n) const {
+    std::lock_guard lg(mRQMutex);
+    const size_t size = mRequestQueue.size();
+    size_t i = n >=  size ? 0 : size - n;
+    for (; i < size; ++i) {
+        const auto &[time, request] = mRequestQueue[i];
+        requests.emplace_back(request);
+    }
+}
+
+bool TimerThread::NoTimeoutMap::isValidHandle(Handle handle) const {
+    return handle > getIndexedHandle(mNoTimeoutRequests);
+}
+
+TimerThread::Handle TimerThread::NoTimeoutMap::add(std::shared_ptr<const Request> request) {
+    std::lock_guard lg(mNTMutex);
+    // A unique handle is obtained by mNoTimeoutRequests.fetch_add(1).
+    // This need not be under a lock, but we do so anyhow.
+    const Handle handle = getIndexedHandle(mNoTimeoutRequests++);
+    mMap[handle] = request;
+    return handle;
+}
+
+std::shared_ptr<const TimerThread::Request> TimerThread::NoTimeoutMap::remove(Handle handle) {
+    std::lock_guard lg(mNTMutex);
+    auto it = mMap.find(handle);
+    if (it == mMap.end()) return {};
+    auto request = it->second;
+    mMap.erase(it);
+    return request;
+}
+
+void TimerThread::NoTimeoutMap::copyRequests(
+        std::vector<std::shared_ptr<const Request>>& requests) const {
+    std::lock_guard lg(mNTMutex);
+    for (const auto &[handle, request] : mMap) {
+        requests.emplace_back(request);
+    }
+}
+
+TimerThread::Handle TimerThread::MonitorThread::getUniqueHandle_l(
+        std::chrono::milliseconds timeout) {
+    // To avoid key collisions, advance by 1 tick until the key is unique.
+    auto deadline = std::chrono::steady_clock::now() + timeout;
+    for (; mMonitorRequests.find(deadline) != mMonitorRequests.end();
+         deadline += std::chrono::steady_clock::duration(1))
+        ;
+    return deadline;
+}
+
+TimerThread::MonitorThread::MonitorThread(RequestQueue& timeoutQueue)
+        : mTimeoutQueue(timeoutQueue)
+        , mThread([this] { threadFunc(); }) {
+     pthread_setname_np(mThread.native_handle(), "TimerThread");
+     pthread_setschedprio(mThread.native_handle(), PRIORITY_URGENT_AUDIO);
+}
+
+TimerThread::MonitorThread::~MonitorThread() {
+    {
+        std::lock_guard _l(mMutex);
+        mShouldExit = true;
+        mCond.notify_all();
+    }
+    mThread.join();
+}
+
+void TimerThread::MonitorThread::threadFunc() {
+    std::unique_lock _l(mMutex);
+    while (!mShouldExit) {
+        if (!mMonitorRequests.empty()) {
+            Handle nextDeadline = mMonitorRequests.begin()->first;
+            if (nextDeadline < std::chrono::steady_clock::now()) {
+                // Deadline has expired, handle the request.
+                {
+                    auto node = mMonitorRequests.extract(mMonitorRequests.begin());
+                    _l.unlock();
+                    // We add Request to retired queue early so that it can be dumped out.
+                    mTimeoutQueue.add(std::move(node.mapped().first));
+                    node.mapped().second(); // Caution: we don't hold lock here - but do we care?
+                                            // this is the timeout case!  We will crash soon,
+                                            // maybe before returning.
+                    // anything left over is released here outside lock.
+                }
+                // reacquire the lock - if something was added, we loop immediately to check.
+                _l.lock();
+                continue;
+            }
+            mCond.wait_until(_l, nextDeadline);
+        } else {
+            mCond.wait(_l);
+        }
+    }
+}
+
+TimerThread::Handle TimerThread::MonitorThread::add(
+        std::shared_ptr<const Request> request, std::function<void()>&& func,
+        std::chrono::milliseconds timeout) {
+    std::lock_guard _l(mMutex);
+    const Handle handle = getUniqueHandle_l(timeout);
+    mMonitorRequests.emplace(handle, std::make_pair(std::move(request), std::move(func)));
+    mCond.notify_all();
+    return handle;
+}
+
+std::shared_ptr<const TimerThread::Request> TimerThread::MonitorThread::remove(Handle handle) {
+    std::unique_lock ul(mMutex);
+    const auto it = mMonitorRequests.find(handle);
+    if (it == mMonitorRequests.end()) {
+        return {};
+    }
+    std::shared_ptr<const TimerThread::Request> request = std::move(it->second.first);
+    std::function<void()> func = std::move(it->second.second);
+    mMonitorRequests.erase(it);
+    ul.unlock();  // manually release lock here so func is released outside of lock.
+    return request;
+}
+
+void TimerThread::MonitorThread::copyRequests(
+        std::vector<std::shared_ptr<const Request>>& requests) const {
+    std::lock_guard lg(mMutex);
+    for (const auto &[deadline, monitorpair] : mMonitorRequests) {
+        requests.emplace_back(monitorpair.first);
+    }
+}
+
+}  // namespace android::mediautils
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index c1698dc..d26e6c2 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -18,6 +18,7 @@
         "libutils",
         "libbinder",
         "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
 
     cflags: [
@@ -31,11 +32,6 @@
         "bionic_libc_platform_headers",
         "libmedia_headers",
     ],
-
-    include_dirs: [
-        // For DEBUGGER_SIGNAL
-        "system/core/debuggerd/include",
-    ],
 }
 
 cc_fuzz {
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
index 6e52512..51e8d7a 100644
--- a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -17,7 +17,7 @@
 #include <fcntl.h>
 
 #include <functional>
-#include <type_traits>
+#include  <type_traits>
 
 #include <android/content/AttributionSourceState.h>
 #include "fuzzer/FuzzedDataProvider.h"
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
index eeb6ba6..7966469 100644
--- a/media/utils/fuzzers/TimeCheckFuzz.cpp
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -44,11 +44,11 @@
 
     // 2. We also have setAudioHalPids, which is populated with the pids set
     // above.
-    android::TimeCheck::setAudioHalPids(pids);
+    android::mediautils::TimeCheck::setAudioHalPids(pids);
     std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
 
     // 3. The constructor, which is fuzzed here:
-    android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+    android::mediautils::TimeCheck timeCheck(name.c_str(), {} /* onTimer */, timeoutMs);
     // We will leave some buffer to avoid sleeping too long
     uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
 
diff --git a/media/utils/include/mediautils/Library.h b/media/utils/include/mediautils/Library.h
new file mode 100644
index 0000000..19cfc11
--- /dev/null
+++ b/media/utils/include/mediautils/Library.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <dlfcn.h>
+#include <string>
+#include <unistd.h>
+
+namespace android::mediautils {
+
+/**
+ * Returns a shared pointer to the library instance.
+ *
+ * When the last reference to the library is removed, the library will be dlclose().
+ *
+ * Notes:
+ * 1) The Android bionic linker always uses RTLD_GLOBAL for executable linking,
+ * which provides the symbols for other subsequently loaded libraries.
+ *
+ * 2) RTLD_GLOBAL, like RTLD_NODELETE, disables unloading of the library
+ * when the reference count drops to zero.
+ *
+ * 3) RTLD_LOCAL is the default in the absence of RTLD_GLOBAL.
+ * RTLD_LOCAL may be ignored in some situations, for example:
+ * https://stackoverflow.com/questions/56808889/static-objects-destructed-before-dlclose
+ *
+ * 4) We default to RTLD_LAZY to delay symbol relocations until needed.
+ * This flag may be ignored by Android.  RTLD_LAZY may allow
+ * unresolved symbols if they are never accessed, or symbols added later by another
+ * library loaded with RTLD_GLOBAL. See RTLD_NOW for comparison.
+ *
+ * 5) Avoid both statically linking and dynamically loading the same library.
+ * This is known to cause double-free issues as library symbols may map to
+ * the same location.  RTLD_DEEPBIND does not appear to be supported as of T.
+ * https://stackoverflow.com/questions/34073051/when-we-are-supposed-to-use-rtld-deepbind
+ * https://stackoverflow.com/questions/31209693/static-library-linked-two-times
+ *
+ * Details on Android linker and debugging here:
+ * See: adb shell setprop debug.ld.all dlerror,dlopen,dlsym
+ * See: https://android.googlesource.com/platform/bionic/+/master/android-changes-for-ndk-developers.md
+ *
+ * Some other relevant info:
+ * See: Soong double_loadable:true go/double_loadable
+ * See: https://en.wikipedia.org/wiki/One_Definition_Rule#Summary
+ *
+ * TODO(b/228093151): Consider moving to platform/system.
+ *
+ * \param libraryName
+ * \param flags one of the dlopen RTLD_* flags. https://linux.die.net/man/3/dlopen
+ * \return shared_ptr to the library. This will be nullptr if it isn't found.
+ */
+std::shared_ptr<void> loadLibrary(const char *libraryName, int flags = RTLD_LAZY);
+
+/**
+ * Returns a shared pointer to an object in the library
+ *
+ * The object will be a global variable or method in the library.
+ * The object reference counting is aliased to the library shared ptr.
+ *
+ * Note: If any internals of the shared library are exposed, for example by
+ * a method returning a pointer to library globals,
+ * or returning an object whose class definition is from the library,
+ * then the shared_ptr must be kept alive while such references to
+ * library internals exist to prevent library unloading.
+ *
+ * See usage of RTLD_NODELETE as a flag to prevent unloading.
+ *
+ * \param objectName of the library object.
+ * \param library a shared pointer to the library returned by loadLibrary().
+ * \return shared_ptr to the object, but whose refcount is
+ *         aliased to the library shared ptr.
+ */
+std::shared_ptr<void> getUntypedObjectFromLibrary(
+        const char *objectName, const std::shared_ptr<void>& library);
+
+/**
+ * Returns a shared pointer to an object in the library
+ *
+ * This is the template typed version of getUntypedObjectFromLibrary().
+ *
+ * \param objectName of the library object.
+ * \param library a shared pointer to the library
+ * \return shared_ptr to the object, but whose refcount is
+ *         aliased to the library shared ptr.
+ */
+template <typename T>
+std::shared_ptr<T> getObjectFromLibrary(
+        const char *objectName, const std::shared_ptr<void>& library) {
+    return std::static_pointer_cast<T>(getUntypedObjectFromLibrary(objectName, library));
+}
+
+} // android::mediautils
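
A usage sketch for the loader above; the library name and the exported global are illustrative
only. The important property is that the pointer returned by getObjectFromLibrary() aliases the
library handle, so the library cannot be unloaded while that pointer is held:

    #include <cstdint>
    #include <mediautils/Library.h>

    // Illustrative: a plugin exports a global int32_t named "gPluginVersion".
    int32_t readPluginVersion() {
        auto lib = android::mediautils::loadLibrary("libexampleplugin.so");
        if (!lib) return -1;  // dlopen failed or the library is absent
        auto version =
                android::mediautils::getObjectFromLibrary<int32_t>("gPluginVersion", lib);
        // 'version' shares ownership with 'lib'; holding it keeps the library mapped.
        return version ? *version : -1;
    }
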
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/utils/include/mediautils/MediaUtilsDelayed.h
similarity index 64%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
copy to media/utils/include/mediautils/MediaUtilsDelayed.h
index 9e04e82..1583e60 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
+++ b/media/utils/include/mediautils/MediaUtilsDelayed.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,14 +13,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package android.media;
+
+#pragma once
+
+#include <string>
+#include <unistd.h>
+
+namespace android::mediautils {
+
+// These methods use lazy library loading.
 
 /**
- * {@hide}
+ * Returns the call stack as a string for the thread id tid.
  */
-@Backing(type="int")
-enum AudioEncapsulationMode {
-     NONE = 0,
-     ELEMENTARY_STREAM = 1,
-     HANDLE = 2,
-}
+std::string getCallStackStringForTid(pid_t tid);
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
new file mode 100644
index 0000000..700fbaa
--- /dev/null
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <mutex>
+#include <string>
+#include <vector>
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/Statistics.h>
+
+namespace android::mediautils {
+
+/**
+ * MethodStatistics is used to associate Binder codes
+ * with a method name and execution time statistics.
+ *
+ * This is used to track binder transaction times for
+ * AudioFlinger and AudioPolicy services.
+ *
+ * Here, Code is the enumeration type for the method
+ * lookup.
+ */
+template <typename Code>
+class MethodStatistics {
+public:
+    using FloatType = float;
+    using StatsType = audio_utils::Statistics<FloatType>;
+
+    /**
+     * Method statistics.
+     *
+     * Initialized with the Binder transaction list for tracking AudioFlinger
+     * and AudioPolicyManager execution statistics.
+     */
+    explicit MethodStatistics(
+            const std::initializer_list<std::pair<const Code, std::string>>& methodMap = {})
+        : mMethodMap{methodMap} {}
+
+    /**
+     * Adds a method event, typically execution time in ms.
+     */
+    void event(Code code, FloatType executeMs) {
+        std::lock_guard lg(mLock);
+        mStatisticsMap[code].add(executeMs);
+    }
+
+    /**
+     * Returns the name for the method code.
+     */
+    std::string getMethodForCode(Code code) const {
+        auto it = mMethodMap.find(code);
+        return it == mMethodMap.end() ? std::to_string((int)code) : it->second;
+    }
+
+    /**
+     * Returns the number of times the method was invoked by event().
+     */
+    size_t getMethodCount(Code code) const {
+        std::lock_guard lg(mLock);
+        auto it = mStatisticsMap.find(code);
+        return it == mStatisticsMap.end() ? 0 : it->second.getN();
+    }
+
+    /**
+     * Returns the statistics object for the method.
+     */
+    StatsType getStatistics(Code code) const {
+        std::lock_guard lg(mLock);
+        auto it = mStatisticsMap.find(code);
+        return it == mStatisticsMap.end() ? StatsType{} : it->second;
+    }
+
+    /**
+     * Dumps the current method statistics.
+     */
+    std::string dump() const {
+        std::stringstream ss;
+        std::lock_guard lg(mLock);
+        if constexpr (std::is_same_v<Code, std::string>) {
+            for (const auto &[code, stats] : mStatisticsMap) {
+                ss << code <<
+                        " n=" << stats.getN() << " " << stats.toString() << "\n";
+            }
+        } else /* constexpr */ {
+            for (const auto &[code, stats] : mStatisticsMap) {
+                ss << int(code) << " " << getMethodForCode(code) <<
+                        " n=" << stats.getN() << " " << stats.toString() << "\n";
+            }
+        }
+        return ss.str();
+    }
+
+private:
+    const std::map<Code, std::string> mMethodMap;
+    mutable std::mutex mLock;
+    std::map<Code, StatsType> mStatisticsMap GUARDED_BY(mLock);
+};
+
+// Managed Statistics support.
+// Supported Modules
+#define METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL "AudioHidl"
+
+// Returns a vector of class names for the module, or a nullptr if module not found.
+std::shared_ptr<std::vector<std::string>>
+getStatisticsClassesForModule(std::string_view moduleName);
+
+// Returns a statistics object for that class, or a nullptr if class not found.
+std::shared_ptr<MethodStatistics<std::string>>
+getStatisticsForClass(std::string_view className);
+
+// Only if used, requires IBinder.h to be included at the location of invocation.
+#define METHOD_STATISTICS_BINDER_CODE_NAMES(CODE_TYPE) \
+    {(CODE_TYPE)IBinder::PING_TRANSACTION , "ping"}, \
+    {(CODE_TYPE)IBinder::DUMP_TRANSACTION , "dump"}, \
+    {(CODE_TYPE)IBinder::SHELL_COMMAND_TRANSACTION , "shellCommand"}, \
+    {(CODE_TYPE)IBinder::INTERFACE_TRANSACTION , "getInterfaceDescriptor"}, \
+    {(CODE_TYPE)IBinder::SYSPROPS_TRANSACTION , "SYSPROPS_TRANSACTION"}, \
+    {(CODE_TYPE)IBinder::EXTENSION_TRANSACTION , "EXTENSION_TRANSACTION"}, \
+    {(CODE_TYPE)IBinder::DEBUG_PID_TRANSACTION , "DEBUG_PID_TRANSACTION"}, \
+
+} // android::mediautils
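
A minimal usage sketch of the string-keyed specialization used for the HIDL statistics above;
the method names and timings are illustrative:

    #include <string>
    #include <mediautils/MethodStatistics.h>

    void recordTimings() {
        android::mediautils::MethodStatistics<std::string> stats;
        stats.event("createTrack", 12.5f /* ms */);  // one execution-time sample per call
        stats.event("createTrack", 15.0f /* ms */);
        stats.event("openOutput", 3.2f /* ms */);
        // stats.getMethodCount("createTrack") == 2; dump() emits one line per method
        // with the sample count and aggregate statistics.
        const std::string report = stats.dump();
        (void)report;
    }
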
diff --git a/media/utils/include/mediautils/Process.h b/media/utils/include/mediautils/Process.h
new file mode 100644
index 0000000..d249c3a
--- /dev/null
+++ b/media/utils/include/mediautils/Process.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <string>
+#include <unistd.h>
+
+/*
+ * This header contains utilities to read the linux system /proc/pid files
+ *
+ * The format of this is not guaranteed to be stable, so use for diagnostic purposes only.
+ *
+ * The linux "proc" directory documentation:
+ * https://kernel.org/doc/Documentation/filesystems/proc.txt
+ * https://www.kernel.org/doc/html/latest/filesystems/proc.html?highlight=proc%20pid#chapter-3-per-process-parameters
+ */
+
+namespace android::mediautils {
+
+/**
+ * Return the thread schedule information for tid.
+ *
+ * String will be empty if the process does not have permission to
+ * access the /proc/pid tables, or if not on a Linux device.
+ *
+ * Linux scheduler documentation:
+ * https://www.kernel.org/doc/html/latest/scheduler/index.html
+ * https://man7.org/linux/man-pages/man7/sched.7.html
+ *
+ * Sample as follows:
+
+AudioOut_8D (10800, #threads: 36)
+-------------------------------------------------------------------
+se.exec_start                                :       8132077.598026
+se.vruntime                                  :        798689.872087
+se.sum_exec_runtime                          :        136466.957838
+se.nr_migrations                             :               132487
+se.statistics.sum_sleep_runtime              :       5629794.565945
+se.statistics.wait_start                     :             0.000000
+se.statistics.sleep_start                    :       8195727.586392
+se.statistics.block_start                    :             0.000000
+se.statistics.sleep_max                      :       1995665.869808
+se.statistics.block_max                      :             0.591675
+se.statistics.exec_max                       :             2.477580
+se.statistics.slice_max                      :             0.000000
+se.statistics.wait_max                       :             8.608642
+se.statistics.wait_sum                       :          4683.266835
+se.statistics.wait_count                     :               300964
+se.statistics.iowait_sum                     :             0.000000
+se.statistics.iowait_count                   :                    0
+se.statistics.nr_migrations_cold             :                    0
+se.statistics.nr_failed_migrations_affine    :                  297
+se.statistics.nr_failed_migrations_running   :                 1412
+se.statistics.nr_failed_migrations_hot       :                   96
+se.statistics.nr_forced_migrations           :                   26
+se.statistics.nr_wakeups                     :               281263
+se.statistics.nr_wakeups_sync                :                   84
+se.statistics.nr_wakeups_migrate             :               132322
+se.statistics.nr_wakeups_local               :                 2165
+se.statistics.nr_wakeups_remote              :               279098
+se.statistics.nr_wakeups_affine              :                    0
+se.statistics.nr_wakeups_affine_attempts     :                    0
+se.statistics.nr_wakeups_passive             :                    0
+se.statistics.nr_wakeups_idle                :                    0
+avg_atom                                     :             0.453434
+avg_per_cpu                                  :             1.030040
+nr_switches                                  :               300963
+nr_voluntary_switches                        :               281252
+nr_involuntary_switches                      :                19711
+se.load.weight                               :             73477120
+se.avg.load_sum                              :                   58
+se.avg.runnable_sum                          :                27648
+se.avg.util_sum                              :                21504
+se.avg.load_avg                              :                   48
+se.avg.runnable_avg                          :                    0
+se.avg.util_avg                              :                    0
+se.avg.last_update_time                      :        8132075824128
+se.avg.util_est.ewma                         :                    8
+se.avg.util_est.enqueued                     :                    1
+uclamp.min                                   :                    0
+uclamp.max                                   :                 1024
+effective uclamp.min                         :                    0
+effective uclamp.max                         :                 1024
+policy                                       :                    0
+prio                                         :                  101
+clock-delta                                  :                  163
+*/
+std::string getThreadSchedAsString(pid_t tid);
+
+/**
+ * Returns a map parsed from the raw thread schedule string.
+ */
+std::map<std::string, double> parseThreadSchedString(const std::string& schedString);
+
+/**
+ * Returns a map for /proc/pid/task/tid/sched.
+ */
+inline std::map<std::string, double> getThreadSchedAsMap(pid_t tid) {
+    return parseThreadSchedString(getThreadSchedAsString(tid));
+}
+
+// TODO: Extend to other /proc/pid file information.
+//
+// See "ps" command get_ps().
+// https://cs.android.com/android/platform/superproject/+/master:external/toybox/toys/posix/ps.c;l=707
+
+} // android::mediautils
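
A minimal usage sketch of the map-based accessor, assuming the caller is permitted to read the
target thread's /proc entry; the key below is one of the fields from the sample dump, and the
function name is illustrative:

    #include <mediautils/Process.h>

    double threadExecRuntime(pid_t tid) {
        // Parses /proc/<pid>/task/<tid>/sched into a field-name -> value map.
        const auto sched = android::mediautils::getThreadSchedAsMap(tid);
        const auto it = sched.find("se.sum_exec_runtime");
        // The map is empty (or the key missing) if the entry is unreadable or not on Linux.
        return it != sched.end() ? it->second : 0.0;  // value as parsed from the kernel dump
    }
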
diff --git a/media/utils/include/mediautils/ScopedStatistics.h b/media/utils/include/mediautils/ScopedStatistics.h
new file mode 100644
index 0000000..c5fc1e9
--- /dev/null
+++ b/media/utils/include/mediautils/ScopedStatistics.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "MethodStatistics.h"
+#include <chrono>
+#include <memory>
+#include <string>
+#include <utility>
+
+namespace android::mediautils {
+
+class ScopedStatistics {
+  public:
+    /**
+     * ScopedStatistics is a RAII way of obtaining
+     * execution time statistics for a scoped C++ block.
+     *
+     * It updates the MethodStatistics shared pointer parameter
+     * with the methodName parameter and the duration/lifetime of the
+     * ScopedStatistics object.
+     *
+     * Not thread-safe, but expected to run in a single execution
+     * thread, and there are no user serviceable parts exposed.
+     *
+     * Example:
+     *
+     * std::shared_ptr<mediautils::MethodStatistics<std::string>> stats =
+     *     std::make_shared<mediautils::MethodStatistics<std::string>>();
+     *
+     * // ...
+     * {
+     *    mediautils::ScopedStatistics scopedStatistics("MyClass:myMethod", stats);
+     *
+     *    // some work to be timed here - up to the end of the block.
+     * }
+     *
+     * \param methodName the methodname to use "ClassName::methodName"
+     * \param statistics a shared ptr to the MethodStatistics object to use.
+     */
+    ScopedStatistics(std::string methodName,
+               std::shared_ptr<mediautils::MethodStatistics<std::string>> statistics)
+        : mMethodName{std::move(methodName)}
+        , mStatistics{std::move(statistics)}
+        , mBegin{std::chrono::steady_clock::now()} {}
+
+    // No copy constructor.
+    ScopedStatistics(const ScopedStatistics& scopedStatistics) = delete;
+    ScopedStatistics& operator=(const ScopedStatistics& scopedStatistics) = delete;
+
+    ~ScopedStatistics() {
+        if (mStatistics) {
+            const float elapsedMs = std::chrono::duration_cast<std::chrono::nanoseconds>(
+                                            std::chrono::steady_clock::now() - mBegin)
+                                            .count() *
+                                    1e-6; // ns to ms.
+            mStatistics->event(mMethodName, elapsedMs);
+        }
+    }
+
+  private:
+    const std::string mMethodName;
+    const std::shared_ptr<mediautils::MethodStatistics<std::string>> mStatistics;
+    const std::chrono::steady_clock::time_point mBegin;
+};
+
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 734313c..de20d55 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -61,6 +61,7 @@
 
 // Used for calls that should come from system server or internal.
 // Note: system server is multiprocess for multiple users.  audioserver is not.
+// Note: if this method is modified, also update the same method in SensorService.h.
 static inline bool isAudioServerOrSystemServerUid(uid_t uid) {
     return multiuser_get_app_id(uid) == AID_SYSTEM || uid == AID_AUDIOSERVER;
 }
@@ -95,6 +96,7 @@
 bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource);
 bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource);
 bool captureVoiceCommunicationOutputAllowed(const AttributionSourceState& attributionSource);
+bool accessUltrasoundAllowed(const AttributionSourceState& attributionSource);
 bool captureHotwordAllowed(const AttributionSourceState& attributionSource);
 bool settingsAllowed();
 bool modifyAudioRoutingAllowed();
@@ -104,6 +106,7 @@
 bool dumpAllowed();
 bool modifyPhoneStateAllowed(const AttributionSourceState& attributionSource);
 bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
+bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
 void purgePermissionCache();
 int32_t getOpForSource(audio_source_t source);
 
diff --git a/media/utils/include/mediautils/ThreadSnapshot.h b/media/utils/include/mediautils/ThreadSnapshot.h
new file mode 100644
index 0000000..c470822
--- /dev/null
+++ b/media/utils/include/mediautils/ThreadSnapshot.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <string>
+
+#include <android-base/thread_annotations.h>
+
+namespace android::mediautils {
+
+/**
+ * Collect Thread performance statistics.
+ *
+ * An onBegin() and onEnd() signal a continuous "run".
+ * Statistics are returned by toString().
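+ *
+ * A minimal usage sketch (the logging of the result is illustrative):
+ *
+ *   ThreadSnapshot snapshot(gettid());
+ *   snapshot.onBegin();
+ *   // ... the code being measured runs here ...
+ *   snapshot.onEnd();
+ *   ALOGD("%s", snapshot.toString().c_str());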
+ */
+class ThreadSnapshot {
+public:
+    explicit ThreadSnapshot(pid_t tid = -1) { mState.reset(tid); }
+
+    // Returns current tid
+    pid_t getTid() const;
+
+    // Sets the tid
+    void setTid(pid_t tid);
+
+    // Reset statistics, keep same tid.
+    void reset();
+
+    // Signal a timing run is beginning
+    void onBegin();
+
+    // Signal a timing run is ending
+    void onEnd();
+
+    // Return the thread snapshot statistics in a string
+    std::string toString() const;
+
+private:
+    mutable std::mutex mLock;
+
+    // State represents our statistics at a given point in time.
+    // It is not thread-safe, so any locking must occur at the caller.
+    struct State {
+        pid_t mTid;
+        int64_t mBeginTimeNs;  // when last run began
+        int64_t mEndTimeNs;    // when last run ends (if less than begin time, not started)
+        int64_t mCumulativeTimeNs;
+
+        // Sched is the scheduler statistics obtained as a string.
+        // This is parsed only when toString() is called.
+        std::string mBeginSched;
+
+        // Clears existing state.
+        void reset(pid_t tid);
+
+        // onBegin() takes a std::string sched so that it can be captured outside
+        // of locking.
+        void onBegin(std::string sched);
+        void onEnd();
+        std::string toString() const;
+    };
+
+    // Our current state. We only keep the current running state.
+    State mState GUARDED_BY(mLock);
+};
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index 5ba6d7c..ef03aef 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -14,62 +14,94 @@
  * limitations under the License.
  */
 
+#pragma once
 
-#ifndef ANDROID_TIME_CHECK_H
-#define ANDROID_TIME_CHECK_H
-
-#include <utils/KeyedVector.h>
-#include <utils/Thread.h>
 #include <vector>
 
-namespace android {
+#include <mediautils/TimerThread.h>
+
+namespace android::mediautils {
 
 // A class monitoring execution time for a code block (scoped variable) and causing an assert
 // if it exceeds a certain time
 
 class TimeCheck {
-public:
+  public:
+    using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
 
     // The default timeout is chosen to be less than system server watchdog timeout
     static constexpr uint32_t kDefaultTimeOutMs = 5000;
 
-            TimeCheck(const char *tag, uint32_t timeoutMs = kDefaultTimeOutMs);
-            ~TimeCheck();
-    static  void setAudioHalPids(const std::vector<pid_t>& pids);
-    static  std::vector<pid_t> getAudioHalPids();
+    /**
+     * TimeCheck is a RAII object which will notify a callback
+     * on timer expiration or when the object is deallocated.
+     *
+     * TimeCheck is used as a watchdog and aborts by default on timer expiration.
+     * When it aborts, it will also send a debugger signal to pids passed in through
+     * setAudioHalPids().
+     *
+     * If the callback function has already been invoked for the timeout, it will
+     * not be called again on deallocation.
+     *
+     * \param tag       string associated with the TimeCheck object.
+     * \param onTimer   callback function with 2 parameters
+     *                      bool timeout  (which is true when the TimeCheck object
+     *                                    times out, false when the TimeCheck object is
+     *                                    destroyed or leaves scope before the timer expires.)
+     *                      float elapsedMs (the elapsed time to this event).
+     *                  When timeout is true, the callback is called on a different thread.
+     *                  The destructor cancels the callback, but it is not guaranteed
+     *                  to block for callback completion if the callback is already in progress
+     *                  (for maximum concurrency and reduced deadlock potential), so use proper
+     *                  lifetime analysis (e.g. shared or weak pointers).
+     * \param timeoutMs timeout in milliseconds.
+     *                  A zero timeout means no timeout is set -
+     *                  the callback is called only when
+     *                  the TimeCheck object is destroyed or leaves scope.
+     * \param crashOnTimeout true if the object issues an abort on timeout.
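+     *
+     * A minimal usage sketch (tag, callback body, and timeout values are illustrative):
+     *
+     *   {
+     *       TimeCheck timeCheck("IAudioFlinger::createTrack",
+     *               [](bool timeout, float elapsedMs) {
+     *                   ALOGW_IF(timeout, "createTrack timed out after %.1f ms", elapsedMs);
+     *               }, 3000, false);  // timeoutMs = 3000, crashOnTimeout = false
+     *       // monitored work here - the timer is cancelled when timeCheck leaves scope.
+     *   }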
+     */
+    explicit TimeCheck(std::string tag, OnTimerFunc&& onTimer = {},
+            uint32_t timeoutMs = kDefaultTimeOutMs, bool crashOnTimeout = true);
 
-private:
+    TimeCheck() = default;
+    // Remove copy constructors as there should only be one call to the destructor.
+    // Move is kept implicitly disabled, but would be logically consistent if enabled.
+    TimeCheck(const TimeCheck& other) = delete;
+    TimeCheck& operator=(const TimeCheck&) = delete;
 
-    class TimeCheckThread : public Thread {
+    ~TimeCheck();
+    static std::string toString();
+    static void setAudioHalPids(const std::vector<pid_t>& pids);
+    static std::vector<pid_t> getAudioHalPids();
+
+  private:
+    // Helper class for handling events.
+    // The usage here is const safe.
+    class TimeCheckHandler {
     public:
+        const std::string tag;
+        const OnTimerFunc onTimer;
+        const bool crashOnTimeout;
+        const std::chrono::system_clock::time_point startTime;
+        const pid_t tid;
 
-                            TimeCheckThread() {}
-        virtual             ~TimeCheckThread() override;
-
-                nsecs_t     startMonitoring(const char *tag, uint32_t timeoutMs);
-                void        stopMonitoring(nsecs_t endTimeNs);
-
-    private:
-
-                // RefBase
-        virtual void        onFirstRef() override { run("TimeCheckThread", PRIORITY_URGENT_AUDIO); }
-
-                // Thread
-        virtual bool        threadLoop() override;
-
-                Condition           mCond;
-                Mutex               mMutex;
-                // using the end time in ns as key is OK given the risk is low that two entries
-                // are added in such a way that <add time> + <timeout> are the same for both.
-                KeyedVector< nsecs_t, const char*>  mMonitorRequests;
+        void onCancel(TimerThread::Handle handle) const;
+        void onTimeout() const;
     };
 
-    static sp<TimeCheckThread> getTimeCheckThread();
+    static TimerThread& getTimeCheckThread();
     static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
 
-    const           nsecs_t mEndTimeNs;
+    // mTimeCheckHandler is immutable; it should be initialized first and destroyed last.
+    // Technically speaking, we do not need a shared_ptr here because TimerThread::cancelTask()
+    // is mutually exclusive of the callback, but the price paid for lifetime safety is minimal.
+    const std::shared_ptr<const TimeCheckHandler> mTimeCheckHandler;
+    const TimerThread::Handle mTimerHandle = TimerThread::INVALID_HANDLE;
 };
 
-}; // namespace android
+// Returns a TimeCheck object that sends info to MethodStatistics
+// obtained from getStatisticsForClass(className).
+TimeCheck makeTimeCheckStatsForClassMethod(
+        std::string_view className, std::string_view methodName);
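+//
+// A minimal usage sketch (class and method names are illustrative):
+//
+//   auto timeCheck = makeTimeCheckStatsForClassMethod("IDevicesFactory", "openDevice");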
 
-#endif  // ANDROID_TIME_CHECK_H
+}  // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimerThread.h b/media/utils/include/mediautils/TimerThread.h
new file mode 100644
index 0000000..ffee602
--- /dev/null
+++ b/media/utils/include/mediautils/TimerThread.h
@@ -0,0 +1,270 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <condition_variable>
+#include <deque>
+#include <functional>
+#include <map>
+#include <mutex>
+#include <string>
+#include <thread>
+
+#include <android-base/thread_annotations.h>
+
+namespace android::mediautils {
+
+/**
+ * A thread for deferred execution of tasks, with cancellation.
+ */
+class TimerThread {
+  public:
+    // A Handle is a time_point that serves as a unique key.  It is ordered.
+    using Handle = std::chrono::steady_clock::time_point;
+
+    static inline constexpr Handle INVALID_HANDLE =
+            std::chrono::steady_clock::time_point::min();
+
+    /**
+     * Schedules a task to be executed in the future (`timeout` duration from now).
+     *
+     * \param tag     string associated with the task.  This need not be unique,
+     *                as the Handle returned is used for cancelling.
+     * \param func    callback function that is invoked at the timeout.
+     * \param timeout timeout duration which is converted to milliseconds with at
+     *                least 45 integer bits.
+     *                A timeout of 0 (or negative) means the timer never expires,
+     *                so func() is never called. These tasks are stored internally
+     *                and reported by toString() until manually cancelled.
+     * \returns       a handle that can be used for cancellation.
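+     *
+     * A minimal usage sketch (tag, callback, and timeout values are illustrative):
+     *
+     *   TimerThread timerThread;
+     *   auto handle = timerThread.scheduleTask("watchdog",
+     *           [] { ALOGW("task timed out"); }, std::chrono::milliseconds(5000));
+     *   // ... if the monitored work completes in time:
+     *   timerThread.cancelTask(handle);  // returns false if the task already ran.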
+     */
+    Handle scheduleTask(
+            std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout);
+
+    /**
+     * Tracks a task that shows up on toString() until cancelled.
+     *
+     * \param tag     string associated with the task.
+     * \returns       a handle that can be used for cancellation.
+     */
+    Handle trackTask(std::string tag);
+
+    /**
+     * Cancels a task previously scheduled with scheduleTask()
+     * or trackTask().
+     *
+     * \returns true if cancelled. If the task has already executed
+     *          or if the handle doesn't exist, this is a no-op
+     *          and returns false.
+     */
+    bool cancelTask(Handle handle);
+
+    std::string toString(size_t retiredCount = SIZE_MAX) const;
+
+    /**
+     * Returns a string representation of the TimerThread queue.
+     *
+     * The queue is dumped in order of scheduling (not deadline).
+     */
+    std::string pendingToString() const;
+
+    /**
+     * Returns a string representation of the last retired tasks.
+     *
+     * These are tasks from trackTask() or scheduleTask() that have been
+     * cancelled.
+     *
+     * They are ordered by when the task was retired.
+     *
+     * \param n is maximum number of tasks to dump.
+     */
+    std::string retiredToString(size_t n = SIZE_MAX) const;
+
+
+    /**
+     * Returns a string representation of the last timeout tasks.
+     *
+     * These are tasks from scheduleTask() which have timed out.
+     *
+     * They are ordered by when the task timed out.
+     *
+     * \param n is maximum number of tasks to dump.
+     */
+    std::string timeoutToString(size_t n = SIZE_MAX) const;
+
+    /**
+     * Dumps a container with SmartPointer<Request> to a string.
+     *
+     * "{ Request1 } { Request2} ...{ RequestN }"
+     */
+    template <typename T>
+    static std::string requestsToString(const T& containerRequests) {
+        std::string s;
+        // append seems to be faster than stringstream.
+        // https://stackoverflow.com/questions/18892281/most-optimized-way-of-concatenation-in-strings
+        for (const auto& request : containerRequests) {
+            s.append("{ ").append(request->toString()).append(" } ");
+        }
+        // If not empty, there's an extra space at the end, so we trim it off.
+        if (!s.empty()) s.pop_back();
+        return s;
+    }
+
+  private:
+    // To minimize movement of data, we pass around shared_ptrs to Requests.
+    // These are allocated and deallocated outside of the lock.
+    struct Request {
+        const std::chrono::system_clock::time_point scheduled;
+        const std::chrono::system_clock::time_point deadline; // deadline := scheduled + timeout
+                                                              // if deadline == scheduled, no
+                                                              // timeout, task not executed.
+        const pid_t tid;
+        const std::string tag;
+
+        std::string toString() const;
+    };
+
+    // Deque of requests, in order of add().
+    // This class is thread-safe.
+    class RequestQueue {
+      public:
+        explicit RequestQueue(size_t maxSize)
+            : mRequestQueueMax(maxSize) {}
+
+        void add(std::shared_ptr<const Request>);
+
+        // return up to the last "n" requests retired.
+        void copyRequests(std::vector<std::shared_ptr<const Request>>& requests,
+            size_t n = SIZE_MAX) const;
+
+      private:
+        const size_t mRequestQueueMax;
+        mutable std::mutex mRQMutex;
+        std::deque<std::pair<std::chrono::system_clock::time_point,
+                             std::shared_ptr<const Request>>>
+                mRequestQueue GUARDED_BY(mRQMutex);
+    };
+
+    // A storage map of tasks without timeouts.  There is no std::function<void()>
+    // required; it just tracks the task's tag, scheduled time and tid.
+    // These tasks show up on a pendingToString() until manually cancelled.
+    class NoTimeoutMap {
+        // This is a counter of the requests that have no timeout (timeout == 0).
+        std::atomic<size_t> mNoTimeoutRequests{};
+
+        mutable std::mutex mNTMutex;
+        std::map<Handle, std::shared_ptr<const Request>> mMap GUARDED_BY(mNTMutex);
+
+      public:
+        bool isValidHandle(Handle handle) const; // lock free
+        Handle add(std::shared_ptr<const Request> request);
+        std::shared_ptr<const Request> remove(Handle handle);
+        void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+    };
+
+    // Monitor thread.
+    // This thread manages shared pointers to Requests and a function to
+    // call on timeout.
+    // This class is thread-safe.
+    class MonitorThread {
+        mutable std::mutex mMutex;
+        mutable std::condition_variable mCond;
+
+        // Ordered map of requests based on time of deadline.
+        //
+        std::map<Handle, std::pair<std::shared_ptr<const Request>, std::function<void()>>>
+                mMonitorRequests GUARDED_BY(mMutex);
+
+        RequestQueue& mTimeoutQueue; // locked internally, added to when request times out.
+
+        // Worker thread variables
+        bool mShouldExit GUARDED_BY(mMutex) = false;
+
+        // To avoid race with initialization,
+        // mThread should be initialized last as the thread is launched immediately.
+        std::thread mThread;
+
+        void threadFunc();
+        Handle getUniqueHandle_l(std::chrono::milliseconds timeout) REQUIRES(mMutex);
+
+      public:
+        MonitorThread(RequestQueue& timeoutQueue);
+        ~MonitorThread();
+
+        Handle add(std::shared_ptr<const Request> request, std::function<void()>&& func,
+                std::chrono::milliseconds timeout);
+        std::shared_ptr<const Request> remove(Handle handle);
+        void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+    };
+
+    // Analysis contains info deduced by analysisTimeout().
+    //
+    // Summary is the result string from checking timeoutRequests to see if
+    // any might be caused by blocked calls in pendingRequests.
+    //
+    // Summary string is empty if there is no automatic actionable info.
+    //
+    // timeoutTid is the tid selected from timeoutRequests (if any).
+    //
+    // HALBlockedTid is the tid from pendingRequests that is believed to be
+    // blocked and causing the timeout.
+    // HALBlockedTid may be INVALID_PID if no suspected tid is found,
+    // and if HALBlockedTid is valid, it will not be the same as timeoutTid.
+    //
+    static constexpr pid_t INVALID_PID = -1;
+    struct Analysis {
+        std::string summary;
+        pid_t timeoutTid = INVALID_PID;
+        pid_t HALBlockedTid = INVALID_PID;
+    };
+
+    // A HAL method is where the substring "Hidl" is in the class name.
+    // The tag should look like: ... Hidl ... :: ...
+    static bool isRequestFromHal(const std::shared_ptr<const Request>& request);
+
+    // Returns analysis from the requests.
+    static Analysis analyzeTimeout(
+        const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
+        const std::vector<std::shared_ptr<const Request>>& pendingRequests);
+
+    std::vector<std::shared_ptr<const Request>> getPendingRequests() const;
+
+    // A no-timeout request is represented by a handle at the end of steady_clock time,
+    // counting down by the number of no-timeout requests previously made.
+    // We manage them on the NoTimeoutMap, but conceptually they could be scheduled
+    // on the MonitorThread because those time handles won't expire in
+    // the lifetime of the device.
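+    // For illustration: getIndexedHandle(0) is steady_clock time_point::max(),
+    // getIndexedHandle(1) is one clock tick earlier, and so on; such handles sort
+    // after any realistic deadline, so they do not collide with scheduled tasks.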
+    static inline Handle getIndexedHandle(size_t index) {
+        return std::chrono::time_point<std::chrono::steady_clock>::max() -
+                    std::chrono::time_point<std::chrono::steady_clock>::duration(index);
+    }
+
+    static constexpr size_t kRetiredQueueMax = 16;
+    RequestQueue mRetiredQueue{kRetiredQueueMax};  // locked internally
+
+    static constexpr size_t kTimeoutQueueMax = 16;
+    RequestQueue mTimeoutQueue{kTimeoutQueueMax};  // locked internally
+
+    NoTimeoutMap mNoTimeoutMap;  // locked internally
+
+    MonitorThread mMonitorThread{mTimeoutQueue};  // This should be initialized last because
+                                                  // the thread is launched immediately.
+                                                  // Locked internally.
+};
+
+}  // namespace android::mediautils
diff --git a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl b/media/utils/include/mediautils/memory.h
similarity index 60%
copy from media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
copy to media/utils/include/mediautils/memory.h
index 9e04e82..cc00bb6 100644
--- a/media/libaudioclient/aidl/android/media/AudioEncapsulationMode.aidl
+++ b/media/utils/include/mediautils/memory.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,14 +13,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package android.media;
+#pragma once
+
+#include <cstdlib>
+#include <memory>
+
+namespace android {
+
+namespace {
+struct FreeDeleter {
+    void operator()(void* p) { free(p); }
+};
+
+}  // namespace
 
 /**
- * {@hide}
+ * Used to wrap pointers allocated by legacy code using malloc / calloc / etc.
  */
-@Backing(type="int")
-enum AudioEncapsulationMode {
-     NONE = 0,
-     ELEMENTARY_STREAM = 1,
-     HANDLE = 2,
-}
+template <typename T>
+using unique_malloced_ptr = std::unique_ptr<T, FreeDeleter>;
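+
+// A minimal usage sketch (the wrapped buffer is assumed to come from a malloc-family call):
+//
+//   unique_malloced_ptr<char> name{strdup("audioserver")};  // freed with free() on scope exit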
+
+}  // namespace android
diff --git a/media/utils/memory-test.cpp b/media/utils/memory-test.cpp
new file mode 100644
index 0000000..2409bc2
--- /dev/null
+++ b/media/utils/memory-test.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <mediautils/memory.h>
+
+namespace android {
+namespace {
+
+TEST(UniqueMallocedPtr, Void) {
+    unique_malloced_ptr<void> p(std::malloc(10));
+}
+
+TEST(UniqueMallocedPtr, Char) {
+    unique_malloced_ptr<char> p(reinterpret_cast<char*>(std::malloc(10)));
+}
+
+TEST(UniqueMallocedPtr, Null) {
+    unique_malloced_ptr<char> p(nullptr);
+}
+
+TEST(UniqueMallocedPtr, Default) {
+    unique_malloced_ptr<char> p;
+}
+
+}  // namespace
+}  // namespace android
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 6593d56..1024018 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -7,6 +7,82 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
+cc_test_library {
+    name: "libsharedtest",
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    sanitize:{
+       address: true,
+       cfi: true,
+       integer_overflow: true,
+       memtag_heap: true,
+    },
+
+    shared_libs: [
+        "liblog",
+    ],
+
+    srcs: [
+        "sharedtest.cpp",
+    ]
+}
+
+cc_test {
+    name: "library_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    sanitize:{
+       address: true,
+       cfi: true,
+       integer_overflow: true,
+       memtag_heap: true,
+    },
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    data_libs: [
+        "libsharedtest",
+    ],
+
+    srcs: [
+        "library_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "media_process_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "media_process_tests.cpp",
+    ],
+}
+
 cc_test {
     name: "media_synchronization_tests",
 
@@ -26,3 +102,92 @@
         "media_synchronization_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "media_threadsnapshot_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    shared_libs: [
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "media_threadsnapshot_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "mediautils_scopedstatistics_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "mediautils_scopedstatistics_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "methodstatistics_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "methodstatistics_tests.cpp",
+    ],
+}
+
+cc_test {
+    name: "timecheck_tests",
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+
+    sanitize:{
+       address: true,
+       cfi: true,
+       integer_overflow: true,
+       memtag_heap: true,
+    },
+
+    shared_libs: [
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+
+    srcs: [
+        "timecheck_tests.cpp",
+    ],
+}
diff --git a/media/utils/tests/library_tests.cpp b/media/utils/tests/library_tests.cpp
new file mode 100644
index 0000000..c5c500c
--- /dev/null
+++ b/media/utils/tests/library_tests.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "library_tests"
+#include <utils/Log.h>
+
+#include <mediautils/Library.h>
+
+#include <android-base/file.h>
+#include <gtest/gtest.h>
+
+using namespace android::mediautils;
+
+namespace {
+
+static int32_t here = 0;  // accessed on same thread.
+
+TEST(library_tests, basic) {
+    std::string path = android::base::GetExecutableDirectory() + "/libsharedtest.so";
+    // The flags to loadLibrary should not include RTLD_GLOBAL or RTLD_NODELETE
+    // which prevent unloading.
+    std::shared_ptr<void> library = loadLibrary(path.c_str(), RTLD_LAZY);
+    ASSERT_TRUE(library);
+    ASSERT_EQ(1, library.use_count());
+
+    std::shared_ptr<int32_t*> ptr = getObjectFromLibrary<int32_t*>("gPtr", library);
+    ASSERT_TRUE(ptr);
+    ASSERT_EQ(2, library.use_count());
+
+    ASSERT_EQ(nullptr, *ptr); // original contents are nullptr.
+
+    // There is a static object destructor in libsharedtest.so that will set the
+    // contents of the integer pointer (if non-null) to 1 when called.
+    // This is used to detect that the library is unloaded.
+    *ptr = &here;
+
+    ptr.reset();  // Note: this shared pointer uses library's refcount.
+    ASSERT_EQ(1, library.use_count());  // Verify library's refcount goes down by 1.
+    ASSERT_EQ(0, here);  // the shared library's object destructor hasn't been called.
+
+    // use weak_ptr to investigate whether the library is gone.
+    std::weak_ptr<void> wlibrary = library;
+    ASSERT_EQ(1, wlibrary.use_count());
+    library.reset();
+
+    // we should have released the last reference.
+    ASSERT_EQ(0, wlibrary.use_count());
+
+    // The library should unload and the global object destroyed.
+    // Note on Android, specifying RTLD_GLOBAL or RTLD_NODELETE in the flags
+    // will prevent unloading libraries.
+    ASSERT_EQ(1, here);
+}
+
+TEST(library_tests, sad_library) {
+    std::string path = android::base::GetExecutableDirectory()
+            + "/something_random_library_that_doesn't_exit.so";
+
+    std::shared_ptr<void> library = loadLibrary(path.c_str(), RTLD_LAZY);
+    // We shouldn't crash on an invalid library path, just return an empty shared pointer.
+    // Check the logcat for any error details.
+    ASSERT_FALSE(library);
+}
+
+} // namespace
diff --git a/media/utils/tests/media_process_tests.cpp b/media/utils/tests/media_process_tests.cpp
new file mode 100644
index 0000000..2ae3f70
--- /dev/null
+++ b/media/utils/tests/media_process_tests.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/Process.h>
+
+#define LOG_TAG "media_process_tests"
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+TEST(media_process_tests, basic) {
+  const std::string schedString = getThreadSchedAsString(gettid());
+
+  (void)schedString;
+  // We don't test schedString, only that we haven't crashed.
+  // ASSERT_FALSE(schedString.empty());
+
+  // schedString is not normative.  So we conjure up our own string
+  const std::string fakeString = "\
+AudioOut_8D (10800, #threads: 36)\n\
+-------------------------------------------------------------------\n\
+se.exec_start                                :       8132077.598026\n\
+se.vruntime                                  :        798689.872087\n\
+se.sum_exec_runtime                          :        136466.957838\n\
+se.nr_migrations                             :               132487\n\
+se.statistics.sum_sleep_runtime              :       5629794.565945\n\
+se.statistics.wait_start                     :             0.000000\n\
+se.statistics.sleep_start                    :       8195727.586392\n\
+se.statistics.block_start                    :             0.000000\n\
+se.statistics.sleep_max                      :       1995665.869808\n\
+se.statistics.block_max                      :             0.591675\n\
+se.statistics.exec_max                       :             2.477580\n\
+se.statistics.slice_max                      :             0.000000\n\
+se.statistics.wait_max                       :             8.608642\n\
+se.statistics.wait_sum                       :          4683.266835\n\
+se.statistics.wait_count                     :               300964\n\
+se.statistics.iowait_sum                     :             0.000000\n\
+se.statistics.iowait_count                   :                    0\n\
+se.statistics.nr_migrations_cold             :                    0\n\
+se.statistics.nr_failed_migrations_affine    :                  297\n\
+se.statistics.nr_failed_migrations_running   :                 1412\n\
+se.statistics.nr_failed_migrations_hot       :                   96\n\
+se.statistics.nr_forced_migrations           :                   26\n\
+se.statistics.nr_wakeups                     :               281263\n\
+se.statistics.nr_wakeups_sync                :                   84\n\
+se.statistics.nr_wakeups_migrate             :               132322\n\
+se.statistics.nr_wakeups_local               :                 2165\n\
+se.statistics.nr_wakeups_remote              :               279098\n\
+se.statistics.nr_wakeups_affine              :                    0\n\
+se.statistics.nr_wakeups_affine_attempts     :                    0\n\
+se.statistics.nr_wakeups_passive             :                    0\n\
+se.statistics.nr_wakeups_idle                :                    0\n\
+avg_atom                                     :             0.453434\n\
+avg_per_cpu                                  :             1.030040\n\
+nr_switches                                  :               300963\n\
+nr_voluntary_switches                        :               281252\n\
+nr_involuntary_switches                      :                19711\n\
+se.load.weight                               :             73477120\n\
+se.avg.load_sum                              :                   58\n\
+se.avg.runnable_sum                          :                27648\n\
+se.avg.util_sum                              :                21504\n\
+se.avg.load_avg                              :                   48\n\
+se.avg.runnable_avg                          :                    0\n\
+se.avg.util_avg                              :                    0\n\
+se.avg.last_update_time                      :        8132075824128\n\
+se.avg.util_est.ewma                         :                    8\n\
+se.avg.util_est.enqueued                     :                    1\n\
+uclamp.min                                   :                    0\n\
+uclamp.max                                   :                 1024\n\
+effective uclamp.min                         :                    0\n\
+effective uclamp.max                         :                 1024\n\
+policy                                       :                    0\n\
+prio                                         :                  101\n\
+clock-delta                                  :                  163";
+
+  std::map<std::string, double> m = parseThreadSchedString(fakeString);
+
+  auto it = m.find("clock-delta");
+  ASSERT_NE(it, m.end());
+  ASSERT_EQ(it->second, 163);
+
+  it = m.find("se.avg.load_avg");
+  ASSERT_NE(it, m.end());
+  ASSERT_EQ(it->second, 48);
+}
diff --git a/media/utils/tests/media_threadsnapshot_tests.cpp b/media/utils/tests/media_threadsnapshot_tests.cpp
new file mode 100644
index 0000000..c7a45e2
--- /dev/null
+++ b/media/utils/tests/media_threadsnapshot_tests.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/ThreadSnapshot.h>
+
+#define LOG_TAG "media_threadsnapshot_tests"
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <chrono>
+#include <thread>
+
+using namespace android;
+using namespace android::mediautils;
+
+TEST(media_threadsnapshot_tests, basic) {
+  using namespace std::chrono_literals;
+
+  ThreadSnapshot threadSnapshot(gettid());
+
+  threadSnapshot.onBegin();
+
+  std::string snapshot1 = threadSnapshot.toString();
+
+  std::this_thread::sleep_for(100ms);
+
+  threadSnapshot.onEnd();
+
+  std::string snapshot2 = threadSnapshot.toString();
+
+  // Either we can't get a snapshot, or they must be different when taken when thread is running.
+  if (snapshot1.empty()) {
+    ASSERT_TRUE(snapshot2.empty());
+  } else {
+    ASSERT_FALSE(snapshot2.empty());
+    ASSERT_NE(snapshot1, snapshot2);
+  }
+}
diff --git a/media/utils/tests/mediautils_scopedstatistics_tests.cpp b/media/utils/tests/mediautils_scopedstatistics_tests.cpp
new file mode 100644
index 0000000..807ce63
--- /dev/null
+++ b/media/utils/tests/mediautils_scopedstatistics_tests.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "mediautils_scopedstatistics_tests"
+
+#include <mediautils/ScopedStatistics.h>
+
+#include <atomic>
+#include <chrono>
+#include <gtest/gtest.h>
+#include <thread>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using namespace std::chrono_literals;
+
+TEST(mediautils_scopedstatistics_tests, basic) {
+    auto methodStatistics = std::make_shared<MethodStatistics<std::string>>();
+    std::string METHOD_NAME{"MyMethod"};
+
+    // no stats before
+    auto empty = methodStatistics->getStatistics(METHOD_NAME);
+    ASSERT_EQ(0, empty.getN());
+
+    // create a scoped statistics object.
+    {
+        ScopedStatistics scopedStatistics(METHOD_NAME, methodStatistics);
+
+        std::this_thread::sleep_for(100ms);
+    }
+
+    // check that some stats were logged.
+    auto stats = methodStatistics->getStatistics(METHOD_NAME);
+    ASSERT_EQ(1, stats.getN());
+    auto mean = stats.getMean();
+
+    // mean should be about 100ms, but to avoid false failures,
+    // we check 50ms < mean < 300ms.
+    ASSERT_GT(mean, 50.);   // took more than 50ms.
+    ASSERT_LT(mean, 300.);  // took less than 300ms.
+}
diff --git a/media/utils/tests/methodstatistics_tests.cpp b/media/utils/tests/methodstatistics_tests.cpp
new file mode 100644
index 0000000..85c4ad5
--- /dev/null
+++ b/media/utils/tests/methodstatistics_tests.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "methodstatistics_tests"
+
+#include <mediautils/MethodStatistics.h>
+
+#include <atomic>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using CodeType = size_t;
+
+constexpr CodeType HELLO_CODE = 10;
+constexpr const char * HELLO_NAME = "hello";
+constexpr float HELLO_EVENTS[] = { 1.f, 3.f }; // needs lossless average
+
+constexpr CodeType WORLD_CODE = 21;
+constexpr const char * WORLD_NAME = "world";
+
+constexpr CodeType UNKNOWN_CODE = 12345;
+
+TEST(methodstatistics_tests, method_names) {
+    const MethodStatistics<CodeType> methodStatistics{
+            {HELLO_CODE, HELLO_NAME},
+            {WORLD_CODE, WORLD_NAME},
+    };
+
+    ASSERT_EQ(std::string(HELLO_NAME), methodStatistics.getMethodForCode(HELLO_CODE));
+    ASSERT_EQ(std::string(WORLD_NAME), methodStatistics.getMethodForCode(WORLD_CODE));
+    // an unknown code returns itself as a number.
+    ASSERT_EQ(std::to_string(UNKNOWN_CODE), methodStatistics.getMethodForCode(UNKNOWN_CODE));
+}
+
+TEST(methodstatistics_tests, events) {
+    MethodStatistics<CodeType> methodStatistics{
+            {HELLO_CODE, HELLO_NAME},
+            {WORLD_CODE, WORLD_NAME},
+    };
+
+    size_t n = 0;
+    float sum = 0.f;
+    for (const auto event : HELLO_EVENTS) {
+        methodStatistics.event(HELLO_CODE, event);
+        sum += event;
+        ++n;
+    }
+
+    const auto helloStats = methodStatistics.getStatistics(HELLO_CODE);
+    ASSERT_EQ((signed)n, helloStats.getN());
+    ASSERT_EQ(sum / n, helloStats.getMean());
+    ASSERT_EQ(n, methodStatistics.getMethodCount(HELLO_CODE));
+
+    const auto unsetStats = methodStatistics.getStatistics(UNKNOWN_CODE);
+    ASSERT_EQ(0, unsetStats.getN());
+    ASSERT_EQ(0.f, unsetStats.getMean());
+    ASSERT_EQ(0U, methodStatistics.getMethodCount(UNKNOWN_CODE));
+}
diff --git a/media/utils/tests/sharedtest.cpp b/media/utils/tests/sharedtest.cpp
new file mode 100644
index 0000000..3888874
--- /dev/null
+++ b/media/utils/tests/sharedtest.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#define LOG_TAG "sharedtest"
+#include <utils/Log.h>
+
+// Test library which is dynamically loaded by library_tests.
+
+// Static variable construction.
+// Calls A constructor on library load, A destructor on library unload.
+
+int32_t *gPtr = nullptr;  // this pointer is filled with the location to set memory
+                          // when ~A() is called.
+                          // we cannot use anything internal to this file as the
+                          // data segment may no longer exist after unloading the library.
+struct A {
+    A() {
+        ALOGD("%s: gPtr:%p", __func__, gPtr);
+    }
+
+    ~A() {
+        ALOGD("%s: gPtr:%p", __func__, gPtr);
+        if (gPtr != nullptr) {
+            *gPtr = 1;
+        }
+    }
+} gA;
+
+//  __attribute__((constructor)) methods occur before any static variable construction.
+// Libraries that use __attribute__((constructor)) should not rely on global constructors
+// having run before the method is called, because those globals may not be initialized
+// before use.
+// See heapprofd_client_api.
+// NOTE: is this right? Shouldn't it occur after construction?
+ __attribute__((constructor))
+void onConstruction() {
+    ALOGD("%s: in progress", __func__);  // for logcat analysis
+}
+
+// __attribute__((destructor)) methods occur before any static variable destruction.
+ __attribute__((destructor))
+void onDestruction() {
+    ALOGD("%s: in progress", __func__);  // for logcat analysis
+}
diff --git a/media/utils/tests/timecheck_tests.cpp b/media/utils/tests/timecheck_tests.cpp
new file mode 100644
index 0000000..6ebf44d
--- /dev/null
+++ b/media/utils/tests/timecheck_tests.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "timecheck_tests"
+
+#include <mediautils/TimeCheck.h>
+
+#include <atomic>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using namespace std::chrono_literals;
+
+namespace {
+
+TEST(timecheck_tests, success) {
+    bool timeoutRegistered = false;
+    float elapsedMsRegistered = 0.f;
+    bool event = false;
+
+    {
+        TimeCheck timeCheck("success",
+                [&event, &timeoutRegistered, &elapsedMsRegistered]
+                        (bool timeout, float elapsedMs) {
+            timeoutRegistered = timeout;
+            elapsedMsRegistered = elapsedMs;
+            event = true;
+        }, 1000 /* msec */, false /* crash */);
+    }
+    ASSERT_TRUE(event);
+    ASSERT_FALSE(timeoutRegistered);
+    ASSERT_GT(elapsedMsRegistered, 0.f);
+}
+
+TEST(timecheck_tests, timeout) {
+    bool timeoutRegistered = false;
+    float elapsedMsRegistered = 0.f;
+    std::atomic_bool event = false;  // seq-cst implies acquire-release
+
+    {
+        TimeCheck timeCheck("timeout",
+                [&event, &timeoutRegistered, &elapsedMsRegistered]
+                        (bool timeout, float elapsedMs) {
+            timeoutRegistered = timeout;
+            elapsedMsRegistered = elapsedMs;
+            event = true; // store-release, must be last.
+        }, 1 /* msec */, false /* crash */);
+        std::this_thread::sleep_for(100ms);
+    }
+    ASSERT_TRUE(event); // load-acquire, must be first.
+    ASSERT_TRUE(timeoutRegistered); // only called once on failure, not on dealloc.
+    ASSERT_GT(elapsedMsRegistered, 0.f);
+}
+
+// Note: We do not test TimeCheck crash because TimeCheck is multithreaded and the
+// EXPECT_EXIT() signal catching is imperfect due to the gtest fork.
+
+} // namespace
\ No newline at end of file
diff --git a/services/Android.mk b/services/Android.mk
new file mode 100644
index 0000000..c86a226
--- /dev/null
+++ b/services/Android.mk
@@ -0,0 +1 @@
+$(eval $(call declare-1p-copy-files,frameworks/av/services/audiopolicy,))
diff --git a/services/OWNERS b/services/OWNERS
index f0b5e2f..17e605d 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -1,9 +1,6 @@
-chz@google.com
 elaurent@google.com
 essick@google.com
 etalvala@google.com
-gkasten@google.com
 hunga@google.com
-marcone@google.com
 nchalko@google.com
 quxiangfang@google.com
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index b91f302..763c070 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -41,6 +41,7 @@
         "FastThreadState.cpp",
         "NBAIO_Tee.cpp",
         "PatchPanel.cpp",
+        "PropertyUtils.cpp",
         "SpdifStreamOut.cpp",
         "StateQueue.cpp",
         "Threads.cpp",
@@ -54,6 +55,7 @@
     ],
 
     shared_libs: [
+        "android.media.audio.common.types-V1-cpp",
         "audioflinger-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "av-types-aidl-cpp",
@@ -81,6 +83,7 @@
         "libmedia_helper",
         "libshmemcompat",
         "libvibrator",
+        "packagemanager_aidl-cpp",
     ],
 
     static_libs: [
@@ -90,6 +93,7 @@
     ],
 
     header_libs: [
+        "libaaudio_headers",
         "libaudioclient_headers",
         "libaudiohal_headers",
         "libmedia_headers",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3a3fb5e..f7576f6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -31,6 +31,7 @@
 #include <sys/resource.h>
 #include <thread>
 
+#include <android-base/stringprintf.h>
 #include <android/media/IAudioPolicyService.h>
 #include <android/os/IExternalVibratorService.h>
 #include <binder/IPCThreadState.h>
@@ -57,6 +58,7 @@
 
 #include "AudioFlinger.h"
 #include "NBAIO_Tee.h"
+#include "PropertyUtils.h"
 
 #include <media/AudioResamplerPublic.h>
 
@@ -77,6 +79,7 @@
 #include <media/nbaio/PipeReader.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/MemoryLeakTrackUtil.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
 #include <private/android_filesystem_config.h>
@@ -103,7 +106,12 @@
 
 namespace android {
 
+#define MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION 7.1
+
+using ::android::base::StringPrintf;
 using media::IEffectClient;
+using media::audio::common::AudioMMapPolicyInfo;
+using media::audio::common::AudioMMapPolicyType;
 using android::content::AttributionSourceState;
 
 static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
@@ -151,6 +159,92 @@
     return sExternalVibratorService;
 }
 
+// Creates an association between binder transaction codes and method names for IAudioFlinger.
+#define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(createTrack) \
+BINDER_METHOD_ENTRY(createRecord) \
+BINDER_METHOD_ENTRY(sampleRate) \
+BINDER_METHOD_ENTRY(format) \
+BINDER_METHOD_ENTRY(frameCount) \
+BINDER_METHOD_ENTRY(latency) \
+BINDER_METHOD_ENTRY(setMasterVolume) \
+BINDER_METHOD_ENTRY(setMasterMute) \
+BINDER_METHOD_ENTRY(masterVolume) \
+BINDER_METHOD_ENTRY(masterMute) \
+BINDER_METHOD_ENTRY(setStreamVolume) \
+BINDER_METHOD_ENTRY(setStreamMute) \
+BINDER_METHOD_ENTRY(streamVolume) \
+BINDER_METHOD_ENTRY(streamMute) \
+BINDER_METHOD_ENTRY(setMode) \
+BINDER_METHOD_ENTRY(setMicMute) \
+BINDER_METHOD_ENTRY(getMicMute) \
+BINDER_METHOD_ENTRY(setRecordSilenced) \
+BINDER_METHOD_ENTRY(setParameters) \
+BINDER_METHOD_ENTRY(getParameters) \
+BINDER_METHOD_ENTRY(registerClient) \
+BINDER_METHOD_ENTRY(getInputBufferSize) \
+BINDER_METHOD_ENTRY(openOutput) \
+BINDER_METHOD_ENTRY(openDuplicateOutput) \
+BINDER_METHOD_ENTRY(closeOutput) \
+BINDER_METHOD_ENTRY(suspendOutput) \
+BINDER_METHOD_ENTRY(restoreOutput) \
+BINDER_METHOD_ENTRY(openInput) \
+BINDER_METHOD_ENTRY(closeInput) \
+BINDER_METHOD_ENTRY(invalidateStream) \
+BINDER_METHOD_ENTRY(setVoiceVolume) \
+BINDER_METHOD_ENTRY(getRenderPosition) \
+BINDER_METHOD_ENTRY(getInputFramesLost) \
+BINDER_METHOD_ENTRY(newAudioUniqueId) \
+BINDER_METHOD_ENTRY(acquireAudioSessionId) \
+BINDER_METHOD_ENTRY(releaseAudioSessionId) \
+BINDER_METHOD_ENTRY(queryNumberEffects) \
+BINDER_METHOD_ENTRY(queryEffect) \
+BINDER_METHOD_ENTRY(getEffectDescriptor) \
+BINDER_METHOD_ENTRY(createEffect) \
+BINDER_METHOD_ENTRY(moveEffects) \
+BINDER_METHOD_ENTRY(loadHwModule) \
+BINDER_METHOD_ENTRY(getPrimaryOutputSamplingRate) \
+BINDER_METHOD_ENTRY(getPrimaryOutputFrameCount) \
+BINDER_METHOD_ENTRY(setLowRamDevice) \
+BINDER_METHOD_ENTRY(getAudioPort) \
+BINDER_METHOD_ENTRY(createAudioPatch) \
+BINDER_METHOD_ENTRY(releaseAudioPatch) \
+BINDER_METHOD_ENTRY(listAudioPatches) \
+BINDER_METHOD_ENTRY(setAudioPortConfig) \
+BINDER_METHOD_ENTRY(getAudioHwSyncForSession) \
+BINDER_METHOD_ENTRY(systemReady) \
+BINDER_METHOD_ENTRY(audioPolicyReady) \
+BINDER_METHOD_ENTRY(frameCountHAL) \
+BINDER_METHOD_ENTRY(getMicrophones) \
+BINDER_METHOD_ENTRY(setMasterBalance) \
+BINDER_METHOD_ENTRY(getMasterBalance) \
+BINDER_METHOD_ENTRY(setEffectSuspended) \
+BINDER_METHOD_ENTRY(setAudioHalPids) \
+BINDER_METHOD_ENTRY(setVibratorInfos) \
+BINDER_METHOD_ENTRY(updateSecondaryOutputs) \
+BINDER_METHOD_ENTRY(getMmapPolicyInfos) \
+BINDER_METHOD_ENTRY(getAAudioMixerBurstCount) \
+BINDER_METHOD_ENTRY(getAAudioHardwareBurstMinUsec) \
+BINDER_METHOD_ENTRY(setDeviceConnectedState) \
+
+// singleton for Binder Method Statistics for IAudioFlinger
+static auto& getIAudioFlingerStatistics() {
+    using Code = android::AudioFlingerServerAdapter::Delegate::TransactionCode;
+
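+    // X-macro expansion: each BINDER_METHOD_ENTRY(name) in the list above expands to a
+    // {TransactionCode, "name"} pair below, so the statistics table stays in sync with
+    // the macro list by construction.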
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+    {(Code)media::BnAudioFlingerService::TRANSACTION_##ENTRY, #ENTRY},
+
+    static mediautils::MethodStatistics<Code> methodStatistics{
+        IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
+        METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+    };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+    return methodStatistics;
+}
+
 class DevicesFactoryHalCallbackImpl : public DevicesFactoryHalCallback {
   public:
     void onNewDevicesAvailable() override {
@@ -269,7 +363,7 @@
     mMediaLogNotifier->run("MediaLogNotifier");
     std::vector<pid_t> halPids;
     mDevicesFactoryHal->getHalPids(&halPids);
-    TimeCheck::setAudioHalPids(halPids);
+    mediautils::TimeCheck::setAudioHalPids(halPids);
 
     // Notify that we have started (also called when audioserver service restarts)
     mediametrics::LogItem(mMetricsId)
@@ -301,10 +395,15 @@
 
     mDevicesFactoryHalCallback = new DevicesFactoryHalCallbackImpl;
     mDevicesFactoryHal->setCallbackOnce(mDevicesFactoryHalCallback);
+
+    if (mDevicesFactoryHal->getHalVersion() <= MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION) {
+        mAAudioBurstsPerBuffer = getAAudioMixerBurstCountFromSystemProperty();
+        mAAudioHwBurstMinMicros = getAAudioHardwareBurstMinUsecFromSystemProperty();
+    }
 }
 
 status_t AudioFlinger::setAudioHalPids(const std::vector<pid_t>& pids) {
-  TimeCheck::setAudioHalPids(pids);
+  mediautils::TimeCheck::setAudioHalPids(pids);
   return NO_ERROR;
 }
 
@@ -336,6 +435,62 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::getMmapPolicyInfos(
+            AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
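+    // The result is cached per policy type; only the first query for a given type
+    // reaches the HAL modules (or the system properties on older HAL versions).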
+    Mutex::Autolock _l(mLock);
+    if (const auto it = mPolicyInfos.find(policyType); it != mPolicyInfos.end()) {
+        *policyInfos = it->second;
+        return NO_ERROR;
+    }
+    if (mDevicesFactoryHal->getHalVersion() > MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION) {
+        AutoMutex lock(mHardwareLock);
+        for (size_t i = 0; i < mAudioHwDevs.size(); ++i) {
+            AudioHwDevice *dev = mAudioHwDevs.valueAt(i);
+            std::vector<AudioMMapPolicyInfo> infos;
+            status_t status = dev->getMmapPolicyInfos(policyType, &infos);
+            if (status != NO_ERROR) {
+                ALOGE("Failed to query mmap policy info of %d, error %d",
+                      mAudioHwDevs.keyAt(i), status);
+                continue;
+            }
+            policyInfos->insert(policyInfos->end(), infos.begin(), infos.end());
+        }
+        mPolicyInfos[policyType] = *policyInfos;
+    } else {
+        getMmapPolicyInfosFromSystemProperty(policyType, policyInfos);
+        mPolicyInfos[policyType] = *policyInfos;
+    }
+    return NO_ERROR;
+}
+
+int32_t AudioFlinger::getAAudioMixerBurstCount() {
+    Mutex::Autolock _l(mLock);
+    return mAAudioBurstsPerBuffer;
+}
+
+int32_t AudioFlinger::getAAudioHardwareBurstMinUsec() {
+    Mutex::Autolock _l(mLock);
+    return mAAudioHwBurstMinMicros;
+}
+
+status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) {
+    status_t final_result = NO_INIT;
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    mHardwareStatus = AUDIO_HW_SET_CONNECTED_STATE;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+        status_t result = dev->setConnectedState(port, connected);
+        // Same logic as with setParameter: it's a success if at least one
+        // HAL module accepts the update.
+        if (final_result != NO_ERROR) {
+            final_result = result;
+        }
+    }
+    mHardwareStatus = AUDIO_HW_IDLE;
+    return final_result;
+}
+
 // getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
 std::optional<media::AudioVibratorInfo> AudioFlinger::getDefaultVibratorInfo_l() {
     if (mAudioVibratorInfos.empty()) {
@@ -430,14 +585,14 @@
         fullConfig.channel_mask = config->channel_mask;
         fullConfig.format = config->format;
         std::vector<audio_io_handle_t> secondaryOutputs;
-
+        bool isSpatialized;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
                                             &streamType, client.attributionSource,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
-                                            deviceId, &portId, &secondaryOutputs);
+                                            deviceId, &portId, &secondaryOutputs, &isSpatialized);
         ALOGW_IF(!secondaryOutputs.empty(),
                  "%s does not support secondary outputs, ignoring them", __func__);
     } else {
@@ -567,10 +722,12 @@
     String8 result;
 
     result.append("Clients:\n");
+    result.append("   pid    heap_size\n");
     for (size_t i = 0; i < mClients.size(); ++i) {
         sp<Client> client = mClients.valueAt(i).promote();
         if (client != 0) {
-            result.appendFormat("  pid: %d\n", client->pid());
+            result.appendFormat("%6d %12zu\n", client->pid(),
+                    client->heap()->getMemoryHeap()->getSize());
         }
     }
 
@@ -758,6 +915,36 @@
             std::string s = GetUnreachableMemoryString(true /* contents */, 100 /* limit */);
             write(fd, s.c_str(), s.size());
         }
+        {
+            std::string timeCheckStats = getIAudioFlingerStatistics().dump();
+            dprintf(fd, "\nIAudioFlinger binder call profile:\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+
+            extern mediautils::MethodStatistics<int>& getIEffectStatistics();
+            timeCheckStats = getIEffectStatistics().dump();
+            dprintf(fd, "\nIEffect binder call profile:\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+
+            // Automatically fetch HIDL statistics.
+            std::shared_ptr<std::vector<std::string>> hidlClassNames =
+                    mediautils::getStatisticsClassesForModule(
+                            METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL);
+            if (hidlClassNames) {
+                for (const auto& className : *hidlClassNames) {
+                    auto stats = mediautils::getStatisticsForClass(className);
+                    if (stats) {
+                        timeCheckStats = stats->dump();
+                        dprintf(fd, "\n%s binder call profile:\n", className.c_str());
+                        write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+                    }
+                }
+            }
+
+            timeCheckStats = mediautils::TimeCheck::toString();
+            dprintf(fd, "\nTimeCheck:\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+            dprintf(fd, "\n");
+        }
     }
     return NO_ERROR;
 }
@@ -847,6 +1034,7 @@
     audio_stream_type_t streamType;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     std::vector<audio_io_handle_t> secondaryOutputs;
+    bool isSpatialized = false;
 
     // TODO b/182392553: refactor or make clearer
     pid_t clientPid =
@@ -890,7 +1078,8 @@
     output.selectedDeviceId = input.selectedDeviceId;
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             adjAttributionSource, &input.config, input.flags,
-                                            &output.selectedDeviceId, &portId, &secondaryOutputs);
+                                            &output.selectedDeviceId, &portId, &secondaryOutputs,
+                                            &isSpatialized);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -956,7 +1145,7 @@
                                       input.notificationsPerBuffer, input.speed,
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
-                                      &lStatus, portId, input.audioTrackCallback);
+                                      &lStatus, portId, input.audioTrackCallback, isSpatialized);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -998,8 +1187,9 @@
                 }
             }
         }
-
-        setAudioHwSyncForSession_l(thread, sessionId);
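+        // Only fetch an HW A/V sync ID when the output actually uses hardware A/V sync.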
+        if ((output.flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == AUDIO_OUTPUT_FLAG_HW_AV_SYNC) {
+            setAudioHwSyncForSession_l(thread, sessionId);
+        }
     }
 
     if (lStatus != NO_ERROR) {
@@ -1874,13 +2064,13 @@
     }
 }
 
-void AudioFlinger::ioConfigChanged(audio_io_config_event event,
+void AudioFlinger::ioConfigChanged(audio_io_config_event_t event,
                                    const sp<AudioIoDescriptor>& ioDesc,
                                    pid_t pid) {
+    media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
+            legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(event));
     media::AudioIoDescriptor descAidl = VALUE_OR_FATAL(
             legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
-    media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
-            legacy2aidl_audio_io_config_event_AudioIoConfigEvent(event));
 
     Mutex::Autolock _l(mClientLock);
     size_t size = mNotificationClients.size();
@@ -2143,6 +2333,20 @@
             goto Exit;
         }
 
+        if (recordTrack->isFastTrack()) {
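+            // A fast track's server-side config matches the thread (HAL) stream parameters.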
+            output.serverConfig = {
+                    thread->sampleRate(),
+                    thread->channelMask(),
+                    thread->format()
+            };
+        } else {
+            output.serverConfig = {
+                    recordTrack->sampleRate(),
+                    recordTrack->channelMask(),
+                    recordTrack->format()
+            };
+        }
+
         // Check if one effect chain was awaiting for an AudioRecord to be created on this
         // session and move it to this thread.
         sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
@@ -2273,6 +2477,18 @@
         mHardwareStatus = AUDIO_HW_IDLE;
     }
 
+    if (mDevicesFactoryHal->getHalVersion() > MAX_AAUDIO_PROPERTY_DEVICE_HAL_VERSION) {
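+        // Aggregate AAudio MMAP parameters across modules: keep the largest mixer burst count
+        // and the smallest non-zero hardware burst duration.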
+        if (int32_t mixerBursts = dev->getAAudioMixerBurstCount();
+            mixerBursts > 0 && mixerBursts > mAAudioBurstsPerBuffer) {
+            mAAudioBurstsPerBuffer = mixerBursts;
+        }
+        if (int32_t hwBurstMinMicros = dev->getAAudioHardwareBurstMinUsec();
+            hwBurstMinMicros > 0
+            && (hwBurstMinMicros < mAAudioHwBurstMinMicros || mAAudioHwBurstMinMicros == 0)) {
+            mAAudioHwBurstMinMicros = hwBurstMinMicros;
+        }
+    }
+
     mAudioHwDevs.add(handle, audioDevice);
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
@@ -2397,21 +2613,17 @@
     if (dev == nullptr) {
         return AUDIO_HW_SYNC_INVALID;
     }
-    String8 reply;
-    AudioParameter param;
-    if (dev->getParameters(String8(AudioParameter::keyHwAvSync), &reply) == OK) {
-        param = AudioParameter(reply);
-    }
 
-    int value;
-    if (param.getInt(String8(AudioParameter::keyHwAvSync), value) != NO_ERROR) {
+    error::Result<audio_hw_sync_t> result = dev->getHwAvSync();
+    if (!result.ok()) {
         ALOGW("getAudioHwSyncForSession error getting sync for session %d", sessionId);
         return AUDIO_HW_SYNC_INVALID;
     }
+    audio_hw_sync_t value = VALUE_OR_FATAL(result);
 
     // allow only one session for a given HW A/V sync ID.
     for (size_t i = 0; i < mHwAvSyncIds.size(); i++) {
-        if (mHwAvSyncIds.valueAt(i) == (audio_hw_sync_t)value) {
+        if (mHwAvSyncIds.valueAt(i) == value) {
             ALOGV("getAudioHwSyncForSession removing ID %d for session %d",
                   value, mHwAvSyncIds.keyAt(i));
             mHwAvSyncIds.removeItemsAt(i);
@@ -2514,7 +2726,7 @@
 {
     AudioHwDevice *outHwDev = findSuitableHwDev_l(module, deviceType);
     if (outHwDev == NULL) {
-        return 0;
+        return nullptr;
     }
 
     if (*output == AUDIO_IO_HANDLE_NONE) {
@@ -2523,9 +2735,17 @@
         // Audio Policy does not currently request a specific output handle.
         // If this is ever needed, see openInput_l() for example code.
         ALOGE("openOutput_l requested output handle %d is not AUDIO_IO_HANDLE_NONE", *output);
-        return 0;
+        return nullptr;
     }
 
+#ifndef MULTICHANNEL_EFFECT_CHAIN
+    if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
+        ALOGE("openOutput_l() cannot create spatializer thread "
+                "without #define MULTICHANNEL_EFFECT_CHAIN");
+        return nullptr;
+    }
+#endif
+
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
 
     // FOR TESTING ONLY:
@@ -2570,18 +2790,11 @@
             return thread;
         } else {
             sp<PlaybackThread> thread;
-            //TODO: b/193496180 use spatializer flag at audio HAL when available
-            if (flags == (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_FAST
-                                                    | AUDIO_OUTPUT_FLAG_DEEP_BUFFER)) {
-#ifdef MULTICHANNEL_EFFECT_CHAIN
+            if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
                 thread = new SpatializerThread(this, outputStream, *output,
                                                     mSystemReady, mixerConfig);
-                ALOGD("openOutput_l() created spatializer output: ID %d thread %p",
+                ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
                       *output, thread.get());
-#else
-                ALOGE("openOutput_l() cannot create spatializer thread "
-                        "without #define MULTICHANNEL_EFFECT_CHAIN");
-#endif
             } else if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
                 thread = new OffloadThread(this, outputStream, *output, mSystemReady);
                 ALOGV("openOutput_l() created offload output: ID %d thread %p",
@@ -2607,7 +2820,7 @@
         }
     }
 
-    return 0;
+    return nullptr;
 }
 
 status_t AudioFlinger::openOutput(const media::OpenOutputRequest& request,
@@ -2616,9 +2829,9 @@
     audio_module_handle_t module = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_module_handle_t(request.module));
     audio_config_t halConfig = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioConfig_audio_config_t(request.halConfig));
+            aidl2legacy_AudioConfig_audio_config_t(request.halConfig, false /*isInput*/));
     audio_config_base_t mixerConfig = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioConfigBase_audio_config_base_t(request.mixerConfig));
+            aidl2legacy_AudioConfigBase_audio_config_base_t(request.mixerConfig, false/*isInput*/));
     sp<DeviceDescriptorBase> device = VALUE_OR_RETURN_STATUS(
             aidl2legacy_DeviceDescriptorBase(request.device));
     audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
@@ -2671,8 +2884,8 @@
             mmapThread->ioConfigChanged(AUDIO_OUTPUT_OPENED);
         }
         response->output = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-        response->config =
-                VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(halConfig));
+        response->config = VALUE_OR_RETURN_STATUS(
+                legacy2aidl_audio_config_t_AudioConfig(halConfig, false /*isInput*/));
         response->latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(latencyMs));
         response->flags = VALUE_OR_RETURN_STATUS(
                 legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
@@ -2758,9 +2971,7 @@
             mMmapThreads.removeItem(output);
             ALOGD("closing mmapThread %p", mmapThread.get());
         }
-        const sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
-        ioDesc->mIoHandle = output;
-        ioConfigChanged(AUDIO_OUTPUT_CLOSED, ioDesc);
+        ioConfigChanged(AUDIO_OUTPUT_CLOSED, sp<AudioIoDescriptor>::make(output));
         mPatchPanel.notifyStreamClosed(output);
     }
     // The thread entity (active unit of execution) is no longer running here,
@@ -2833,16 +3044,16 @@
 {
     Mutex::Autolock _l(mLock);
 
-    if (request.device.type == AUDIO_DEVICE_NONE) {
+    AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioDeviceTypeAddress(request.device));
+    if (device.mType == AUDIO_DEVICE_NONE) {
         return BAD_VALUE;
     }
 
     audio_io_handle_t input = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_io_handle_t(request.input));
     audio_config_t config = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioConfig_audio_config_t(request.config));
-    AudioDeviceTypeAddr device = VALUE_OR_RETURN_STATUS(
-            aidl2legacy_AudioDeviceTypeAddress(request.device));
+            aidl2legacy_AudioConfig_audio_config_t(request.config, true /*isInput*/));
 
     sp<ThreadBase> thread = openInput_l(
             VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_module_handle_t(request.module)),
@@ -2850,13 +3061,14 @@
             &config,
             device.mType,
             device.address().c_str(),
-            VALUE_OR_RETURN_STATUS(aidl2legacy_AudioSourceType_audio_source_t(request.source)),
+            VALUE_OR_RETURN_STATUS(aidl2legacy_AudioSource_audio_source_t(request.source)),
             VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_input_flags_t_mask(request.flags)),
             AUDIO_DEVICE_NONE,
             String8{});
 
     response->input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(input));
-    response->config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(config));
+    response->config = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(config, true /*isInput*/));
     response->device = request.device;
 
     if (thread != 0) {
@@ -3018,9 +3230,7 @@
             dumpToThreadLog_l(mmapThread);
             mMmapThreads.removeItem(input);
         }
-        const sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
-        ioDesc->mIoHandle = input;
-        ioConfigChanged(AUDIO_INPUT_CLOSED, ioDesc);
+        ioConfigChanged(AUDIO_INPUT_CLOSED, sp<AudioIoDescriptor>::make(input));
     }
     // FIXME: calling thread->exit() without mLock held should not be needed anymore now that
     // we have a different lock for notification client
@@ -3679,17 +3889,24 @@
             goto Exit;
         }
     } else if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
-        if (!isAudioServerUid(callingUid)) {
-            ALOGE("%s: only APM can create using AUDIO_SESSION_OUTPUT_STAGE", __func__);
-            lStatus = PERMISSION_DENIED;
-            goto Exit;
-        }
-
         if (io == AUDIO_IO_HANDLE_NONE) {
             ALOGE("%s: APM must specify output when using AUDIO_SESSION_OUTPUT_STAGE", __func__);
             lStatus = BAD_VALUE;
             goto Exit;
         }
+        PlaybackThread *thread = checkPlaybackThread_l(io);
+        if (thread == nullptr) {
+            ALOGE("%s: invalid output %d specified for AUDIO_SESSION_OUTPUT_STAGE", __func__, io);
+            lStatus = BAD_VALUE;
+            goto Exit;
+        }
+        if (!modifyDefaultAudioEffectsAllowed(adjAttributionSource)
+                && !isAudioServerUid(callingUid)) {
+            ALOGE("%s: effect on AUDIO_SESSION_OUTPUT_STAGE not granted for uid %d",
+                    __func__, callingUid);
+            lStatus = PERMISSION_DENIED;
+            goto Exit;
+        }
     } else if (sessionId == AUDIO_SESSION_DEVICE) {
         if (!modifyDefaultAudioEffectsAllowed(adjAttributionSource)) {
             ALOGE("%s: device effect permission denied for uid %d", __func__, callingUid);
@@ -3914,6 +4131,12 @@
 
 Register:
     if (!probe && (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS)) {
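+        // Report ALREADY_EXISTS through the alreadyExists flag and treat it as success.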
+        if (lStatus == ALREADY_EXISTS) {
+            response->alreadyExists = true;
+            lStatus = NO_ERROR;
+        } else {
+            response->alreadyExists = false;
+        }
         // Check CPU and memory usage
         sp<EffectBase> effect = handle->effect().promote();
         if (effect != nullptr) {
@@ -4006,47 +4229,87 @@
     // so that a new chain is created with correct parameters when first effect is added. This is
     // otherwise unnecessary as removeEffect_l() will remove the chain when last effect is
     // removed.
+    // TODO(b/216875016): consider holding the effect chain locks for the duration of the move.
     srcThread->removeEffectChain_l(chain);
 
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
     sp<EffectChain> dstChain;
-    uint32_t strategy = 0; // prevent compiler warning
     sp<EffectModule> effect = chain->getEffectFromId_l(0);
     Vector< sp<EffectModule> > removed;
     status_t status = NO_ERROR;
-    while (effect != 0) {
+    std::string errorString;
+    while (effect != nullptr) {
         srcThread->removeEffect_l(effect);
         removed.add(effect);
         status = dstThread->addEffect_l(effect);
         if (status != NO_ERROR) {
+            errorString = StringPrintf(
+                    "cannot add effect %p to destination thread", effect.get());
             break;
         }
-        // removeEffect_l() has stopped the effect if it was active so it must be restarted
-        if (effect->state() == EffectModule::ACTIVE ||
-                effect->state() == EffectModule::STOPPING) {
-            effect->start();
-        }
         // if the move request is not received from audio policy manager, the effect must be
-        // re-registered with the new strategy and output
-        if (dstChain == 0) {
+        // re-registered with the new strategy and output.
+
+        // We obtain the dstChain once the effect is on the new thread.
+        if (dstChain == nullptr) {
             dstChain = effect->getCallback()->chain().promote();
-            if (dstChain == 0) {
-                ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get());
+            if (dstChain == nullptr) {
+                errorString = StringPrintf("cannot get chain from effect %p", effect.get());
                 status = NO_INIT;
                 break;
             }
-            strategy = dstChain->strategy();
         }
         effect = chain->getEffectFromId_l(0);
     }
 
+    size_t restored = 0;
     if (status != NO_ERROR) {
-        for (size_t i = 0; i < removed.size(); i++) {
-            srcThread->addEffect_l(removed[i]);
+        dstChain.clear(); // dstChain is now from the srcThread (could be recreated).
+        for (const auto& effect : removed) {
+            dstThread->removeEffect_l(effect); // Note: Depending on error location, the last
+                                               // effect may not have been placed on dstThread.
+            if (srcThread->addEffect_l(effect) == NO_ERROR) {
+                ++restored;
+                if (dstChain == nullptr) {
+                    dstChain = effect->getCallback()->chain().promote();
+                }
+            }
         }
     }
 
+    // After all the effects have been moved to new thread (or put back) we restart the effects
+    // because removeEffect_l() has stopped the effect if it is currently active.
+    size_t started = 0;
+    if (dstChain != nullptr && !removed.empty()) {
+        // If we do not take the dstChain lock, it is possible that processing is ongoing
+        // while we are starting the effect.  This can cause glitches with volume,
+        // see b/202360137.
+        dstChain->lock();
+        for (const auto& effect : removed) {
+            if (effect->state() == EffectModule::ACTIVE ||
+                    effect->state() == EffectModule::STOPPING) {
+                ++started;
+                effect->start();
+            }
+        }
+        dstChain->unlock();
+    }
+
+    if (status != NO_ERROR) {
+        if (errorString.empty()) {
+            errorString = StringPrintf("%s: failed status %d", __func__, status);
+        }
+        ALOGW("%s: %s unsuccessful move of session %d from srcThread %p to dstThread %p "
+                "(%zu effects removed from srcThread, %zu effects restored to srcThread, "
+                "%zu effects started)",
+                __func__, errorString.c_str(), sessionId, srcThread, dstThread,
+                removed.size(), restored, started);
+    } else {
+        ALOGD("%s: successful move of session %d from srcThread %p to dstThread %p "
+                "(%zu effects moved, %zu effects started)",
+                __func__, sessionId, srcThread, dstThread, removed.size(), started);
+    }
     return status;
 }
 
@@ -4212,6 +4475,7 @@
         case TransactionCode::SET_AUDIO_PORT_CONFIG:
         case TransactionCode::SET_RECORD_SILENCED:
         case TransactionCode::AUDIO_POLICY_READY:
+        case TransactionCode::SET_DEVICE_CONNECTED_STATE:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
@@ -4279,9 +4543,20 @@
             break;
     }
 
-    std::string tag("IAudioFlinger command " +
-                    std::to_string(static_cast<std::underlying_type_t<TransactionCode>>(code)));
-    TimeCheck check(tag.c_str());
+    const std::string methodName = getIAudioFlingerStatistics().getMethodForCode(code);
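+    // Time the binder call: log a mediametrics item on timeout, otherwise record the latency.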
+    mediautils::TimeCheck check(
+            std::string("IAudioFlinger::").append(methodName),
+            [code, methodName](bool timeout, float elapsedMs) { // don't move methodName.
+        if (timeout) {
+            mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT)
+                .set(AMEDIAMETRICS_PROP_METHODCODE, int64_t(code))
+                .set(AMEDIAMETRICS_PROP_METHODNAME, methodName.c_str())
+                .record();
+        } else {
+            getIAudioFlingerStatistics().event(code, elapsedMs);
+        }
+    });
 
     // Make sure we connect to Audio Policy Service before calling into AudioFlinger:
     //  - AudioFlinger can call into Audio Policy Service with its global mutex held
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 8fcd6e4..8e4383c 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -76,6 +76,7 @@
 #include <media/VolumeShaper.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/Synchronization.h>
+#include <mediautils/ThreadSnapshot.h>
 
 #include <audio_utils/clock.h>
 #include <audio_utils/FdToString.h>
@@ -282,6 +283,16 @@
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs);
 
+    virtual status_t getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos);
+
+    virtual int32_t getAAudioMixerBurstCount();
+
+    virtual int32_t getAAudioHardwareBurstMinUsec();
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected);
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
         const std::function<status_t()>& delegate) override;
 
@@ -750,7 +761,7 @@
               // no range check, AudioFlinger::mLock held
               bool streamMute_l(audio_stream_type_t stream) const
                                 { return mStreamTypes[stream].mute; }
-              void ioConfigChanged(audio_io_config_event event,
+              void ioConfigChanged(audio_io_config_event_t event,
                                    const sp<AudioIoDescriptor>& ioDesc,
                                    pid_t pid = 0);
 
@@ -904,6 +915,7 @@
         AUDIO_HW_SET_MASTER_MUTE,       // set_master_mute
         AUDIO_HW_GET_MASTER_MUTE,       // get_master_mute
         AUDIO_HW_GET_MICROPHONES,       // getMicrophones
+        AUDIO_HW_SET_CONNECTED_STATE,   // setConnectedState
     };
 
     mutable     hardware_call_state                 mHardwareStatus;    // for dump only
@@ -1004,6 +1016,11 @@
 
     // Keep in sync with java definition in media/java/android/media/AudioRecord.java
     static constexpr int32_t kMaxSharedAudioHistoryMs = 5000;
+
+    std::map<media::audio::common::AudioMMapPolicyType,
+             std::vector<media::audio::common::AudioMMapPolicyInfo>> mPolicyInfos;
+    int32_t mAAudioBurstsPerBuffer = 0;
+    int32_t mAAudioHwBurstMinMicros = 0;
 };
 
 #undef INCLUDING_FROM_AUDIOFLINGER_H
diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/AudioHwDevice.cpp
index 16b25f6..dee6161 100644
--- a/services/audioflinger/AudioHwDevice.cpp
+++ b/services/audioflinger/AudioHwDevice.cpp
@@ -29,6 +29,9 @@
 
 namespace android {
 
+using media::audio::common::AudioMMapPolicyInfo;
+using media::audio::common::AudioMMapPolicyType;
+
 // ----------------------------------------------------------------------------
 
 status_t AudioHwDevice::openOutputStream(
@@ -102,5 +105,18 @@
     return mHwDevice->getAudioPort(port);
 }
 
+status_t AudioHwDevice::getMmapPolicyInfos(
+            AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) const {
+    return mHwDevice->getMmapPolicyInfos(policyType, policyInfos);
+}
+
+int32_t AudioHwDevice::getAAudioMixerBurstCount() const {
+    return mHwDevice->getAAudioMixerBurstCount();
+}
+
+int32_t AudioHwDevice::getAAudioHardwareBurstMinUsec() const {
+    return mHwDevice->getAAudioHardwareBurstMinUsec();
+}
+
 
 }; // namespace android
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index fc2c693..8c5d239 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -22,6 +22,8 @@
 #include <stdlib.h>
 #include <sys/types.h>
 
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <media/audiohal/DeviceHalInterface.h>
 #include <utils/Errors.h>
 #include <system/audio.h>
@@ -85,6 +87,14 @@
 
     status_t getAudioPort(struct audio_port_v7 *port) const;
 
+    status_t getMmapPolicyInfos(
+            media::audio::common::AudioMMapPolicyType policyType,
+            std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) const;
+
+    int32_t getAAudioMixerBurstCount() const;
+
+    int32_t getAAudioHardwareBurstMinUsec() const;
+
 private:
     const audio_module_handle_t mHandle;
     const char * const          mModuleName;
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index d8565bd..65ec0e8 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -39,6 +39,7 @@
         , mRateMultiplier(1)
         , mHalFormatHasProportionalFrames(false)
         , mHalFrameSize(0)
+        , mExpectRetrograde(false)
 {
 }
 
@@ -69,8 +70,12 @@
     const uint32_t truncatedPosition = (uint32_t)mRenderPosition;
     int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
     (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
+
     if (deltaHalPosition > 0) {
         mRenderPosition += deltaHalPosition;
+    } else if (mExpectRetrograde) {
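+        // presentationComplete() signalled a HAL position reset; accept one retrograde step.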
+        mExpectRetrograde = false;
+        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
     }
     // Scale from HAL sample rate to application rate.
     *frames = mRenderPosition / mRateMultiplier;
@@ -187,6 +192,7 @@
 int AudioStreamOut::flush()
 {
     mRenderPosition = 0;
+    mExpectRetrograde = false;
     mFramesWritten = 0;
     mFramesWrittenAtStandby = 0;
     status_t result = stream->flush();
@@ -196,6 +202,7 @@
 int AudioStreamOut::standby()
 {
     mRenderPosition = 0;
+    mExpectRetrograde = false;
     mFramesWrittenAtStandby = mFramesWritten;
     return stream->standby();
 }
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
index 565f43a..82fe238 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/AudioStreamOut.h
@@ -93,13 +93,21 @@
     virtual status_t flush();
     virtual status_t standby();
 
+    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+    // transitioning between tracks.
+    // The HAL resets the frame position without flush/stop being called, but calls back prior to
+    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+    // mRenderPosition.
+    virtual void presentationComplete() { mExpectRetrograde = true; }
+
 protected:
     uint64_t             mFramesWritten; // reset by flush
     uint64_t             mFramesWrittenAtStandby;
-    uint64_t             mRenderPosition; // reset by flush or standby
+    uint64_t             mRenderPosition; // reset by flush, standby, or presentation complete
     int                  mRateMultiplier;
     bool                 mHalFormatHasProportionalFrames;
     size_t               mHalFrameSize;
+    bool                 mExpectRetrograde; // see presentationComplete
 };
 
 } // namespace android
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 8d04edb..e6d7cf7 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -39,7 +39,9 @@
 #include <media/ShmemCompat.h>
 #include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
+#include <mediautils/TimeCheck.h>
 
 #include "AudioFlinger.h"
 
@@ -1751,6 +1753,47 @@
     disconnect(false);
 }
 
+// Creates an association between Binder code to name for IEffect.
+#define IEFFECT_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(enable) \
+BINDER_METHOD_ENTRY(disable) \
+BINDER_METHOD_ENTRY(command) \
+BINDER_METHOD_ENTRY(disconnect) \
+BINDER_METHOD_ENTRY(getCblk) \
+
+// singleton for Binder Method Statistics for IEffect
+mediautils::MethodStatistics<int>& getIEffectStatistics() {
+    using Code = int;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+        {(Code)media::BnEffect::TRANSACTION_##ENTRY, #ENTRY},
+
+    static mediautils::MethodStatistics<Code> methodStatistics{
+        IEFFECT_BINDER_METHOD_MACRO_LIST
+        METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+    };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+    return methodStatistics;
+}
+
+status_t AudioFlinger::EffectHandle::onTransact(
+        uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
+    const std::string methodName = getIEffectStatistics().getMethodForCode(code);
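+    // Record per-method latency for IEffect binder calls; timeouts are disabled (timeoutMs == 0).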
+    mediautils::TimeCheck check(
+            std::string("IEffect::").append(methodName),
+            [code](bool timeout, float elapsedMs) {
+        if (timeout) {
+            ; // we don't time out right now on the effect interface.
+        } else {
+            getIEffectStatistics().event(code, elapsedMs);
+        }
+    }, 0 /* timeoutMs */);
+    return BnEffect::onTransact(code, data, reply, flags);
+}
+
 status_t AudioFlinger::EffectHandle::initCheck()
 {
     return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
@@ -3260,6 +3303,8 @@
         } else {
             mHalEffect->setDevices({mDevice});
         }
+        mHalEffect->configure();
+
         *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
                                    mNotifyFramesProcessed);
         status = (*handle)->initCheck();
@@ -3308,8 +3353,14 @@
 }
 
 void AudioFlinger::DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
-    Mutex::Autolock _l(mProxyLock);
-    mEffectHandles.erase(patchHandle);
+    sp<EffectHandle> effect;
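+    // Hold the handle outside the lock scope so its destruction happens without mProxyLock held.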
+    {
+        Mutex::Autolock _l(mProxyLock);
+        if (mEffectHandles.find(patchHandle) != mEffectHandles.end()) {
+            effect = mEffectHandles.at(patchHandle);
+            mEffectHandles.erase(patchHandle);
+        }
+    }
 }
 
 
@@ -3317,6 +3368,7 @@
 {
     Mutex::Autolock _l(mProxyLock);
     if (effect == mHalEffect) {
+        mHalEffect->release_l();
         mHalEffect.clear();
         mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
     }
@@ -3464,7 +3516,7 @@
     if (proxy == nullptr) {
         return NO_INIT;
     }
-    return proxy->addEffectToHal(effect);
+    return proxy->removeEffectFromHal(effect);
 }
 
 bool AudioFlinger::DeviceEffectProxy::ProxyCallback::isOutput() const {
@@ -3516,4 +3568,22 @@
     return proxy->channelCount();
 }
 
+void AudioFlinger::DeviceEffectProxy::ProxyCallback::onEffectEnable(
+        const sp<EffectBase>& effectBase) {
+    sp<EffectModule> effect = effectBase->asEffectModule();
+    if (effect == nullptr) {
+        return;
+    }
+    effect->start();
+}
+
+void AudioFlinger::DeviceEffectProxy::ProxyCallback::onEffectDisable(
+        const sp<EffectBase>& effectBase) {
+    sp<EffectModule> effect = effectBase->asEffectModule();
+    if (effect == nullptr) {
+        return;
+    }
+    effect->stop();
+}
+
 } // namespace android
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 5ebf483..42614cc 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -356,6 +356,8 @@
             const sp<media::IEffectClient>& effectClient,
             int32_t priority, bool notifyFramesProcessed);
     virtual ~EffectHandle();
+    status_t onTransact(
+            uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
     virtual status_t initCheck();
 
     // IEffect
@@ -766,8 +768,8 @@
         void resetVolume() override {}
         product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
         int32_t activeTrackCnt() const override { return 0; }
-        void onEffectEnable(const sp<EffectBase>& effect __unused) override {}
-        void onEffectDisable(const sp<EffectBase>& effect __unused) override {}
+        void onEffectEnable(const sp<EffectBase>& effect __unused) override;
+        void onEffectDisable(const sp<EffectBase>& effect __unused) override;
 
         wp<EffectChain> chain() const override { return nullptr; }
 
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index 034d161..17d4c37 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,4 @@
-gkasten@google.com
 hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
+philburk@google.com
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 93118b8..45dd258 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -575,6 +575,12 @@
 
     // create a special playback track to render to playback thread.
     // this track is given the same buffer as the PatchRecord buffer
+
+    // Default behaviour is to start as soon as possible to have the lowest possible latency,
+    // even if it might glitch.
+    // Disable this behaviour for the FM tuner source when no fast capture/mixer is available.
+    const bool isFmBridge = mAudioPatch.sources[0].ext.device.type == AUDIO_DEVICE_IN_FM_TUNER;
+    const size_t frameCountToBeReady = isFmBridge && !usePassthruPatchRecord ? frameCount / 4 : 1;
     sp<PlaybackThread::PatchTrack> tempPatchTrack = new PlaybackThread::PatchTrack(
                                            mPlayback.thread().get(),
                                            streamType,
@@ -584,7 +590,9 @@
                                            frameCount,
                                            tempRecordTrack->buffer(),
                                            tempRecordTrack->bufferSize(),
-                                           outputFlags);
+                                           outputFlags,
+                                           {} /*timeout*/,
+                                           frameCountToBeReady);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 3cce998..6a138bb 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -82,7 +82,8 @@
                                 /** default behaviour is to start when there are as many frames
                                   * ready as possible (aka. Buffer is full). */
                                 size_t frameCountToBeReady = SIZE_MAX,
-                                float speed = 1.0f);
+                                float speed = 1.0f,
+                                bool isSpatialized = false);
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -193,8 +194,15 @@
        }
     }
 
+    static bool checkServerLatencySupported(
+            audio_format_t format, audio_output_flags_t flags) {
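+        // Server latency is only supported for linear PCM tracks without hardware A/V sync.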
+        return audio_is_linear_pcm(format)
+                && (flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == 0;
+    }
+
     audio_output_flags_t getOutputFlags() const { return mFlags; }
     float getSpeed() const { return mSpeed; }
+    bool isSpatialized() const override { return mIsSpatialized; }
 
 protected:
     // for numerous
@@ -345,6 +353,7 @@
     audio_output_flags_t mFlags;
     TeePatches  mTeePatches;
     const float         mSpeed;
+    const bool          mIsSpatialized;
 };  // end of Track
 
 
diff --git a/services/audioflinger/PropertyUtils.cpp b/services/audioflinger/PropertyUtils.cpp
new file mode 100644
index 0000000..65e2533
--- /dev/null
+++ b/services/audioflinger/PropertyUtils.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <aaudio/AAudio.h>
+#include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
+#include <cutils/properties.h>
+
+#include "PropertyUtils.h"
+
+namespace android {
+
+using media::audio::common::AudioMMapPolicy;
+using media::audio::common::AudioMMapPolicyType;
+using media::audio::common::AudioMMapPolicyInfo;
+
+std::string getMmapPolicyProperty(AudioMMapPolicyType policyType) {
+    switch (policyType) {
+        case AudioMMapPolicyType::DEFAULT:
+            return "aaudio.mmap_policy";
+        case AudioMMapPolicyType::EXCLUSIVE:
+            return "aaudio.mmap_exclusive_policy";
+        default:
+            return "";
+    }
+}
+
+int getDefaultPolicyFromType(AudioMMapPolicyType policyType) {
+    switch (policyType) {
+        case AudioMMapPolicyType::EXCLUSIVE:
+            return AAUDIO_UNSPECIFIED;
+        case AudioMMapPolicyType::DEFAULT:
+        default:
+            return AAUDIO_POLICY_NEVER;
+    }
+}
+
+AudioMMapPolicy legacy2aidl_aaudio_policy_t_AudioMMapPolicy(aaudio_policy_t legacy) {
+    switch (legacy) {
+        case AAUDIO_POLICY_NEVER:
+            return AudioMMapPolicy::NEVER;
+        case AAUDIO_POLICY_AUTO:
+            return AudioMMapPolicy::AUTO;
+        case AAUDIO_POLICY_ALWAYS:
+            return AudioMMapPolicy::ALWAYS;
+        case AAUDIO_UNSPECIFIED:
+            return AudioMMapPolicy::UNSPECIFIED;
+        default:
+            ALOGE("%s unknown aaudio policy: %d", __func__, legacy);
+            return AudioMMapPolicy::UNSPECIFIED;
+    }
+}
+
+status_t getMmapPolicyInfosFromSystemProperty(
+        AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
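+    // Read the policy from the matching aaudio system property and convert it to the AIDL type.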
+    AudioMMapPolicyInfo policyInfo;
+    const std::string propertyStr = getMmapPolicyProperty(policyType);
+    if (propertyStr.empty()) {
+        return BAD_VALUE;
+    }
+    policyInfo.mmapPolicy = legacy2aidl_aaudio_policy_t_AudioMMapPolicy(
+            property_get_int32(propertyStr.c_str(), getDefaultPolicyFromType(policyType)));
+    policyInfos->push_back(policyInfo);
+    return NO_ERROR;
+}
+
+int32_t getAAudioMixerBurstCountFromSystemProperty() {
+    static const int32_t sDefaultBursts = 2; // arbitrary, use 2 for double buffered
+    static const int32_t sMaxBursts = 1024; // arbitrary
+    static const char* sPropMixerBursts = "aaudio.mixer_bursts";
+    int32_t prop = property_get_int32(sPropMixerBursts, sDefaultBursts);
+    if (prop <= 0 || prop > sMaxBursts) {
+        ALOGE("%s: invalid value %d, use default %d", __func__, prop, sDefaultBursts);
+        prop = sDefaultBursts;
+    }
+    return prop;
+}
+
+int32_t getAAudioHardwareBurstMinUsecFromSystemProperty() {
+    static const int32_t sDefaultMicros = 1000; // arbitrary
+    static const int32_t sMaxMicros = 1000 * 1000; // arbitrary
+    static const char* sPropHwBurstMinUsec = "aaudio.hw_burst_min_usec";
+    int32_t prop = property_get_int32(sPropHwBurstMinUsec, sDefaultMicros);
+    if (prop <= 0 || prop > sMaxMicros) {
+        ALOGE("%s invalid value %d, use default %d", __func__, prop, sDefaultMicros);
+        prop = sDefaultMicros;
+    }
+    return prop;
+}
+
+} // namespace android
diff --git a/services/audioflinger/PropertyUtils.h b/services/audioflinger/PropertyUtils.h
new file mode 100644
index 0000000..fbf651a
--- /dev/null
+++ b/services/audioflinger/PropertyUtils.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+
+namespace android {
+
+status_t getMmapPolicyInfosFromSystemProperty(
+        media::audio::common::AudioMMapPolicyType policyType,
+        std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos);
+
+int32_t getAAudioMixerBurstCountFromSystemProperty();
+
+int32_t getAAudioHardwareBurstMinUsecFromSystemProperty();
+
+} // namespace android
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9fa
--- /dev/null
+++ b/services/audioflinger/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+  "presubmit": [
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
+    }
+  ]
+}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 16fe29b..07e82a8 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -65,6 +65,7 @@
 #include <media/nbaio/PipeReader.h>
 #include <media/nbaio/SourceAudioBufferProvider.h>
 #include <mediautils/BatteryNotifier.h>
+#include <mediautils/Process.h>
 
 #include <audiomanager/AudioManager.h>
 #include <powermanager/PowerManager.h>
@@ -626,7 +627,7 @@
     return status;
 }
 
-void AudioFlinger::ThreadBase::sendIoConfigEvent(audio_io_config_event event, pid_t pid,
+void AudioFlinger::ThreadBase::sendIoConfigEvent(audio_io_config_event_t event, pid_t pid,
                                                  audio_port_handle_t portId)
 {
     Mutex::Autolock _l(mLock);
@@ -634,7 +635,7 @@
 }
 
 // sendIoConfigEvent_l() must be called with ThreadBase::mLock held
-void AudioFlinger::ThreadBase::sendIoConfigEvent_l(audio_io_config_event event, pid_t pid,
+void AudioFlinger::ThreadBase::sendIoConfigEvent_l(audio_io_config_event_t event, pid_t pid,
                                                    audio_port_handle_t portId)
 {
     // The audio statistics history is exponentially weighted to forget events
@@ -644,6 +645,7 @@
     mIoJitterMs.reset();
     mLatencyMs.reset();
     mProcessTimeMs.reset();
+    mMonopipePipeDepthStats.reset();
     mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
 
     sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
@@ -922,6 +924,20 @@
 
     dprintf(fd, "  Local log:\n");
     mLocalLog.dump(fd, "   " /* prefix */, 40 /* lines */);
+
+    // The --all option additionally dumps the thread snapshot (always dumped for SPATIALIZER).
+    bool dumpAll = false;
+    for (const auto &arg : args) {
+        if (arg == String16("--all")) {
+            dumpAll = true;
+        }
+    }
+    if (dumpAll || type() == SPATIALIZER) {
+        const std::string sched = mThreadSnapshot.toString();
+        if (!sched.empty()) {
+            (void)write(fd, sched.c_str(), sched.size());
+        }
+    }
 }
 
 void AudioFlinger::ThreadBase::dumpBase_l(int fd, const Vector<String16>& args __unused)
@@ -962,7 +978,8 @@
             || mType == MIXER
             || mType == DUPLICATING
             || mType == DIRECT
-            || mType == OFFLOAD) {
+            || mType == OFFLOAD
+            || mType == SPATIALIZER) {
         dprintf(fd, "  Timestamp stats: %s\n", mTimestampVerifier.toString().c_str());
         dprintf(fd, "  Timestamp corrected: %s\n", isTimestampCorrectionEnabled() ? "yes" : "no");
     }
@@ -988,6 +1005,12 @@
                 isOutput() ? "write" : "read",
                 mLatencyMs.toString().c_str());
     }
+
+    if (mMonopipePipeDepthStats.getN() > 0) {
+        dprintf(fd, "  Monopipe %s pipe depth stats: %s\n",
+            isOutput() ? "write" : "read",
+            mMonopipePipeDepthStats.toString().c_str());
+    }
 }
 
 void AudioFlinger::ThreadBase::dumpEffectChains_l(int fd, const Vector<String16>& args)
@@ -1946,6 +1969,12 @@
         item->setDouble(MM_PREFIX "latencyMs.mean", mLatencyMs.getMean());
         item->setDouble(MM_PREFIX "latencyMs.std", mLatencyMs.getStdDev());
     }
+    if (mMonopipePipeDepthStats.getN() > 0) {
+        item->setDouble(MM_PREFIX "monopipePipeDepthStats.mean",
+                        mMonopipePipeDepthStats.getMean());
+        item->setDouble(MM_PREFIX "monopipePipeDepthStats.std",
+                        mMonopipePipeDepthStats.getStdDev());
+    }
 
     item->selfrecord();
 }
@@ -2085,16 +2114,15 @@
         }
     }
     run(mThreadName, ANDROID_PRIORITY_URGENT_AUDIO);
+    mThreadSnapshot.setTid(getTid());
 }
 
 // ThreadBase virtuals
 void AudioFlinger::PlaybackThread::preExit()
 {
     ALOGV("  preExit()");
-    // FIXME this is using hard-coded strings but in the future, this functionality will be
-    //       converted to use audio HAL extensions required to support tunneling
-    status_t result = mOutput->stream->setParameters(String8("exiting=1"));
-    ALOGE_IF(result != OK, "Error when setting parameters on exit: %d", result);
+    status_t result = mOutput->stream->exit();
+    ALOGE_IF(result != OK, "Error when calling exit(): %d", result);
 }
 
 void AudioFlinger::PlaybackThread::dumpTracks_l(int fd, const Vector<String16>& args __unused)
@@ -2217,7 +2245,8 @@
         pid_t tid,
         status_t *status,
         audio_port_handle_t portId,
-        const sp<media::IAudioTrackCallback>& callback)
+        const sp<media::IAudioTrackCallback>& callback,
+        bool isSpatialized)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2303,7 +2332,7 @@
                  "AUDIO_OUTPUT_FLAG_FAST accepted: frameCount=%zu mFrameCount=%zu",
                  frameCount, mFrameCount);
       } else {
-        ALOGV("AUDIO_OUTPUT_FLAG_FAST denied: sharedBuffer=%p frameCount=%zu "
+        ALOGD("AUDIO_OUTPUT_FLAG_FAST denied: sharedBuffer=%p frameCount=%zu "
                 "mFrameCount=%zu format=%#x mFormat=%#x isLinear=%d channelMask=%#x "
                 "sampleRate=%u mSampleRate=%u "
                 "hasFastMixer=%d tid=%d fastTrackAvailMask=%#x",
@@ -2509,7 +2538,8 @@
                           channelMask, frameCount,
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
                           sessionId, creatorPid, attributionSource, trackFlags,
-                          TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/, speed);
+                          TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
+                          speed, isSpatialized);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2802,36 +2832,26 @@
     return mOutput->stream->selectPresentation(presentationId, programId);
 }
 
-void AudioFlinger::PlaybackThread::ioConfigChanged(audio_io_config_event event, pid_t pid,
+void AudioFlinger::PlaybackThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
                                                    audio_port_handle_t portId) {
-    sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
     ALOGV("PlaybackThread::ioConfigChanged, thread %p, event %d", this, event);
-
-    desc->mIoHandle = mId;
-    struct audio_patch patch = mPatch;
-    if (isMsdDevice()) {
-        patch = mDownStreamPatch;
-    }
-
+    sp<AudioIoDescriptor> desc;
+    const struct audio_patch patch = isMsdDevice() ? mDownStreamPatch : mPatch;
     switch (event) {
     case AUDIO_OUTPUT_OPENED:
     case AUDIO_OUTPUT_REGISTERED:
     case AUDIO_OUTPUT_CONFIG_CHANGED:
-        desc->mPatch = patch;
-        desc->mChannelMask = mChannelMask;
-        desc->mSamplingRate = mSampleRate;
-        desc->mFormat = mFormat;
-        desc->mFrameCount = mNormalFrameCount; // FIXME see
-                                             // AudioFlinger::frameCount(audio_io_handle_t)
-        desc->mFrameCountHAL = mFrameCount;
-        desc->mLatency = latency_l();
+        desc = sp<AudioIoDescriptor>::make(mId, patch, false /*isInput*/,
+                mSampleRate, mFormat, mChannelMask,
+                // FIXME AudioFlinger::frameCount(audio_io_handle_t) instead of mNormalFrameCount?
+                mNormalFrameCount, mFrameCount, latency_l());
         break;
     case AUDIO_CLIENT_STARTED:
-        desc->mPatch = patch;
-        desc->mPortId = portId;
+        desc = sp<AudioIoDescriptor>::make(mId, patch, portId);
         break;
     case AUDIO_OUTPUT_CLOSED:
     default:
+        desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
     mAudioFlinger->ioConfigChanged(event, desc, pid);
@@ -2988,6 +3008,7 @@
 
     // Calculate size of normal sink buffer relative to the HAL output buffer size
     double multiplier = 1.0;
+    // Note: mType == SPATIALIZER does not support FastMixer.
     if (mType == MIXER && (kUseFastMixer == FastMixer_Static ||
             kUseFastMixer == FastMixer_Dynamic)) {
         size_t minNormalFrameCount = (kMinNormalSinkBufferSizeMs * mSampleRate) / 1000;
@@ -3336,6 +3357,7 @@
     mInWrite = false;
     if (mStandby) {
         mThreadMetrics.logBeginInterval();
+        mThreadSnapshot.onBegin();
         mStandby = false;
     }
     return bytesWritten;
@@ -3448,7 +3470,7 @@
     sp<EffectBufferHalInterface> halInBuffer, halOutBuffer;
     effect_buffer_t *buffer = nullptr; // only used for non global sessions
 
-    if (mType == SPATIALIZER ) {
+    if (mType == SPATIALIZER) {
         if (!audio_is_global_session(session)) {
             // player sessions on a spatializer output will use a dedicated input buffer and
             // will either output multi channel to mEffectBuffer if the track is spatialized
@@ -3674,7 +3696,7 @@
     cacheParameters_l();
     mSleepTimeUs = mIdleSleepTimeUs;
 
-    if (mType == MIXER) {
+    if (mType == MIXER || mType == SPATIALIZER) {
         sleepTimeShift = 0;
     }
 
@@ -3821,6 +3843,7 @@
                     if (!mStandby) {
                         LOG_AUDIO_STATE();
                         mThreadMetrics.logEndInterval();
+                        mThreadSnapshot.onEnd();
                         mStandby = true;
                     }
                     sendStatistics(false /* force */);
@@ -3851,7 +3874,7 @@
 
                     mStandbyTimeNs = systemTime() + mStandbyDelayNs;
                     mSleepTimeUs = mIdleSleepTimeUs;
-                    if (mType == MIXER) {
+                    if (mType == MIXER || mType == SPATIALIZER) {
                         sleepTimeShift = 0;
                     }
 
@@ -3881,14 +3904,14 @@
                             && effectChain->containsHapticGeneratingEffect_l()) {
                         activeHapticSessionId = track->sessionId();
                         isHapticSessionSpatialized =
-                                mType == SPATIALIZER && track->canBeSpatialized();
+                                mType == SPATIALIZER && track->isSpatialized();
                         break;
                     }
                     if (activeHapticSessionId == AUDIO_SESSION_NONE
                             && track->getHapticPlaybackEnabled()) {
                         activeHapticSessionId = track->sessionId();
                         isHapticSessionSpatialized =
-                                mType == SPATIALIZER && track->canBeSpatialized();
+                                mType == SPATIALIZER && track->isSpatialized();
                     }
                 }
             }
@@ -4112,11 +4135,24 @@
                                 Mutex::Autolock _l(mLock);
                                 mIoJitterMs.add(jitterMs);
                                 mProcessTimeMs.add(processMs);
+
+                                if (mPipeSink.get() != nullptr) {
+                                    // Using the Monopipe availableToWrite, we estimate the current
+                                    // buffer size.
+                                    MonoPipe* monoPipe = static_cast<MonoPipe*>(mPipeSink.get());
+                                    const ssize_t
+                                            availableToWrite = mPipeSink->availableToWrite();
+                                    const size_t pipeFrames = monoPipe->maxFrames();
+                                    const size_t
+                                            remainingFrames = pipeFrames - max(availableToWrite, 0);
+                                    mMonopipePipeDepthStats.add(remainingFrames);
+                                }
                             }
 
                             // write blocked detection
                             const int64_t deltaWriteNs = lastIoEndNs - lastIoBeginNs;
-                            if (mType == MIXER && deltaWriteNs > maxPeriod) {
+                            if ((mType == MIXER || mType == SPATIALIZER)
+                                    && deltaWriteNs > maxPeriod) {
                                 mNumDelayedWrites++;
                                 if ((lastIoEndNs - lastWarning) > kWarningThrottleNs) {
                                     ATRACE_NAME("underrun");
@@ -4137,7 +4173,7 @@
                         (mMixerStatus == MIXER_DRAIN_ALL)) {
                     threadLoop_drain();
                 }
-                if (mType == MIXER && !mStandby) {
+                if ((mType == MIXER || mType == SPATIALIZER) && !mStandby) {
 
                     if (mThreadThrottle
                             && mMixerStatus == MIXER_TRACKS_READY // we are mixing (active tracks)
@@ -4262,13 +4298,6 @@
 
 void AudioFlinger::PlaybackThread::collectTimestamps_l()
 {
-    // Collect timestamp statistics for the Playback Thread types that support it.
-    if (mType != MIXER
-            && mType != DUPLICATING
-            && mType != DIRECT
-            && mType != OFFLOAD) {
-        return;
-    }
     if (mStandby) {
         mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
         return;
@@ -4570,19 +4599,7 @@
                                             patch->sinks,
                                             handle);
     } else {
-        char *address;
-        if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
-            //FIXME: we only support address on first sink with HAL version < 3.0
-            address = audio_device_address_to_parameter(
-                                                        patch->sinks[0].ext.device.type,
-                                                        patch->sinks[0].ext.device.address);
-        } else {
-            address = (char *)calloc(1, 1);
-        }
-        AudioParameter param = AudioParameter(String8(address));
-        free(address);
-        param.addInt(String8(AudioParameter::keyRouting), (int)type);
-        status = mOutput->stream->setParameters(param.toString());
+        status = mOutput->stream->legacyCreateAudioPatch(patch->sinks[0], std::nullopt, type);
         *handle = AUDIO_PATCH_HANDLE_NONE;
     }
     const std::string patchSinksAsString = patchSinksToString(patch);
@@ -4627,9 +4644,7 @@
         sp<DeviceHalInterface> hwDevice = mOutput->audioHwDev->hwDevice();
         status = hwDevice->releaseAudioPatch(handle);
     } else {
-        AudioParameter param;
-        param.addInt(String8(AudioParameter::keyRouting), 0);
-        status = mOutput->stream->setParameters(param.toString());
+        status = mOutput->stream->legacyReleaseAudioPatch();
     }
     return status;
 }
@@ -4712,12 +4727,7 @@
             break;
         case FastMixer_Static:
         case FastMixer_Dynamic:
-            // FastMixer was designed to operate with a HAL that pulls at a regular rate,
-            // where the period is less than an experimentally determined threshold that can be
-            // scheduled reliably with CFS. However, the BT A2DP HAL is
-            // bursty (does not pull at a regular rate) and so cannot operate with FastMixer.
-            initFastMixer = mFrameCount < mNormalFrameCount
-                    && Intersection(outDeviceTypes(), getAudioDeviceOutAllA2dpSet()).empty();
+            initFastMixer = mFrameCount < mNormalFrameCount;
             break;
         }
         ALOGW_IF(initFastMixer == false && mFrameCount < mNormalFrameCount,
@@ -5575,7 +5585,7 @@
                 AudioMixer::TRACK,
                 AudioMixer::CHANNEL_MASK, (void *)(uintptr_t)track->channelMask());
 
-            if (mType == SPATIALIZER && !track->canBeSpatialized()) {
+            if (mType == SPATIALIZER && !track->isSpatialized()) {
                 mAudioMixer->setParameter(
                     trackId,
                     AudioMixer::TRACK,
@@ -5625,7 +5635,7 @@
             if (mMixerBufferEnabled
                     && (track->mainBuffer() == mSinkBuffer
                             || track->mainBuffer() == mMixerBuffer)) {
-                if (mType == SPATIALIZER && !track->canBeSpatialized()) {
+                if (mType == SPATIALIZER && !track->isSpatialized()) {
                     mAudioMixer->setParameter(
                             trackId,
                             AudioMixer::TRACK,
@@ -5879,6 +5889,20 @@
     return trackCount;
 }
 
+bool AudioFlinger::PlaybackThread::checkRunningTimestamp()
+{
+    uint64_t position = 0;
+    struct timespec unused;
+    const status_t ret = mOutput->getPresentationPosition(&position, &unused);
+    if (ret == NO_ERROR) {
+        if (position != mLastCheckedTimestampPosition) {
+            mLastCheckedTimestampPosition = position;
+            return true;
+        }
+    }
+    return false;
+}
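// Illustrative sketch (editor's note, not part of this patch): how the Direct/Offload
// underrun paths below use checkRunningTimestamp(). TrackStub and the retry constant's
// value are placeholders, for illustration only.
struct TrackStub { int mRetryCount = 0; };
static constexpr int kMaxTrackRetriesExample = 20;  // assumed value

// Returns true when the track should be disabled and removed from the active list.
static bool shouldRemoveAfterUnderrun(TrackStub& track, bool halPositionAdvanced) {
    if (--track.mRetryCount > 0) {
        return false;                          // still within the retry budget
    }
    if (halPositionAdvanced) {                 // presentation position moved: HAL still consuming
        track.mRetryCount = kMaxTrackRetriesExample;
        return false;                          // give the track more time
    }
    return true;                               // genuinely stalled
}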
+
 // isTrackAllowed_l() must be called with ThreadBase::mLock held
 bool AudioFlinger::MixerThread::isTrackAllowed_l(
         audio_channel_mask_t channelMask, audio_format_t format,
@@ -5949,6 +5973,7 @@
             mOutput->standby();
             if (!mStandby) {
                 mThreadMetrics.logEndInterval();
+                mThreadSnapshot.onEnd();
                 mStandby = true;
             }
             mBytesWritten = 0;
@@ -6291,9 +6316,13 @@
                     track->isStopping_2() || track->isPaused()) {
                 // We have consumed all the buffers of this track.
                 // Remove it from the list of active tracks.
+                bool presComplete = false;
                 if (mStandby || !last ||
-                        track->presentationComplete(latency_l()) ||
+                        (presComplete = track->presentationComplete(latency_l())) ||
                         track->isPaused() || mHwPaused) {
+                    if (presComplete) {
+                        mOutput->presentationComplete();
+                    }
                     if (track->isStopping_2()) {
                         track->mState = TrackBase::STOPPED;
                     }
@@ -6307,19 +6336,24 @@
                 // fill a buffer, then remove it from active list.
                 // Only consider last track started for mixer state control
                 if (--(track->mRetryCount) <= 0) {
-                    ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
-                    tracksToRemove->add(track);
-                    // indicate to client process that the track was disabled because of underrun;
-                    // it will then automatically call start() when data is available
-                    track->disable();
-                    // only do hw pause when track is going to be removed due to BUFFER TIMEOUT.
-                    // unlike mixerthread, HAL can be paused for direct output
-                    ALOGW("pause because of UNDERRUN, framesReady = %zu,"
-                            "minFrames = %u, mFormat = %#x",
-                            framesReady, minFrames, mFormat);
-                    if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
-                        doHwPause = true;
-                        mHwPaused = true;
+                    const bool running = checkRunningTimestamp();
+                    if (running) { // still running, give us more time.
+                        track->mRetryCount = kMaxTrackRetriesOffload;
+                    } else {
+                        ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
+                        tracksToRemove->add(track);
+                        // indicate to client process that the track was disabled because of
+                        // underrun; it will then automatically call start() when data is available
+                        track->disable();
+                        // only do hw pause when track is going to be removed due to BUFFER TIMEOUT.
+                        // unlike mixerthread, HAL can be paused for direct output
+                        ALOGW("pause because of UNDERRUN, framesReady = %zu, "
+                                "minFrames = %u, mFormat = %#x",
+                                framesReady, minFrames, mFormat);
+                        if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
+                            doHwPause = true;
+                            mHwPaused = true;
+                        }
                     }
                 } else if (last) {
                     mixerStatus = MIXER_TRACKS_ENABLED;
@@ -6465,6 +6499,7 @@
             mOutput->standby();
             if (!mStandby) {
                 mThreadMetrics.logEndInterval();
+                mThreadSnapshot.onEnd();
                 mStandby = true;
             }
             mBytesWritten = 0;
@@ -6530,6 +6565,7 @@
 
 void AudioFlinger::DirectOutputThread::flushHw_l()
 {
+    PlaybackThread::flushHw_l();
     mOutput->flush();
     mHwPaused = false;
     mFlushPending = false;
@@ -6665,8 +6701,7 @@
 AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
         AudioStreamOut* output, audio_io_handle_t id, bool systemReady)
     :   DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady),
-        mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true),
-        mOffloadUnderrunPosition(~0LL)
+        mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true)
 {
     //FIXME: mStandby should be set to true by ThreadBase constructor
     mStandby = true;
@@ -6866,7 +6901,8 @@
                 // Drain has completed or we are in standby, signal presentation complete
                 if (!(mDrainSequence & 1) || !last || mStandby) {
                     track->mState = TrackBase::STOPPED;
-                    track->presentationComplete(latency_l());
+                    mOutput->presentationComplete();
+                    track->presentationComplete(latency_l()); // always returns true
                     track->reset();
                     tracksToRemove->add(track);
                     // OFFLOADED stop resets frame counts.
@@ -6883,19 +6919,7 @@
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
                 if (--(track->mRetryCount) <= 0) {
-                    bool running = false;
-                    uint64_t position = 0;
-                    struct timespec unused;
-                    // The running check restarts the retry counter at least once.
-                    status_t ret = mOutput->stream->getPresentationPosition(&position, &unused);
-                    if (ret == NO_ERROR && position != mOffloadUnderrunPosition) {
-                        running = true;
-                        mOffloadUnderrunPosition = position;
-                    }
-                    if (ret == NO_ERROR) {
-                        ALOGVV("underrun counter, running(%d): %lld vs %lld", running,
-                                (long long)position, (long long)mOffloadUnderrunPosition);
-                    }
+                    const bool running = checkRunningTimestamp();
                     if (running) { // still running, give us more time.
                         track->mRetryCount = kMaxTrackRetriesOffload;
                     } else {
@@ -6966,7 +6990,6 @@
     mPausedBytesRemaining = 0;
     // reset bytes written count to reflect that DSP buffers are empty after flush.
     mBytesWritten = 0;
-    mOffloadUnderrunPosition = ~0LL;
 
     if (mUseAsyncWrite) {
         // discard any pending drain or write ack by incrementing sequence
@@ -7064,6 +7087,7 @@
     }
     if (mStandby) {
         mThreadMetrics.logBeginInterval();
+        mThreadSnapshot.onBegin();
         mStandby = false;
     }
     return (ssize_t)mSinkBufferSize;
@@ -7589,6 +7613,7 @@
                     doBroadcast = true;
                     if (mStandby) {
                         mThreadMetrics.logBeginInterval();
+                        mThreadSnapshot.onBegin();
                         mStandby = false;
                     }
                     activeTrack->mState = TrackBase::ACTIVE;
@@ -7761,6 +7786,7 @@
 
             const ssize_t availableToRead = mPipeSource->availableToRead();
             if (availableToRead >= 0) {
+                mMonopipePipeDepthStats.add(availableToRead);
                 // PipeSource is the primary clock.  It is up to the AudioRecord client to keep up.
                 LOG_ALWAYS_FATAL_IF((size_t)availableToRead > mPipeFramesP2,
                         "more frames to read than fifo size, %zd > %zu",
@@ -8069,6 +8095,7 @@
     if (!mStandby) {
         inputStandBy();
         mThreadMetrics.logEndInterval();
+        mThreadSnapshot.onEnd();
         mStandby = true;
     }
 }
@@ -8189,6 +8216,7 @@
       if (
             // we formerly checked for a callback handler (non-0 tid),
             // but that is no longer required for TRANSFER_OBTAIN mode
+            // No need to match hardware format; format conversion will be done on the client side.
             //
             // Frame count is not specified (0), or is less than or equal the pipe depth.
             // It is OK to provide a higher capacity than requested.
@@ -8196,8 +8224,6 @@
             (frameCount <= mPipeFramesP2) &&
             // PCM data
             audio_is_linear_pcm(format) &&
-            // hardware format
-            (format == mFormat) &&
             // hardware channel mask
             (channelMask == mChannelMask) &&
             // hardware sample rate
@@ -8947,30 +8973,22 @@
     return String8();
 }
 
-void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event event, pid_t pid,
+void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
                                                  audio_port_handle_t portId) {
-    sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
-
-    desc->mIoHandle = mId;
-
+    sp<AudioIoDescriptor> desc;
     switch (event) {
     case AUDIO_INPUT_OPENED:
     case AUDIO_INPUT_REGISTERED:
     case AUDIO_INPUT_CONFIG_CHANGED:
-        desc->mPatch = mPatch;
-        desc->mChannelMask = mChannelMask;
-        desc->mSamplingRate = mSampleRate;
-        desc->mFormat = mFormat;
-        desc->mFrameCount = mFrameCount;
-        desc->mFrameCountHAL = mFrameCount;
-        desc->mLatency = 0;
+        desc = sp<AudioIoDescriptor>::make(mId, mPatch, true /*isInput*/,
+                mSampleRate, mFormat, mChannelMask, mFrameCount, mFrameCount);
         break;
     case AUDIO_CLIENT_STARTED:
-        desc->mPatch = mPatch;
-        desc->mPortId = portId;
+        desc = sp<AudioIoDescriptor>::make(mId, mPatch, portId);
         break;
     case AUDIO_INPUT_CLOSED:
     default:
+        desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
     mAudioFlinger->ioConfigChanged(event, desc, pid);
@@ -9122,21 +9140,9 @@
                                             patch->sinks,
                                             handle);
     } else {
-        char *address;
-        if (strcmp(patch->sources[0].ext.device.address, "") != 0) {
-            address = audio_device_address_to_parameter(
-                                                patch->sources[0].ext.device.type,
-                                                patch->sources[0].ext.device.address);
-        } else {
-            address = (char *)calloc(1, 1);
-        }
-        AudioParameter param = AudioParameter(String8(address));
-        free(address);
-        param.addInt(String8(AudioParameter::keyRouting),
-                     (int)patch->sources[0].ext.device.type);
-        param.addInt(String8(AudioParameter::keyInputSource),
-                                         (int)patch->sinks[0].ext.mix.usecase.source);
-        status = mInput->stream->setParameters(param.toString());
+        status = mInput->stream->legacyCreateAudioPatch(patch->sources[0],
+                                                        patch->sinks[0].ext.mix.usecase.source,
+                                                        patch->sources[0].ext.device.type);
         *handle = AUDIO_PATCH_HANDLE_NONE;
     }
 
@@ -9168,9 +9174,7 @@
         sp<DeviceHalInterface> hwDevice = mInput->audioHwDev->hwDevice();
         status = hwDevice->releaseAudioPatch(handle);
     } else {
-        AudioParameter param;
-        param.addInt(String8(AudioParameter::keyRouting), 0);
-        status = mInput->stream->setParameters(param.toString());
+        status = mInput->stream->legacyReleaseAudioPatch();
     }
     return status;
 }
@@ -9477,6 +9481,7 @@
     }
     if (mStandby) {
         mThreadMetrics.logBeginInterval();
+        mThreadSnapshot.onBegin();
         mStandby = false;
     }
     return NO_ERROR;
@@ -9516,6 +9521,7 @@
                 (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
         audio_port_handle_t deviceId = mDeviceId;
         std::vector<audio_io_handle_t> secondaryOutputs;
+        bool isSpatialized;
         ret = AudioSystem::getOutputForAttr(&mAttr, &io,
                                             mSessionId,
                                             &stream,
@@ -9524,7 +9530,8 @@
                                             flags,
                                             &deviceId,
                                             &portId,
-                                            &secondaryOutputs);
+                                            &secondaryOutputs,
+                                            &isSpatialized);
         ALOGD_IF(!secondaryOutputs.empty(),
                  "MmapThread::start does not support secondary outputs, ignoring them");
     } else {
@@ -9682,6 +9689,7 @@
     mHalStream->standby();
     if (!mStandby) {
         mThreadMetrics.logEndInterval();
+        mThreadSnapshot.onEnd();
         mStandby = true;
     }
     releaseWakeLock();
@@ -9824,31 +9832,26 @@
     return String8();
 }
 
-void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event event, pid_t pid,
+void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
                                                audio_port_handle_t portId __unused) {
-    sp<AudioIoDescriptor> desc = new AudioIoDescriptor();
-
-    desc->mIoHandle = mId;
-
+    sp<AudioIoDescriptor> desc;
+    bool isInput = false;
     switch (event) {
     case AUDIO_INPUT_OPENED:
     case AUDIO_INPUT_REGISTERED:
     case AUDIO_INPUT_CONFIG_CHANGED:
+        isInput = true;
+        FALLTHROUGH_INTENDED;
     case AUDIO_OUTPUT_OPENED:
     case AUDIO_OUTPUT_REGISTERED:
     case AUDIO_OUTPUT_CONFIG_CHANGED:
-        desc->mPatch = mPatch;
-        desc->mChannelMask = mChannelMask;
-        desc->mSamplingRate = mSampleRate;
-        desc->mFormat = mFormat;
-        desc->mFrameCount = mFrameCount;
-        desc->mFrameCountHAL = mFrameCount;
-        desc->mLatency = 0;
+        desc = sp<AudioIoDescriptor>::make(mId, mPatch, isInput,
+                mSampleRate, mFormat, mChannelMask, mFrameCount, mFrameCount);
         break;
-
     case AUDIO_INPUT_CLOSED:
     case AUDIO_OUTPUT_CLOSED:
     default:
+        desc = sp<AudioIoDescriptor>::make(mId);
         break;
     }
     mAudioFlinger->ioConfigChanged(event, desc, pid);
@@ -9905,29 +9908,18 @@
     }
 
     if (mAudioHwDev->supportsAudioPatches()) {
-        status = mHalDevice->createAudioPatch(patch->num_sources,
-                                            patch->sources,
-                                            patch->num_sinks,
-                                            patch->sinks,
-                                            handle);
+        status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
+                                              patch->sinks, handle);
     } else {
-        char *address;
-        if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
-            //FIXME: we only support address on first sink with HAL version < 3.0
-            address = audio_device_address_to_parameter(
-                                                        patch->sinks[0].ext.device.type,
-                                                        patch->sinks[0].ext.device.address);
+        audio_port_config port;
+        std::optional<audio_source_t> source;
+        if (isOutput()) {
+            port = patch->sinks[0];
         } else {
-            address = (char *)calloc(1, 1);
+            port = patch->sources[0];
+            source = patch->sinks[0].ext.mix.usecase.source;
         }
-        AudioParameter param = AudioParameter(String8(address));
-        free(address);
-        param.addInt(String8(AudioParameter::keyRouting), (int)type);
-        if (!isOutput()) {
-            param.addInt(String8(AudioParameter::keyInputSource),
-                                         (int)patch->sinks[0].ext.mix.usecase.source);
-        }
-        status = mHalStream->setParameters(param.toString());
+        status = mHalStream->legacyCreateAudioPatch(port, source, type);
         *handle = AUDIO_PATCH_HANDLE_NONE;
     }
 
@@ -9966,9 +9958,7 @@
     if (supportsAudioPatches) {
         status = mHalDevice->releaseAudioPatch(handle);
     } else {
-        AudioParameter param;
-        param.addInt(String8(AudioParameter::keyRouting), 0);
-        status = mHalStream->setParameters(param.toString());
+        status = mHalStream->legacyReleaseAudioPatch();
     }
     return status;
 }
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index cf52a8c..074ae8f 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -113,7 +113,7 @@
 
     class IoConfigEventData : public ConfigEventData {
     public:
-        IoConfigEventData(audio_io_config_event event, pid_t pid,
+        IoConfigEventData(audio_io_config_event_t event, pid_t pid,
                           audio_port_handle_t portId) :
             mEvent(event), mPid(pid), mPortId(portId) {}
 
@@ -121,14 +121,14 @@
             snprintf(buffer, size, "- IO event: event %d\n", mEvent);
         }
 
-        const audio_io_config_event mEvent;
+        const audio_io_config_event_t mEvent;
         const pid_t                 mPid;
         const audio_port_handle_t   mPortId;
     };
 
     class IoConfigEvent : public ConfigEvent {
     public:
-        IoConfigEvent(audio_io_config_event event, pid_t pid, audio_port_handle_t portId) :
+        IoConfigEvent(audio_io_config_event_t event, pid_t pid, audio_port_handle_t portId) :
             ConfigEvent(CFG_EVENT_IO) {
             mData = new IoConfigEventData(event, pid, portId);
         }
@@ -332,15 +332,15 @@
                                                     status_t& status) = 0;
     virtual     status_t    setParameters(const String8& keyValuePairs);
     virtual     String8     getParameters(const String8& keys) = 0;
-    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+    virtual     void        ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
                                         audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE) = 0;
                 // sendConfigEvent_l() must be called with ThreadBase::mLock held
                 // Can temporarily release the lock if waiting for a reply from
                 // processConfigEvents_l().
                 status_t    sendConfigEvent_l(sp<ConfigEvent>& event);
-                void        sendIoConfigEvent(audio_io_config_event event, pid_t pid = 0,
+                void        sendIoConfigEvent(audio_io_config_event_t event, pid_t pid = 0,
                                               audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
-                void        sendIoConfigEvent_l(audio_io_config_event event, pid_t pid = 0,
+                void        sendIoConfigEvent_l(audio_io_config_event_t event, pid_t pid = 0,
                                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
                 void        sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio, bool forApp);
                 void        sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio, bool forApp);
@@ -483,7 +483,7 @@
                             if (track->isFastTrack()) {
                                 result |= FAST_SESSION;  // caution, only represents first track.
                             }
-                            if (track->canBeSpatialized()) {
+                            if (track->isSpatialized()) {
                                 result |= SPATIALIZED_SESSION;  // caution, only first track.
                             }
                             break;
@@ -687,10 +687,14 @@
                 int64_t                 mLastIoBeginNs = -1;
                 int64_t                 mLastIoEndNs = -1;
 
+                // ThreadSnapshot is thread-safe (internally locked)
+                mediautils::ThreadSnapshot mThreadSnapshot;
+
                 // This should be read under ThreadBase lock (if not on the threadLoop thread).
                 audio_utils::Statistics<double> mIoJitterMs{0.995 /* alpha */};
                 audio_utils::Statistics<double> mProcessTimeMs{0.995 /* alpha */};
                 audio_utils::Statistics<double> mLatencyMs{0.995 /* alpha */};
+                audio_utils::Statistics<double> mMonopipePipeDepthStats{0.999 /* alpha */};
 
                 // Save the last count when we delivered statistics to mediametrics.
                 int64_t                 mLastRecordedTimestampVerifierN = 0;
@@ -959,7 +963,8 @@
                                 pid_t tid,
                                 status_t *status /*non-NULL*/,
                                 audio_port_handle_t portId,
-                                const sp<media::IAudioTrackCallback>& callback);
+                                const sp<media::IAudioTrackCallback>& callback,
+                                bool isSpatialized);
 
                 AudioStreamOut* getOutput() const;
                 AudioStreamOut* clearOutput();
@@ -979,7 +984,7 @@
                                 { return android_atomic_acquire_load(&mSuspended) > 0; }
 
     virtual     String8     getParameters(const String8& keys);
-    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+    virtual     void        ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
                                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
                 status_t    getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames);
                 // Consider also removing and passing an explicit mMainBuffer initialization
@@ -1375,6 +1380,14 @@
                 struct audio_patch mDownStreamPatch;
 
                 std::atomic_bool mCheckOutputStageEffects{};
+
+                // Supports a differential check on the timestamps: stores the frame position seen
+                // by the last call to checkRunningTimestamp() so the next call can detect whether
+                // the position has advanced.
+                uint64_t mLastCheckedTimestampPosition = ~0LL;
+
+                bool checkRunningTimestamp();
+
+    virtual     void flushHw_l() { mLastCheckedTimestampPosition = ~0LL; }
 };
 
 class MixerThread : public PlaybackThread {
@@ -1492,7 +1505,7 @@
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
                                                    status_t& status);
 
-    virtual     void        flushHw_l();
+                void        flushHw_l() override;
 
                 void        setMasterBalance(float balance) override;
 
@@ -1557,7 +1570,7 @@
     OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
                   audio_io_handle_t id, bool systemReady);
     virtual                 ~OffloadThread() {};
-    virtual     void        flushHw_l();
+                void        flushHw_l() override;
 
 protected:
     // threadLoop snippets
@@ -1574,10 +1587,6 @@
     size_t      mPausedWriteLength;     // length in bytes of write interrupted by pause
     size_t      mPausedBytesRemaining;  // bytes still waiting in mixbuffer after resume
     bool        mKeepWakeLock;          // keep wake lock while waiting for write callback
-    uint64_t    mOffloadUnderrunPosition; // Current frame position for offloaded playback
-                                          // used and valid only during underrun.  ~0 if
-                                          // no underrun has occurred during playback and
-                                          // is not reset on standby.
 };
 
 class AsyncCallbackThread : public Thread {
@@ -1801,7 +1810,7 @@
                                                status_t& status);
     virtual void        cacheParameters_l() {}
     virtual String8     getParameters(const String8& keys);
-    virtual void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+    virtual void        ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
                                         audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
     virtual status_t    createAudioPatch_l(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle);
@@ -1859,6 +1868,8 @@
 
             bool        isTimestampCorrectionEnabled() const override {
                             // checks popcount for exactly one device.
+                            // Is currently disabled. Before enabling,
+                            // verify compressed record timestamps.
                             return audio_is_input_device(mTimestampCorrectedDevice)
                                     && inDeviceType() == mTimestampCorrectedDevice;
                         }
@@ -2010,7 +2021,7 @@
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
                                                     status_t& status);
     virtual     String8     getParameters(const String8& keys);
-    virtual     void        ioConfigChanged(audio_io_config_event event, pid_t pid = 0,
+    virtual     void        ioConfigChanged(audio_io_config_event_t event, pid_t pid = 0,
                                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
                 void        readHalParameters_l();
     virtual     void        cacheParameters_l() {}
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index b582b3a..2677ab3 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -107,8 +107,7 @@
 
     audio_attributes_t  attributes() const { return mAttr; }
 
-            bool canBeSpatialized() const { return mIsOut && (mAttr.flags
-                    & (AUDIO_FLAG_CONTENT_SPATIALIZED | AUDIO_FLAG_NEVER_SPATIALIZE)) == 0; }
+    virtual bool        isSpatialized() const { return false; }
 
 #ifdef TEE_SINK
            void         dumpTee(int fd, const std::string &reason) const {
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index 7fb69be..30d69ab 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -116,6 +116,21 @@
         mDeviceStartupMs.add(startupMs);
     }
 
+    void updateMinMaxVolume(int64_t durationNs, double deviceVolume) {
+        if (deviceVolume > mMaxVolume) {
+            mMaxVolume = deviceVolume;
+            mMaxVolumeDurationNs = durationNs;
+        } else if (deviceVolume == mMaxVolume) {
+            mMaxVolumeDurationNs += durationNs;
+        }
+        if (deviceVolume < mMinVolume) {
+            mMinVolume = deviceVolume;
+            mMinVolumeDurationNs = durationNs;
+        } else if (deviceVolume == mMinVolume) {
+            mMinVolumeDurationNs += durationNs;
+        }
+    }
+
     // may be called multiple times during an interval
     void logVolume(float volume) {
         const int64_t timeNs = systemTime();
@@ -123,10 +138,13 @@
         if (mStartVolumeTimeNs == 0) {
             mDeviceVolume = mVolume = volume;
             mLastVolumeChangeTimeNs = mStartVolumeTimeNs = timeNs;
+            updateMinMaxVolume(0, mVolume);
             return;
         }
+        const int64_t durationNs = timeNs - mLastVolumeChangeTimeNs;
+        updateMinMaxVolume(durationNs, mVolume);
         mDeviceVolume = (mDeviceVolume * (mLastVolumeChangeTimeNs - mStartVolumeTimeNs) +
-            mVolume * (timeNs - mLastVolumeChangeTimeNs)) / (timeNs - mStartVolumeTimeNs);
+            mVolume * durationNs) / (timeNs - mStartVolumeTimeNs);
         mVolume = volume;
         mLastVolumeChangeTimeNs = timeNs;
     }
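// Illustrative sketch (editor's note, not part of this patch): the running time-weighted
// average that logVolume() above maintains in mDeviceVolume. Each volume value is weighted
// by how long it was in effect:
//   newAvg = (oldAvg * nsCoveredSoFar + lastVolume * nsLastVolumeHeld)
//            / (nsCoveredSoFar + nsLastVolumeHeld)
static double timeWeightedVolume(double avgSoFar, long long nsCoveredSoFar,
                                 double lastVolume, long long nsLastVolumeHeld) {
    const long long totalNs = nsCoveredSoFar + nsLastVolumeHeld;
    if (totalNs == 0) return lastVolume;  // first sample: nothing accumulated yet
    return (avgSoFar * nsCoveredSoFar + lastVolume * nsLastVolumeHeld) / totalNs;
}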
@@ -157,7 +175,11 @@
                 .set(AMEDIAMETRICS_PROP_EVENT, eventName)
                 .set(AMEDIAMETRICS_PROP_INTERVALCOUNT, (int32_t)mIntervalCount);
             if (mIsOut) {
-                item.set(AMEDIAMETRICS_PROP_DEVICEVOLUME, mDeviceVolume);
+                item.set(AMEDIAMETRICS_PROP_DEVICEVOLUME, mDeviceVolume)
+                    .set(AMEDIAMETRICS_PROP_DEVICEMAXVOLUMEDURATIONNS, mMaxVolumeDurationNs)
+                    .set(AMEDIAMETRICS_PROP_DEVICEMAXVOLUME, mMaxVolume)
+                    .set(AMEDIAMETRICS_PROP_DEVICEMINVOLUMEDURATIONNS, mMinVolumeDurationNs)
+                    .set(AMEDIAMETRICS_PROP_DEVICEMINVOLUME, mMinVolume);
             }
             if (mDeviceLatencyMs.getN() > 0) {
                 item.set(AMEDIAMETRICS_PROP_DEVICELATENCYMS, mDeviceLatencyMs.getMean())
@@ -185,6 +207,10 @@
         mDeviceVolume = 0.f;
         mStartVolumeTimeNs = 0;
         mLastVolumeChangeTimeNs = 0;
+        mMinVolume = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
+        mMaxVolume = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
+        mMinVolumeDurationNs = 0;
+        mMaxVolumeDurationNs = 0;
 
         mDeviceLatencyMs.reset();
         mDeviceStartupMs.reset();
@@ -214,6 +240,12 @@
     int64_t           mStartVolumeTimeNs GUARDED_BY(mLock) = 0;
     int64_t           mLastVolumeChangeTimeNs GUARDED_BY(mLock) = 0;
 
+    // Min/Max volume
+    double            mMinVolume GUARDED_BY(mLock) = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
+    double            mMaxVolume GUARDED_BY(mLock) = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
+    int64_t           mMinVolumeDurationNs GUARDED_BY(mLock) = 0;
+    int64_t           mMaxVolumeDurationNs GUARDED_BY(mLock) = 0;
+
     // latency and startup for each interval.
     audio_utils::Statistics<double> mDeviceLatencyMs GUARDED_BY(mLock);
     audio_utils::Statistics<double> mDeviceStartupMs GUARDED_BY(mLock);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index e0c5fa5..6135020 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -633,7 +633,8 @@
             track_type type,
             audio_port_handle_t portId,
             size_t frameCountToBeReady,
-            float speed)
+            float speed,
+            bool isSpatialized)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -667,7 +668,8 @@
     mResumeToStopping(false),
     mFlushHwPending(false),
     mFlags(flags),
-    mSpeed(speed)
+    mSpeed(speed),
+    mIsSpatialized(isSpatialized)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -713,8 +715,7 @@
         thread->mFastTrackAvailMask &= ~(1 << i);
     }
 
-    mServerLatencySupported = thread->type() == ThreadBase::MIXER
-            || thread->type() == ThreadBase::DUPLICATING;
+    mServerLatencySupported = checkServerLatencySupported(format, flags);
 #ifdef TEE_SINK
     mTee.setId(std::string("_") + std::to_string(mThreadIoHandle)
             + "_" + std::to_string(mId) + "_T");
@@ -1405,6 +1406,60 @@
             .content_type = mAttr.content_type,
             .gain = mFinalVolume,
     };
+
+    // When attributes are undefined, derive default values from stream type.
+    // See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
+    if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
+        switch (mStreamType) {
+        case AUDIO_STREAM_VOICE_CALL:
+            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_SYSTEM:
+            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_RING:
+            metadata.base.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_MUSIC:
+            metadata.base.usage = AUDIO_USAGE_MEDIA;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+            break;
+        case AUDIO_STREAM_ALARM:
+            metadata.base.usage = AUDIO_USAGE_ALARM;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_NOTIFICATION:
+            metadata.base.usage = AUDIO_USAGE_NOTIFICATION;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_DTMF:
+            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_ASSISTANT:
+            metadata.base.usage = AUDIO_USAGE_ASSISTANT;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_REROUTING:
+            metadata.base.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
+            // unknown content type
+            break;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            metadata.base.usage = AUDIO_USAGE_CALL_ASSISTANT;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        default:
+            break;
+        }
+    }
+
     metadata.channel_mask = mChannelMask,
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
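// Illustrative example (editor's note, not part of this patch): with the fallback added above,
// a legacy track created for AUDIO_STREAM_MUSIC with no explicit attributes
// (mAttr.usage == AUDIO_USAGE_UNKNOWN) reports
//     metadata.base.usage        == AUDIO_USAGE_MEDIA
//     metadata.base.content_type == AUDIO_CONTENT_TYPE_MUSIC
// to the HAL, mirroring usageForStreamType() in AudioAttributes.java.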
@@ -2582,6 +2637,8 @@
     // ALOGD("FrameTime: %lld %lld", (long long)ft.frames, (long long)ft.timeNs);
     mKernelFrameTime.store(ft);
     if (!audio_is_linear_pcm(mFormat)) {
+        // Stream is direct, return provided timestamp with no conversion
+        mServerProxy->setTimestamp(timestamp);
         return;
     }
 
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 4078278..496591a 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <media/AudioCommonTypes.h>
+#include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioSystem.h>
 #include <media/AudioPolicy.h>
@@ -31,30 +32,42 @@
 
 // ----------------------------------------------------------------------------
 
-// The AudioPolicyInterface and AudioPolicyClientInterface classes define the communication interfaces
-// between the platform specific audio policy manager and Android generic audio policy manager.
-// The platform specific audio policy manager must implement methods of the AudioPolicyInterface class.
+// The AudioPolicyInterface and AudioPolicyClientInterface classes define the communication
+// interfaces between the platform specific audio policy manager and Android generic audio policy
+// manager.
+// The platform specific audio policy manager must implement methods of the AudioPolicyInterface
+// class.
 // This implementation makes use of the AudioPolicyClientInterface to control the activity and
 // configuration of audio input and output streams.
 //
 // The platform specific audio policy manager is in charge of the audio routing and volume control
 // policies for a given platform.
 // The main roles of this module are:
-//   - keep track of current system state (removable device connections, phone state, user requests...).
-//   System state changes and user actions are notified to audio policy manager with methods of the AudioPolicyInterface.
+//   - keep track of current system state (removable device connections, phone state,
+//   user requests...).
+//   System state changes and user actions are notified to the audio policy manager with methods
+//   of the AudioPolicyInterface.
 //   - process getOutput() queries received when AudioTrack objects are created: Those queries
-//   return a handler on an output that has been selected, configured and opened by the audio policy manager and that
-//   must be used by the AudioTrack when registering to the AudioFlinger with the createTrack() method.
-//   When the AudioTrack object is released, a putOutput() query is received and the audio policy manager can decide
-//   to close or reconfigure the output depending on other streams using this output and current system state.
-//   - similarly process getInput() and putInput() queries received from AudioRecord objects and configure audio inputs.
-//   - process volume control requests: the stream volume is converted from an index value (received from UI) to a float value
-//   applicable to each output as a function of platform specific settings and current output route (destination device). It
-//   also make sure that streams are not muted if not allowed (e.g. camera shutter sound in some countries).
+//   return a handle to an output that has been selected, configured and opened by the audio
+//   policy manager and that must be used by the AudioTrack when registering to the AudioFlinger
+//   with the createTrack() method.
+//   When the AudioTrack object is released, a putOutput() query is received and the audio policy
+//   manager can decide to close or reconfigure the output depending on other streams using this
+//   output and current system state.
+//   - similarly process getInput() and putInput() queries received from AudioRecord objects and
+//   configure audio inputs.
+//   - process volume control requests: the stream volume is converted from an index value
+//   (received from UI) to a float value applicable to each output as a function of platform
+//   specific settings and current output route (destination device). It also makes sure that
+//   streams are not muted when muting is not allowed (e.g. the camera shutter sound in some
+//   countries).
 //
-// The platform specific audio policy manager is provided as a shared library by platform vendors (as for libaudio.so)
-// and is linked with libaudioflinger.so
-
+// The platform specific audio policy manager is provided as a shared library by platform vendors
+// (as for libaudio.so) and is linked with libaudioflinger.so
+//
+// NOTE: by convention, the implementation of the AudioPolicyInterface in AudioPolicyManager does
+// not have to perform any nullptr check on input arguments: The caller of this API is
+// AudioPolicyService running in the same process and in charge of validating arguments received
+// from incoming binder calls before calling AudioPolicyManager.
 
 //    Audio Policy Manager Interface
 class AudioPolicyInterface
@@ -92,14 +105,12 @@
     virtual void onNewAudioModulesAvailable() = 0;
 
     // indicate a change in device connection status
-    virtual status_t setDeviceConnectionState(audio_devices_t device,
-                                              audio_policy_dev_state_t state,
-                                              const char *device_address,
-                                              const char *device_name,
+    virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
+                                              const android::media::audio::common::AudioPort& port,
                                               audio_format_t encodedFormat) = 0;
     // retrieve a device connection status
     virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
-                                                                          const char *device_address) = 0;
+                                                              const char *device_address) = 0;
     // indicate a change in device configuration
     virtual status_t handleDeviceConfigChange(audio_devices_t device,
                                               const char *device_address,
@@ -132,10 +143,13 @@
                                         audio_port_handle_t *selectedDeviceId,
                                         audio_port_handle_t *portId,
                                         std::vector<audio_io_handle_t> *secondaryOutputs,
-                                        output_type_t *outputType) = 0;
-    // indicates to the audio policy manager that the output starts being used by corresponding stream.
+                                        output_type_t *outputType,
+                                        bool *isSpatialized) = 0;
+    // indicates to the audio policy manager that the output starts being used by the
+    // corresponding stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
-    // indicates to the audio policy manager that the output stops being used by corresponding stream.
+    // indicates to the audio policy manager that the output stops being used by the
+    // corresponding stream.
     virtual status_t stopOutput(audio_port_handle_t portId) = 0;
     // releases the output, return true if the output descriptor is reopened.
     virtual bool releaseOutput(audio_port_handle_t portId) = 0;
@@ -197,12 +211,10 @@
     // return the strategy corresponding to a given stream type
     virtual product_strategy_t getStrategyForStream(audio_stream_type_t stream) = 0;
 
-    // return the enabled output devices for the given stream type
-    virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
-
     // retrieves the list of enabled output devices for the given audio attributes
     virtual status_t getDevicesForAttributes(const audio_attributes_t &attr,
-                                             AudioDeviceTypeAddrVector *devices) = 0;
+                                             AudioDeviceTypeAddrVector *devices,
+                                             bool forVolume) = 0;
 
     // Audio effect management
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
@@ -285,8 +297,10 @@
 
     virtual bool     isHapticPlaybackSupported() = 0;
 
-    virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                std::vector<audio_format_t> *formats) = 0;
+    virtual bool     isUltrasoundSupported() = 0;
+
+    virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+                audio_devices_t device, std::vector<audio_format_t> *formats) = 0;
 
     virtual void     setAppState(audio_port_handle_t portId, app_state_t state) = 0;
 
@@ -377,8 +391,23 @@
      * @return NO_ERROR if an output was closed, INVALID_OPERATION or BAD_VALUE otherwise
      */
     virtual status_t releaseSpatializerOutput(audio_io_handle_t output) = 0;
-};
 
+    /**
+     * Query how direct playback is currently supported on the device.
+     * @param attr audio attributes describing the playback use case
+     * @param config audio configuration for the playback
+     * @return a set of audio_direct_mode_t flags describing how direct playback is currently
+     *         supported on the device for the given attributes and configuration.
+     */
+    virtual audio_direct_mode_t getDirectPlaybackSupport(const audio_attributes_t *attr,
+                                                         const audio_config_t *config) = 0;
+
+    // retrieves the list of available direct audio profiles for the given audio attributes
+    virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
+                                                    AudioProfileVector& audioProfiles) = 0;
+};
 
 // Audio Policy client Interface
 class AudioPolicyClientInterface
@@ -397,10 +426,13 @@
     // Audio output Control functions
     //
 
-    // opens an audio output with the requested parameters. The parameter values can indicate to use the default values
-    // in case the audio policy manager has no specific requirements for the output being opened.
-    // When the function returns, the parameter values reflect the actual values used by the audio hardware output stream.
-    // The audio policy manager can check if the proposed parameters are suitable or not and act accordingly.
+    // opens an audio output with the requested parameters. The parameter values can indicate to
+    // use the default values in case the audio policy manager has no specific requirements for the
+    // output being opened.
+    // When the function returns, the parameter values reflect the actual values used by the audio
+    // hardware output stream.
+    // The audio policy manager can check if the proposed parameters are suitable or not and act
+    // accordingly.
     virtual status_t openOutput(audio_module_handle_t module,
                                 audio_io_handle_t *output,
                                 audio_config_t *halConfig,
@@ -408,13 +440,15 @@
                                 const sp<DeviceDescriptorBase>& device,
                                 uint32_t *latencyMs,
                                 audio_output_flags_t flags) = 0;
-    // creates a special output that is duplicated to the two outputs passed as arguments. The duplication is performed by
-    // a special mixer thread in the AudioFlinger.
-    virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2) = 0;
+    // creates a special output that is duplicated to the two outputs passed as arguments.
+    // The duplication is performed by a special mixer thread in the AudioFlinger.
+    virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
+                                                  audio_io_handle_t output2) = 0;
     // closes the output stream
     virtual status_t closeOutput(audio_io_handle_t output) = 0;
-    // suspends the output. When an output is suspended, the corresponding audio hardware output stream is placed in
-    // standby and the AudioTracks attached to the mixer thread are still processed but the output mix is discarded.
+    // suspends the output. When an output is suspended, the corresponding audio hardware output
+    // stream is placed in standby and the AudioTracks attached to the mixer thread are still
+    // processed but the output mix is discarded.
     virtual status_t suspendOutput(audio_io_handle_t output) = 0;
     // restores a suspended output.
     virtual status_t restoreOutput(audio_io_handle_t output) = 0;
@@ -437,16 +471,21 @@
     // misc control functions
     //
 
-    // set a stream volume for a particular output. For the same user setting, a given stream type can have different volumes
+    // set a stream volume for a particular output. For the same user setting, a given stream type
+    // can have different volumes
     // for each output (destination device) it is attached to.
-    virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0) = 0;
+    virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
+                                     audio_io_handle_t output, int delayMs = 0) = 0;
 
     // invalidate a stream type, causing a reroute to an unspecified new output
     virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
 
-    // function enabling to send proprietary informations directly from audio policy manager to audio hardware interface.
-    virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0) = 0;
-    // function enabling to receive proprietary informations directly from audio hardware interface to audio policy manager.
+    // enables sending proprietary information directly from the audio policy manager to the
+    // audio hardware interface.
+    virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs,
+                               int delayMs = 0) = 0;
+    // enables receiving proprietary information directly from the audio hardware interface
+    // in the audio policy manager.
     virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) = 0;
 
     // set down link audio volume.
@@ -494,6 +533,10 @@
 
     virtual void onRoutingUpdated() = 0;
 
+    // Used to notify AudioService that an error was encountered when reading
+    // the volume ranges, and that they should be re-initialized
+    virtual void onVolumeRangeInitRequest() = 0;
+
     // Used to notify the sound trigger module that an audio capture is about to
     // take place. This should typically result in any active recognition
     // sessions to be preempted on modules that do not support sound trigger
@@ -504,12 +547,15 @@
 
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
     // methods respectively, expected by AudioPolicyService, needs to be exposed by
     // libaudiopolicymanagercustom.
-    using CreateAudioPolicyManagerInstance = AudioPolicyInterface* (*)(AudioPolicyClientInterface*);
+    using CreateAudioPolicyManagerInstance =
+            AudioPolicyInterface* (*)(AudioPolicyClientInterface*);
     using DestroyAudioPolicyManagerInstance = void (*)(AudioPolicyInterface*);
 
 } // namespace android
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index eb6c19e..f130f7c 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -2,6 +2,23 @@
   "presubmit": [
     {
        "name": "audiopolicy_tests"
+    },
+    {
+      "name": "GtsGmscoreHostTestCases",
+      "keywords": ["primary-device"],
+      "options" : [
+        {
+          "include-filter": "com.google.android.gts.audio.AudioHostTest#testTwoChannelCapturing"
+        }
+      ]
+    },
+    {
+      "name": "CtsNativeMediaAAudioTestCases",
+      "options" : [
+        {
+          "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+        }
+      ]
     }
   ]
 }
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 736f8b2..f0636a0 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -127,6 +127,7 @@
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
         case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
             return DEVICE_CATEGORY_HEADSET;
         case AUDIO_DEVICE_OUT_HEARING_AID:
             return DEVICE_CATEGORY_HEARING_AID;
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 577f641..3d3e0cf 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -133,6 +133,7 @@
  * - AUDIO_SOURCE_FM_TUNER
  * - AUDIO_SOURCE_VOICE_RECOGNITION
  * - AUDIO_SOURCE_HOTWORD
+ * - AUDIO_SOURCE_ULTRASOUND
  *
  * @return the corresponding input source priority or 0 if priority is irrelevant for this source.
  *      This happens when the specified source cannot share a given input stream (e.g remote submix)
@@ -142,22 +143,24 @@
 {
     switch (inputSource) {
     case AUDIO_SOURCE_VOICE_COMMUNICATION:
-        return 9;
+        return 10;
     case AUDIO_SOURCE_CAMCORDER:
-        return 8;
+        return 9;
     case AUDIO_SOURCE_VOICE_PERFORMANCE:
-        return 7;
+        return 8;
     case AUDIO_SOURCE_UNPROCESSED:
-        return 6;
+        return 7;
     case AUDIO_SOURCE_MIC:
-        return 5;
+        return 6;
     case AUDIO_SOURCE_ECHO_REFERENCE:
-        return 4;
+        return 5;
     case AUDIO_SOURCE_FM_TUNER:
-        return 3;
+        return 4;
     case AUDIO_SOURCE_VOICE_RECOGNITION:
-        return 2;
+        return 3;
     case AUDIO_SOURCE_HOTWORD:
+        return 2;
+    case AUDIO_SOURCE_ULTRASOUND:
         return 1;
     default:
         break;
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 227c2d8..1f23ae3 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -30,6 +30,7 @@
     ],
     shared_libs: [
         "libaudiofoundation",
+        "libbase",
         "libcutils",
         "libhidlbase",
         "liblog",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index a40f6aa..c26ea10 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -54,7 +54,7 @@
     DeviceVector supportedDevices() const  {
         return mProfile != nullptr ? mProfile->getSupportedDevices() :  DeviceVector(); }
 
-    void dump(String8 *dst) const override;
+    void dump(String8 *dst, int spaces, const char* extraInfo) const override;
 
     audio_io_handle_t   mIoHandle = AUDIO_IO_HANDLE_NONE; // input handle
     wp<AudioPolicyMix>  mPolicyMix;                   // non NULL when used by a dynamic policy
@@ -93,8 +93,10 @@
     audio_patch_handle_t getPatchHandle() const override;
     void setPatchHandle(audio_patch_handle_t handle) override;
     bool isMmap() override {
-        if (getPolicyAudioPort() != nullptr) {
-            return getPolicyAudioPort()->isMmap();
+        if (const auto policyPort = getPolicyAudioPort(); policyPort != nullptr) {
+            if (const auto port = policyPort->asAudioPort(); port != nullptr) {
+                return port->isMmap();
+            }
         }
         return false;
     }
@@ -142,6 +144,7 @@
     AudioPolicyClientInterface * const mClientInterface;
     int32_t mGlobalActiveCount = 0;  // non-client-specific activity ref count
     EffectDescriptorCollection mEnabledEffects;
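+    // Alias of the input flags stored in the base class (AudioPortConfig::mFlags.input).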
+    audio_input_flags_t& mFlags = AudioPortConfig::mFlags.input;
 };
 
 class AudioInputCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 7c7f02d..75fa595 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -149,7 +149,7 @@
                           AudioPolicyClientInterface *clientInterface);
     virtual ~AudioOutputDescriptor() {}
 
-    void dump(String8 *dst) const override;
+    void dump(String8 *dst, int spaces, const char* extraInfo = nullptr) const override;
     void        log(const char* indent);
 
     virtual DeviceVector devices() const { return mDevices; }
@@ -158,7 +158,7 @@
     virtual bool isDuplicated() const { return false; }
     virtual uint32_t latency() { return 0; }
     virtual bool isFixedVolume(const DeviceTypeSet& deviceTypes);
-    virtual bool setVolume(float volumeDb,
+    virtual bool setVolume(float volumeDb, bool muted,
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
@@ -270,8 +270,10 @@
     audio_patch_handle_t getPatchHandle() const override;
     void setPatchHandle(audio_patch_handle_t handle) override;
     bool isMmap() override {
-        if (getPolicyAudioPort() != nullptr) {
-            return getPolicyAudioPort()->isMmap();
+        if (const auto policyPort = getPolicyAudioPort(); policyPort != nullptr) {
+            if (const auto port = policyPort->asAudioPort(); port != nullptr) {
+                return port->isMmap();
+            }
         }
         return false;
     }
@@ -303,15 +305,19 @@
     {
         return !devices().isEmpty() ? devices().itemAt(0)->hasGainController() : false;
     }
+    bool isRouted() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
 
     DeviceVector mDevices; /**< current devices this output is routed to */
     wp<AudioPolicyMix> mPolicyMix;  // non NULL when used by a dynamic policy
 
+    // Recommended mute duration in milliseconds for this output; the base implementation returns 0.
+    virtual uint32_t getRecommendedMuteDurationMs() const { return 0; }
+
 protected:
     const sp<PolicyAudioPort> mPolicyAudioPort;
     AudioPolicyClientInterface * const mClientInterface;
     uint32_t mGlobalActiveCount = 0;  // non-client-specific active count
     audio_patch_handle_t mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
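+    // Alias of the output flags stored in the base class (AudioPortConfig::mFlags.output).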
+    audio_output_flags_t& mFlags = AudioPortConfig::mFlags.output;
 
     // The ActiveClients shows the clients that contribute to the @VolumeSource counts
     // and may include upstream clients from a duplicating thread.
@@ -332,7 +338,7 @@
                             AudioPolicyClientInterface *clientInterface);
     virtual ~SwAudioOutputDescriptor() {}
 
-            void dump(String8 *dst) const override;
+    void dump(String8 *dst, int spaces, const char* extraInfo = nullptr) const override;
     virtual DeviceVector devices() const;
     void setDevices(const DeviceVector &devices) { mDevices = devices; }
     bool sharesHwModuleWith(const sp<SwAudioOutputDescriptor>& outputDesc);
@@ -352,7 +358,22 @@
             setClientActive(client, false);
         }
     }
-    virtual bool setVolume(float volumeDb,
+
+    /**
+     * @brief setSwMute For a SwOutput routed to a device that supports HW gain, this function
+     * allows muting only the tracks associated with a given volume source.
+     * As an output may host one or more volume sources, and as AudioPolicyManager may or may not
+     * dispatch the volume change request to the single HW gain controller depending on the
+     * priority of the volume source, a separate API is needed to force a mute/unmute of a
+     * volume source.
+     * @param muted true to mute, false otherwise
+     * @param vs volume source to be considered
+     * @param streams stream types associated with the volume source
+     * @param device device types scoped for the change
+     * @param delayMs delay potentially applied to avoid cutting sounds
+     */
+    void setSwMute(bool muted, VolumeSource vs, const StreamTypeVector &streams,
+                   const DeviceTypeSet& device, uint32_t delayMs);
+
+    virtual bool setVolume(float volumeDb, bool muted,
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& device,
                            uint32_t delayMs,
@@ -415,10 +436,14 @@
      */
     DeviceVector filterSupportedDevices(const DeviceVector &devices) const;
 
+    uint32_t getRecommendedMuteDurationMs() const override;
+
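+    // Marks as invalid all track clients attached to this output that follow the given strategy.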
+    void setTracksInvalidatedStatusByStrategy(product_strategy_t strategy);
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
-    audio_output_flags_t mFlags;   //
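+    // Reuse the flags alias from AudioOutputDescriptor rather than keeping a separate copy.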
+    using AudioOutputDescriptor::mFlags;
     sp<SwAudioOutputDescriptor> mOutput1;    // used by duplicated outputs: first output
     sp<SwAudioOutputDescriptor> mOutput2;    // used by duplicated outputs: second output
     uint32_t mDirectOpenCount; // number of clients using this output (direct outputs only)
@@ -435,9 +460,9 @@
                             AudioPolicyClientInterface *clientInterface);
     virtual ~HwAudioOutputDescriptor() {}
 
-            void dump(String8 *dst) const override;
+    void dump(String8 *dst, int spaces, const char* extraInfo) const override;
 
-    virtual bool setVolume(float volumeDb,
+    virtual bool setVolume(float volumeDb, bool muted,
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
@@ -493,6 +518,14 @@
                                       uint32_t inPastMs = 0, nsecs_t sysTime = 0) const;
 
     /**
+     * @brief isStrategyActive checks if the given strategy is active
+     * on any output of this collection
+     * @param ps product strategy whose activity status is to be checked
+     * @return true if an output following the strategy is active, false otherwise
+     */
+    bool isStrategyActive(product_strategy_t ps) const;
+
+    /**
      * @brief clearSessionRoutesForDevice: when a device is disconnected, and if this device has
      * been chosen as the preferred device by any client, the policy manager shall
      * prevent from using this device any more by clearing all the session routes involving this
@@ -537,6 +570,11 @@
 
     sp<SwAudioOutputDescriptor> getOutputForClient(audio_port_handle_t portId);
 
+    /**
+     * return whether any output is active and routed to any of the specified devices
+     */
+    bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
+
     void dump(String8 *dst) const;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h b/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h
index a5de655..955b0cf 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h
@@ -41,7 +41,7 @@
 
     void setUid(uid_t uid) { mUid = uid; }
 
-    void dump(String8 *dst, int spaces, int index) const;
+    void dump(String8 *dst, int spaces) const;
 
     struct audio_patch mPatch;
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index a8fd856..cf1f64c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -202,20 +202,6 @@
             {AUDIO_FORMAT_AC4, {}}};
     }
 
-    //TODO: b/193496180 use spatializer flag at audio HAL when available
-    // until then, use DEEP_BUFFER+FAST flag combo to indicate the spatializer output profile
-    void convertSpatializerFlag()
-    {
-        for (const auto& hwModule : mHwModules) {
-            for (const auto& curProfile : hwModule->getOutputProfiles()) {
-                if (curProfile->getFlags()
-                        == (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER)) {
-                    curProfile->setFlags(AUDIO_OUTPUT_FLAG_SPATIALIZER);
-                }
-            }
-        }
-    }
-
 private:
     static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 74b3405..0431619 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -16,19 +16,21 @@
 
 #pragma once
 
-#include <vector>
-#include <map>
-#include <unistd.h>
 #include <sys/types.h>
+#include <unistd.h>
 
-#include <system/audio.h>
+#include <map>
+#include <vector>
+
+#include <android-base/stringprintf.h>
 #include <audiomanager/AudioManager.h>
 #include <media/AudioProductStrategy.h>
+#include <policy.h>
+#include <system/audio.h>
 #include <utils/Errors.h>
 #include <utils/KeyedVector.h>
 #include <utils/RefBase.h>
 #include <utils/String8.h>
-#include <policy.h>
 #include <Volume.h>
 #include "AudioPatch.h"
 #include "EffectDescriptor.h"
@@ -52,9 +54,15 @@
         mPreferredDeviceForExclusiveUse(isPreferredDeviceForExclusiveUse){}
     ~ClientDescriptor() override = default;
 
-    virtual void dump(String8 *dst, int spaces, int index) const;
+    virtual void dump(String8 *dst, int spaces) const;
     virtual std::string toShortString() const;
-
+    /**
+     * @brief isInternal
+     * @return true if the client corresponds to an audio patch created from the createAudioPatch
+     * API or for call audio routing, false if the client corresponds to an AudioTrack, AudioRecord
+     * or HW Audio Source.
+     */
+    virtual bool isInternal() const { return false; }
     audio_port_handle_t portId() const { return mPortId; }
     uid_t uid() const { return mUid; }
     audio_session_t session() const { return mSessionId; };
@@ -67,8 +75,16 @@
     bool isPreferredDeviceForExclusiveUse() const { return mPreferredDeviceForExclusiveUse; }
     virtual void setActive(bool active) { mActive = active; }
     bool active() const { return mActive; }
+    /**
+     * @brief hasPreferredDevice Note that as internal clients use the preferred device for
+     * convenience, this internal behavior is hidden to prevent regressions (such as invalidating
+     * tracks for clients following the same strategies).
+     * @param activeOnly if true, only report a preferred device when the client is active
+     * @return true if the client is not internal and has a preferred device set (and is active
+     *         when activeOnly is true), false otherwise
+     */
     bool hasPreferredDevice(bool activeOnly = false) const {
-        return mPreferredDeviceId != AUDIO_PORT_HANDLE_NONE && (!activeOnly || mActive);
+        return !isInternal() &&
+                mPreferredDeviceId != AUDIO_PORT_HANDLE_NONE && (!activeOnly || mActive);
     }
 
 private:
@@ -100,7 +116,7 @@
     ~TrackClientDescriptor() override = default;
 
     using ClientDescriptor::dump;
-    void dump(String8 *dst, int spaces, int index) const override;
+    void dump(String8 *dst, int spaces) const override;
     std::string toShortString() const override;
 
     audio_output_flags_t flags() const { return mFlags; }
@@ -141,6 +157,14 @@
     }
     uint32_t getActivityCount() const { return mActivityCount; }
 
+    bool isInvalid() const {
+        return mIsInvalid;
+    }
+
+    void setIsInvalid() {
+        mIsInvalid = true;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -153,6 +177,7 @@
      * involved in a duplication.
      */
     uint32_t mActivityCount = 0;
+    bool mIsInvalid = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
@@ -168,7 +193,7 @@
     ~RecordClientDescriptor() override = default;
 
     using ClientDescriptor::dump;
-    void dump(String8 *dst, int spaces, int index) const override;
+    void dump(String8 *dst, int spaces) const override;
 
     audio_unique_id_t riid() const { return mRIId; }
     audio_source_t source() const { return mSource; }
@@ -209,6 +234,11 @@
         mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
         mSinkDevice = nullptr;
     }
+    bool belongsToOutput(const sp<SwAudioOutputDescriptor> &swOutput) const {
+        return swOutput != nullptr && mSwOutput.promote() == swOutput;
+    }
+    void setUseSwBridge() { mUseSwBridge = true; }
+    bool useSwBridge() const { return mUseSwBridge; }
     bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
     sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
@@ -219,7 +249,7 @@
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
 
     using ClientDescriptor::dump;
-    void dump(String8 *dst, int spaces, int index) const override;
+    void dump(String8 *dst, int spaces) const override;
 
  private:
     audio_patch_handle_t mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
@@ -227,6 +257,35 @@
     sp<DeviceDescriptor> mSinkDevice;
     wp<SwAudioOutputDescriptor> mSwOutput;
     wp<HwAudioOutputDescriptor> mHwOutput;
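+    // Whether this source is bridged through a software patch rather than a hardware one.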
+    bool mUseSwBridge = false;
+};
+
+/**
+ * @brief The InternalSourceClientDescriptor class
+ * Specialized Client Descriptor for either a raw patch created from the @see createAudioPatch API
+ * or for internal audio patches managed by APM (e.g. phone call patches).
+ * Whatever bridge is created (software or hardware), a client is needed to track the activity
+ * and manage volumes.
+ * The requested sink of the Audio Patch is expressed as a preferred device, which allows routing
+ * the SwOutput. APM then performs checks on the UID of the requester (against the UID of
+ * audioserver) to prevent rerouting a SwOutput involved in raw patches.
+ */
+class InternalSourceClientDescriptor: public SourceClientDescriptor
+{
+public:
+    InternalSourceClientDescriptor(
+            audio_port_handle_t portId, uid_t uid, audio_attributes_t attributes,
+            const struct audio_port_config &config, const sp<DeviceDescriptor>& srcDevice,
+            const sp<DeviceDescriptor>& sinkDevice,
+            product_strategy_t strategy, VolumeSource volumeSource) :
+        SourceClientDescriptor(
+            portId, uid, attributes, config, srcDevice, AUDIO_STREAM_PATCH, strategy,
+            volumeSource)
+    {
+        setPreferredDeviceId(sinkDevice->getId());
+    }
+    bool isInternal() const override { return true; }
+    ~InternalSourceClientDescriptor() override = default;
 };
 
 class SourceClientCollection :
@@ -269,10 +328,13 @@
     size_t getClientCount() const {
         return mClients.size();
     }
-    virtual void dump(String8 *dst) const {
+    virtual void dump(String8 *dst, int spaces, const char* extraInfo = nullptr) const {
+        (void)extraInfo;
         size_t index = 0;
         for (const auto& client: getClientIterable()) {
-            client->dump(dst, 2, index++);
+            const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", ++index);
+            dst->appendFormat("%s", prefix.c_str());
+            client->dump(dst, prefix.size());
         }
     }
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 58d05c6..4adc920 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -43,7 +43,7 @@
     DeviceDescriptor(const AudioDeviceTypeAddr &deviceTypeAddr, const std::string &tagName = "",
             const FormatVector &encodedFormats = FormatVector{});
 
-    virtual ~DeviceDescriptor() {}
+    virtual ~DeviceDescriptor() = default;
 
     virtual void addAudioProfile(const sp<AudioProfile> &profile) {
         addAudioProfileAndSort(mProfiles, profile);
@@ -51,8 +51,6 @@
 
     virtual const std::string getTagName() const { return mTagName; }
 
-    const FormatVector& encodedFormats() const { return mEncodedFormats; }
-
     audio_format_t getEncodedFormat() { return mCurrentEncodedFormat; }
 
     void setEncodedFormat(audio_format_t format) {
@@ -63,8 +61,6 @@
 
     bool hasCurrentEncodedFormat() const;
 
-    bool supportsFormat(audio_format_t format);
-
     void setDynamic() { mIsDynamic = true; }
     bool isDynamic() const { return mIsDynamic; }
 
@@ -95,7 +91,7 @@
 
     void setEncapsulationInfoFromHal(AudioPolicyClientInterface *clientInterface);
 
-    void dump(String8 *dst, int spaces, int index, bool verbose = true) const;
+    void dump(String8 *dst, int spaces, bool verbose = true) const;
 
 private:
     template <typename T, std::enable_if_t<std::is_same<T, struct audio_port>::value
@@ -106,7 +102,6 @@
     }
 
     std::string mTagName; // Unique human readable identifier for a device port found in conf file.
-    FormatVector        mEncodedFormats;
     audio_format_t      mCurrentEncodedFormat;
     bool                mIsDynamic = false;
     const std::string   mDeclaredAddress; // Original device address
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 54b3408..436fcc1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -115,7 +115,7 @@
                        const sp<PolicyAudioPort> &dstPort) const;
 
     // TODO remove from here (split serialization)
-    void dump(String8 *dst) const;
+    void dump(String8 *dst, int spaces) const;
 
 private:
     void refreshSupportedDevices();
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index a74cefa..90b812d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -37,9 +37,7 @@
 public:
     IOProfile(const std::string &name, audio_port_role_t role)
         : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
-          maxOpenCount(1),
           curOpenCount(0),
-          maxActiveCount(1),
           curActiveCount(0) {}
 
     virtual ~IOProfile() = default;
@@ -59,11 +57,12 @@
     // Once capture clients are tracked individually and not per session this can be removed
     // MMAP no IRQ input streams do not have the default limitation of one active client
     // max as they can be used in shared mode by the same application.
+    // NOTE: Please consider moving to AudioPort when addressing the FIXME
     // NOTE: this works for explicit values set in audio_policy_configuration.xml because
     // flags are parsed before maxActiveCount by the serializer.
     void setFlags(uint32_t flags) override
     {
-        PolicyAudioPort::setFlags(flags);
+        AudioPort::setFlags(flags);
         if (getRole() == AUDIO_PORT_ROLE_SINK && (flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             maxActiveCount = 0;
         }
@@ -98,7 +97,7 @@
                              uint32_t flags,
                              bool exactMatchRequiredForInputFlags = false) const;
 
-    void dump(String8 *dst) const;
+    void dump(String8 *dst, int spaces) const;
     void log();
 
     bool hasSupportedDevices() const { return !mSupportedDevices.isEmpty(); }
@@ -194,16 +193,8 @@
         return false;
     }
 
-    // Maximum number of input or output streams that can be simultaneously opened for this profile.
-    // By convention 0 means no limit. To respect legacy behavior, initialized to 1 for output
-    // profiles and 0 for input profiles
-    uint32_t     maxOpenCount;
     // Number of streams currently opened for this profile.
     uint32_t     curOpenCount;
-    // Maximum number of input or output streams that can be simultaneously active for this profile.
-    // By convention 0 means no limit. To respect legacy behavior, initialized to 0 for output
-    // profiles and 1 for input profiles
-    uint32_t     maxActiveCount;
     // Number of streams currently active for this profile. This is not the number of active clients
     // (AudioTrack or AudioRecord) but the number of active HAL streams.
     uint32_t     curActiveCount;
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index ab33b38..acf787b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -36,7 +36,7 @@
 class PolicyAudioPort : public virtual RefBase, private HandleGenerator<audio_port_handle_t>
 {
 public:
-    PolicyAudioPort() : mFlags(AUDIO_OUTPUT_FLAG_NONE) {}
+    PolicyAudioPort() = default;
 
     virtual ~PolicyAudioPort() = default;
 
@@ -49,19 +49,6 @@
 
     virtual sp<AudioPort> asAudioPort() const = 0;
 
-    virtual void setFlags(uint32_t flags)
-    {
-        //force direct flag if offload flag is set: offloading implies a direct output stream
-        // and all common behaviors are driven by checking only the direct flag
-        // this should normally be set appropriately in the policy configuration file
-        if (asAudioPort()->getRole() == AUDIO_PORT_ROLE_SOURCE &&
-                (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
-            flags |= AUDIO_OUTPUT_FLAG_DIRECT;
-        }
-        mFlags = flags;
-    }
-    uint32_t getFlags() const { return mFlags; }
-
     virtual void attach(const sp<HwModule>& module);
     virtual void detach();
     bool isAttached() { return mModule != 0; }
@@ -105,22 +92,6 @@
     const char *getModuleName() const;
     sp<HwModule> getModule() const { return mModule; }
 
-    inline bool isDirectOutput() const
-    {
-        return (asAudioPort()->getType() == AUDIO_PORT_TYPE_MIX) &&
-                (asAudioPort()->getRole() == AUDIO_PORT_ROLE_SOURCE) &&
-                (mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD));
-    }
-
-    inline bool isMmap() const
-    {
-        return (asAudioPort()->getType() == AUDIO_PORT_TYPE_MIX)
-                && (((asAudioPort()->getRole() == AUDIO_PORT_ROLE_SOURCE) &&
-                        ((mFlags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0))
-                    || ((asAudioPort()->getRole() == AUDIO_PORT_ROLE_SINK) &&
-                        ((mFlags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0)));
-    }
-
     void addRoute(const sp<AudioRoute> &route) { mRoutes.add(route); }
     const AudioRouteVector &getRoutes() const { return mRoutes; }
 
@@ -129,7 +100,6 @@
                          const ChannelMaskSet &channelMasks) const;
     void pickSamplingRate(uint32_t &rate, const SampleRateSet &samplingRates) const;
 
-    uint32_t mFlags; // attribute flags mask (e.g primary output, direct output...).
     sp<HwModule> mModule;     // audio HW module exposing this I/O stream
     AudioRouteVector mRoutes; // Routes involving this port
 };
@@ -141,27 +111,18 @@
 
     virtual sp<PolicyAudioPort> getPolicyAudioPort() const = 0;
 
-    status_t validationBeforeApplyConfig(const struct audio_port_config *config) const;
-
-    void applyPolicyAudioPortConfig(const struct audio_port_config *config) {
-        if (config->config_mask & AUDIO_PORT_CONFIG_FLAGS) {
-            mFlags = config->flags;
-        }
+    status_t validationBeforeApplyConfig(const struct audio_port_config *config) const {
+        sp<PolicyAudioPort> policyAudioPort = getPolicyAudioPort();
+        return policyAudioPort ? policyAudioPort->checkExactAudioProfile(config) : NO_INIT;
     }
 
-    void toPolicyAudioPortConfig(
-            struct audio_port_config *dstConfig,
-            const struct audio_port_config *srcConfig = NULL) const;
-
-
-    virtual bool hasSameHwModuleAs(const sp<PolicyAudioPortConfig>& other) const {
+    bool hasSameHwModuleAs(const sp<PolicyAudioPortConfig>& other) const {
         return (other.get() != nullptr) && (other->getPolicyAudioPort().get() != nullptr) &&
                 (getPolicyAudioPort().get() != nullptr) &&
                 (other->getPolicyAudioPort()->getModuleHandle() ==
                         getPolicyAudioPort()->getModuleHandle());
     }
 
-    union audio_io_flags mFlags = { AUDIO_INPUT_FLAG_NONE };
 };
 
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp
index cd10010..580938e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioCollections.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "APM::AudioCollections"
 //#define LOG_NDEBUG 0
 
+#include <android-base/stringprintf.h>
+
 #include "AudioCollections.h"
 #include "AudioRoute.h"
 #include "HwModule.h"
@@ -40,10 +42,11 @@
     if (audioRouteVector.isEmpty()) {
         return;
     }
-    dst->appendFormat("\n%*sAudio Routes (%zu):\n", spaces, "", audioRouteVector.size());
+    dst->appendFormat("%*s- Audio Routes (%zu):\n", spaces - 2, "", audioRouteVector.size());
     for (size_t i = 0; i < audioRouteVector.size(); i++) {
-        dst->appendFormat("%*s- Route %zu:\n", spaces, "", i + 1);
-        audioRouteVector.itemAt(i)->dump(dst, 4);
+        const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", i + 1);
+        dst->append(prefix.c_str());
+        audioRouteVector.itemAt(i)->dump(dst, prefix.size());
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 7016a08..6f71ac5 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "APM::AudioInputDescriptor"
 //#define LOG_NDEBUG 0
 
+#include <android-base/stringprintf.h>
+
 #include <audiomanager/AudioManager.h>
 #include <media/AudioPolicy.h>
 #include <policy.h>
@@ -37,6 +39,7 @@
         if (profile->getGains().size() > 0) {
             profile->getGains()[0]->getDefaultConfig(&mGain);
         }
+        mFlags = (audio_input_flags_t)profile->getFlags();
     }
 }
 
@@ -62,7 +65,6 @@
     toAudioPortConfig(&localBackupConfig);
     if ((status = validationBeforeApplyConfig(config)) == NO_ERROR) {
         AudioPortConfig::applyAudioPortConfig(config, backupConfig);
-        applyPolicyAudioPortConfig(config);
     }
 
     if (backupConfig != NULL) {
@@ -76,14 +78,12 @@
 {
     ALOG_ASSERT(mProfile != 0,
                 "toAudioPortConfig() called on input with null profile %d", mIoHandle);
-    dstConfig->config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE|AUDIO_PORT_CONFIG_CHANNEL_MASK|
-                            AUDIO_PORT_CONFIG_FORMAT|AUDIO_PORT_CONFIG_GAIN;
+    dstConfig->config_mask = AUDIO_PORT_CONFIG_ALL;
     if (srcConfig != NULL) {
         dstConfig->config_mask |= srcConfig->config_mask;
     }
 
     AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig);
-    toPolicyAudioPortConfig(dstConfig, srcConfig);
 
     dstConfig->role = AUDIO_PORT_ROLE_SINK;
     dstConfig->type = AUDIO_PORT_TYPE_MIX;
@@ -510,17 +510,25 @@
     }
 }
 
-void AudioInputDescriptor::dump(String8 *dst) const
+void AudioInputDescriptor::dump(String8 *dst, int spaces, const char* extraInfo) const
 {
-    dst->appendFormat(" ID: %d\n", getId());
-    dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
-    dst->appendFormat(" Format: %d\n", mFormat);
-    dst->appendFormat(" Channels: %08x\n", mChannelMask);
-    dst->appendFormat(" Devices %s\n", mDevice->toString(true /*includeSensitiveInfo*/).c_str());
-    mEnabledEffects.dump(dst, 1 /*spaces*/, false /*verbose*/);
-    dst->append(" AudioRecord Clients:\n");
-    ClientMapHandler<RecordClientDescriptor>::dump(dst);
-    dst->append("\n");
+    std::string flagsLiteral = toString(mFlags);
+    if (!flagsLiteral.empty()) {
+        flagsLiteral = " (" + flagsLiteral + ")";
+    }
+    dst->appendFormat("Port ID: %d; 0x%04x%s%s%s\n",
+            getId(), mFlags, flagsLiteral.c_str(),
+            extraInfo != nullptr ? "; " : "", extraInfo != nullptr ? extraInfo : "");
+    dst->appendFormat("%*s%s; %d; Channel mask: 0x%x\n", spaces, "",
+            audio_format_to_string(mFormat), mSamplingRate, mChannelMask);
+    dst->appendFormat("%*sDevices: %s\n", spaces, "",
+            mDevice->toString(true /*includeSensitiveInfo*/).c_str());
+    mEnabledEffects.dump(dst, spaces /*spaces*/, false /*verbose*/);
+    if (getClientCount() != 0) {
+        dst->appendFormat("%*sAudioRecord Clients (%zu):\n", spaces, "", getClientCount());
+        ClientMapHandler<RecordClientDescriptor>::dump(dst, spaces);
+        dst->append("\n");
+    }
 }
 
 bool AudioInputCollection::isSourceActive(audio_source_t source) const
@@ -608,10 +616,12 @@
 
 void AudioInputCollection::dump(String8 *dst) const
 {
-    dst->append("\nInputs dump:\n");
+    dst->appendFormat("\n Inputs (%zu):\n", size());
     for (size_t i = 0; i < size(); i++) {
-        dst->appendFormat("- Input %d dump:\n", keyAt(i));
-        valueAt(i)->dump(dst);
+        const std::string prefix = base::StringPrintf("  %zu. ", i + 1);
+        const std::string extraInfo = base::StringPrintf("I/O handle: %d", keyAt(i));
+        dst->appendFormat("%s", prefix.c_str());
+        valueAt(i)->dump(dst, prefix.size(), extraInfo.c_str());
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 866f1b5..8eefe775 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "APM::AudioOutputDescriptor"
 //#define LOG_NDEBUG 0
 
+#include <android-base/stringprintf.h>
+
 #include <AudioPolicyInterface.h>
 #include "AudioOutputDescriptor.h"
 #include "AudioPolicyMix.h"
@@ -155,7 +157,7 @@
     return false;
 }
 
-bool AudioOutputDescriptor::setVolume(float volumeDb,
+bool AudioOutputDescriptor::setVolume(float volumeDb, bool /*muted*/,
                                       VolumeSource volumeSource,
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
@@ -188,7 +190,6 @@
     toAudioPortConfig(&localBackupConfig);
     if ((status = validationBeforeApplyConfig(config)) == NO_ERROR) {
         AudioPortConfig::applyAudioPortConfig(config, backupConfig);
-        applyPolicyAudioPortConfig(config);
     }
 
     if (backupConfig != NULL) {
@@ -201,13 +202,11 @@
 void AudioOutputDescriptor::toAudioPortConfig(struct audio_port_config *dstConfig,
                                               const struct audio_port_config *srcConfig) const
 {
-    dstConfig->config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE|AUDIO_PORT_CONFIG_CHANNEL_MASK|
-                            AUDIO_PORT_CONFIG_FORMAT|AUDIO_PORT_CONFIG_GAIN;
+    dstConfig->config_mask = AUDIO_PORT_CONFIG_ALL;
     if (srcConfig != NULL) {
         dstConfig->config_mask |= srcConfig->config_mask;
     }
     AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig);
-    toPolicyAudioPortConfig(dstConfig, srcConfig);
 
     dstConfig->role = AUDIO_PORT_ROLE_SOURCE;
     dstConfig->type = AUDIO_PORT_TYPE_MIX;
@@ -245,32 +244,45 @@
         return client->volumeSource() != volumeSourceToIgnore; }) != end(mActiveClients);
 }
 
-void AudioOutputDescriptor::dump(String8 *dst) const
+void AudioOutputDescriptor::dump(String8 *dst, int spaces, const char* extraInfo) const
 {
-    dst->appendFormat(" ID: %d\n", mId);
-    dst->appendFormat(" Sampling rate: %d\n", mSamplingRate);
-    dst->appendFormat(" Format: %08x\n", mFormat);
-    dst->appendFormat(" Channels: %08x\n", mChannelMask);
-    dst->appendFormat(" Devices: %s\n", devices().toString(true /*includeSensitiveInfo*/).c_str());
-    dst->appendFormat(" Global active count: %u\n", mGlobalActiveCount);
-    for (const auto &iter : mRoutingActivities) {
-        dst->appendFormat(" Product Strategy id: %d", iter.first);
-        iter.second.dump(dst, 4);
+    dst->appendFormat("Port ID: %d%s%s\n",
+            mId, extraInfo != nullptr ? "; " : "", extraInfo != nullptr ? extraInfo : "");
+    dst->appendFormat("%*s%s; %d; Channel mask: 0x%x\n", spaces, "",
+            audio_format_to_string(mFormat), mSamplingRate, mChannelMask);
+    dst->appendFormat("%*sDevices: %s\n", spaces, "",
+            devices().toString(true /*includeSensitiveInfo*/).c_str());
+    dst->appendFormat("%*sGlobal active count: %u\n", spaces, "", mGlobalActiveCount);
+    if (!mRoutingActivities.empty()) {
+        dst->appendFormat("%*s- Product Strategies (%zu):\n", spaces - 2, "",
+                mRoutingActivities.size());
+        for (const auto &iter : mRoutingActivities) {
+            dst->appendFormat("%*sid %d: ", spaces + 1, "", iter.first);
+            iter.second.dump(dst, 0);
+        }
     }
-    for (const auto &iter : mVolumeActivities) {
-        dst->appendFormat(" Volume Activities id: %d", iter.first);
-        iter.second.dump(dst, 4);
+    if (!mVolumeActivities.empty()) {
+        dst->appendFormat("%*s- Volume Activities (%zu):\n", spaces - 2, "",
+                mVolumeActivities.size());
+        for (const auto &iter : mVolumeActivities) {
+            dst->appendFormat("%*sid %d: ", spaces + 1, "", iter.first);
+            iter.second.dump(dst, 0);
+        }
     }
-    dst->append(" AudioTrack Clients:\n");
-    ClientMapHandler<TrackClientDescriptor>::dump(dst);
-    dst->append("\n");
+    if (getClientCount() != 0) {
+        dst->appendFormat("%*s- AudioTrack clients (%zu):\n", spaces - 2, "", getClientCount());
+        ClientMapHandler<TrackClientDescriptor>::dump(dst, spaces);
+    }
     if (!mActiveClients.empty()) {
-        dst->append(" AudioTrack active (stream) clients:\n");
+        dst->appendFormat("%*s- AudioTrack active (stream) clients (%zu):\n", spaces - 2, "",
+                mActiveClients.size());
         size_t index = 0;
         for (const auto& client : mActiveClients) {
-            client->dump(dst, 2, index++);
+            const std::string prefix = base::StringPrintf(
+                    "%*sid %zu: ", spaces + 1, "", index + 1);
+            dst->appendFormat("%s", prefix.c_str());
+            client->dump(dst, prefix.size());
         }
-        dst->append(" \n");
     }
 }
 
@@ -285,7 +297,6 @@
                                                  AudioPolicyClientInterface *clientInterface)
     : AudioOutputDescriptor(profile, clientInterface),
     mProfile(profile), mIoHandle(AUDIO_IO_HANDLE_NONE), mLatency(0),
-    mFlags((audio_output_flags_t)0),
     mOutput1(0), mOutput2(0), mDirectOpenCount(0),
     mDirectClientSession(AUDIO_SESSION_NONE)
 {
@@ -294,11 +305,18 @@
     }
 }
 
-void SwAudioOutputDescriptor::dump(String8 *dst) const
+void SwAudioOutputDescriptor::dump(String8 *dst, int spaces, const char* extraInfo) const
 {
-    dst->appendFormat(" Latency: %d\n", mLatency);
-    dst->appendFormat(" Flags %08x\n", mFlags);
-    AudioOutputDescriptor::dump(dst);
+    String8 allExtraInfo;
+    if (extraInfo != nullptr) {
+        allExtraInfo.appendFormat("%s; ", extraInfo);
+    }
+    std::string flagsLiteral = toString(mFlags);
+    allExtraInfo.appendFormat("Latency: %d; 0x%04x", mLatency, mFlags);
+    if (!flagsLiteral.empty()) {
+        allExtraInfo.appendFormat(" (%s)", flagsLiteral.c_str());
+    }
+    AudioOutputDescriptor::dump(dst, spaces, allExtraInfo.c_str());
 }
 
 DeviceVector SwAudioOutputDescriptor::devices() const
@@ -435,14 +453,36 @@
             mFlags & AUDIO_OUTPUT_FLAG_FAST ? AUDIO_LATENCY_LOW : AUDIO_LATENCY_NORMAL;
 }
 
-bool SwAudioOutputDescriptor::setVolume(float volumeDb,
+void SwAudioOutputDescriptor::setSwMute(
+        bool muted, VolumeSource vs, const StreamTypeVector &streamTypes,
+        const DeviceTypeSet& deviceTypes, uint32_t delayMs) {
+    // Only act when this volume source is active and more than one volume source is active;
+    // otherwise this is a no-op, and setVolume controls the SW and/or HW gains.
+    if (!streamTypes.empty() && isActive(vs) && (getActiveVolumeSources().size() > 1)) {
+        for (const auto& devicePort : devices()) {
+            if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
+                    devicePort->hasGainController(true /*canUseForVolume*/)) {
+                float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
+                      mIoHandle, vs, muted, getActiveVolumeSources().size());
+                for (const auto &stream : streamTypes) {
+                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                }
+                return;
+            }
+        }
+    }
+}
+
+bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool muted,
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
                                         bool force)
 {
     StreamTypeVector streams = streamTypes;
-    if (!AudioOutputDescriptor::setVolume(volumeDb, vs, streamTypes, deviceTypes, delayMs, force)) {
+    if (!AudioOutputDescriptor::setVolume(
+            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force)) {
         return false;
     }
     if (streams.empty()) {
@@ -459,11 +499,17 @@
             // different Volume Source (or if we allow several curves within same volume group)
             //
             // @todo: default stream volume to max (0) when using HW Port gain?
-            float volumeAmpl = Volume::DbToAmpl(0);
-            for (const auto &stream : streams) {
-                mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+            // Set the SW gain on AudioFlinger if:
+            //    - the volume group has explicit stream(s) associated, or
+            //    - the volume group with no explicit stream(s) is the only active source on this output
+            // Mute the SW gain on AudioFlinger only for a volume group with explicit stream(s)
+            if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
+                const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
+                float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
+                for (const auto &stream : streams) {
+                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                }
             }
-
             AudioGains gains = devicePort->getGains();
             int gainMinValueInMb = gains[0]->getMinValueInMb();
             int gainMaxValueInMb = gains[0]->getMaxValueInMb();
@@ -525,12 +571,6 @@
         lHalConfig.offload_info.channel_mask = lHalConfig.channel_mask;
         lHalConfig.offload_info.format = lHalConfig.format;
         lHalConfig.offload_info.stream_type = stream;
-        lHalConfig.offload_info.duration_us = -1;
-        lHalConfig.offload_info.has_video = true; // conservative
-        lHalConfig.offload_info.is_streaming = true; // likely
-        lHalConfig.offload_info.encapsulation_mode = lHalConfig.offload_info.encapsulation_mode;
-        lHalConfig.offload_info.content_id = lHalConfig.offload_info.content_id;
-        lHalConfig.offload_info.sync_id = lHalConfig.offload_info.sync_id;
     }
 
     audio_config_base_t lMixerConfig;
@@ -545,15 +585,11 @@
 
     mFlags = (audio_output_flags_t)(mFlags | flags);
 
-    //TODO: b/193496180 use spatializer flag at audio HAL when available
-    audio_output_flags_t halFlags = mFlags;
-    if ((mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
-        halFlags = (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
-        // If no mixer config is specified for a spatializer output, default to 5.1 for proper
-        // configuration of the final downmixer or spatializer
-        if (mixerConfig == nullptr) {
-            lMixerConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
-        }
+    // If no mixer config is specified for a spatializer output, default to 5.1 for proper
+    // configuration of the final downmixer or spatializer
+    if ((mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
+            && mixerConfig == nullptr) {
+        lMixerConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
     }
 
     ALOGV("opening output for device %s profile %p name %s",
@@ -565,7 +601,7 @@
                                                    &lMixerConfig,
                                                    device,
                                                    &mLatency,
-                                                   halFlags);
+                                                   mFlags);
 
     if (status == NO_ERROR) {
         LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
@@ -675,6 +711,23 @@
     return NO_ERROR;
 }
 
+uint32_t SwAudioOutputDescriptor::getRecommendedMuteDurationMs() const
+{
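+    // For a duplicated output, report the larger recommendation of the two underlying outputs.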
+    if (isDuplicated()) {
+        return std::max(mOutput1->getRecommendedMuteDurationMs(),
+                mOutput2->getRecommendedMuteDurationMs());
+    }
+    return mProfile->recommendedMuteDurationMs;
+}
+
+void SwAudioOutputDescriptor::setTracksInvalidatedStatusByStrategy(product_strategy_t strategy) {
+    for (const auto &client : getClientIterable()) {
+        if (strategy == client->strategy()) {
+            client->setIsInvalid();
+        }
+    }
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
@@ -683,11 +736,11 @@
 {
 }
 
-void HwAudioOutputDescriptor::dump(String8 *dst) const
+void HwAudioOutputDescriptor::dump(String8 *dst, int spaces, const char* extraInfo) const
 {
-    AudioOutputDescriptor::dump(dst);
-    dst->append("Source:\n");
-    mSource->dump(dst, 0, 0);
+    AudioOutputDescriptor::dump(dst, spaces, extraInfo);
+    dst->appendFormat("%*sSource:\n", spaces, "");
+    mSource->dump(dst, spaces);
 }
 
 void HwAudioOutputDescriptor::toAudioPortConfig(
@@ -703,14 +756,14 @@
 }
 
 
-bool HwAudioOutputDescriptor::setVolume(float volumeDb,
+bool HwAudioOutputDescriptor::setVolume(float volumeDb, bool muted,
                                         VolumeSource volumeSource, const StreamTypeVector &streams,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
                                         bool force)
 {
     bool changed = AudioOutputDescriptor::setVolume(
-            volumeDb, volumeSource, streams, deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force);
 
     if (changed) {
       // TODO: use gain controller on source device if any to adjust volume
@@ -782,6 +835,16 @@
     return false;
 }
 
+bool SwAudioOutputCollection::isStrategyActive(product_strategy_t ps) const
+{
+    for (size_t i = 0; i < size(); i++) {
+        if (valueAt(i)->isStrategyActive(ps)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 audio_io_handle_t SwAudioOutputCollection::getA2dpOutput() const
 {
     for (size_t i = 0; i < size(); i++) {
@@ -857,13 +920,25 @@
         }
     }
 }
+bool SwAudioOutputCollection::isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const {
+    for (size_t i = 0; i < size(); i++) {
+        const sp<SwAudioOutputDescriptor> outputDesc = valueAt(i);
+        if (outputDesc->isActive()
+                && outputDesc->devices().containsDeviceAmongTypes(deviceTypes)) {
+            return true;
+        }
+    }
+    return false;
+}
 
 void SwAudioOutputCollection::dump(String8 *dst) const
 {
-    dst->append("\nOutputs dump:\n");
+    dst->appendFormat("\n Outputs (%zu):\n", size());
     for (size_t i = 0; i < size(); i++) {
-        dst->appendFormat("- Output %d dump:\n", keyAt(i));
-        valueAt(i)->dump(dst);
+        const std::string prefix = base::StringPrintf("  %zu. ", i + 1);
+        const std::string extraInfo = base::StringPrintf("I/O handle: %d", keyAt(i));
+        dst->appendFormat("%s", prefix.c_str());
+        valueAt(i)->dump(dst, prefix.size(), extraInfo.c_str());
     }
 }
 
@@ -882,10 +957,12 @@
 
 void HwAudioOutputCollection::dump(String8 *dst) const
 {
-    dst->append("\nOutputs dump:\n");
+    dst->appendFormat("\n Outputs (%zu):\n", size());
     for (size_t i = 0; i < size(); i++) {
-        dst->appendFormat("- Output %d dump:\n", keyAt(i));
-        valueAt(i)->dump(dst);
+        const std::string prefix = base::StringPrintf("  %zu. ", i + 1);
+        const std::string extraInfo = base::StringPrintf("I/O handle: %d", keyAt(i));
+        dst->appendFormat("%s", prefix.c_str());
+        valueAt(i)->dump(dst, prefix.size(), extraInfo.c_str());
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
index d79110a..4f03db9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
@@ -20,7 +20,9 @@
 #include "AudioPatch.h"
 #include "TypeConverter.h"
 
+#include <android-base/stringprintf.h>
 #include <log/log.h>
+#include <media/AudioDeviceTypeAddr.h>
 #include <utils/String8.h>
 
 namespace android {
@@ -37,20 +39,21 @@
 {
     for (int i = 0; i < count; ++i) {
         const audio_port_config &cfg = cfgs[i];
-        dst->appendFormat("%*s  [%s %d] ", spaces, "", prefix, i + 1);
+        dst->appendFormat("%*s[%s %d] ", spaces, "", prefix, i + 1);
         if (cfg.type == AUDIO_PORT_TYPE_DEVICE) {
-            dst->appendFormat("Device ID %d %s", cfg.id, toString(cfg.ext.device.type).c_str());
+            AudioDeviceTypeAddr device(cfg.ext.device.type, cfg.ext.device.address);
+            dst->appendFormat("Device Port ID: %d; {%s}",
+                    cfg.id, device.toString(true /*includeSensitiveInfo*/).c_str());
         } else {
-            dst->appendFormat("Mix ID %d I/O handle %d", cfg.id, cfg.ext.mix.handle);
+            dst->appendFormat("Mix Port ID: %d; I/O handle: %d;", cfg.id, cfg.ext.mix.handle);
         }
         dst->append("\n");
     }
 }
 
-void AudioPatch::dump(String8 *dst, int spaces, int index) const
+void AudioPatch::dump(String8 *dst, int spaces) const
 {
-    dst->appendFormat("%*sPatch %d: owner uid %4d, handle %2d, af handle %2d\n",
-            spaces, "", index + 1, mUid, mHandle, mAfPatchHandle);
+    dst->appendFormat("owner uid %4d; handle %2d; af handle %2d\n", mUid, mHandle, mAfPatchHandle);
     dumpPatchEndpoints(dst, spaces, "src ", mPatch.num_sources, mPatch.sources);
     dumpPatchEndpoints(dst, spaces, "sink", mPatch.num_sinks, mPatch.sinks);
 }
@@ -135,9 +138,11 @@
 
 void AudioPatchCollection::dump(String8 *dst) const
 {
-    dst->append("\nAudio Patches:\n");
+    dst->appendFormat("\n Audio Patches (%zu):\n", size());
     for (size_t i = 0; i < size(); i++) {
-        valueAt(i)->dump(dst, 2, i);
+        const std::string prefix = base::StringPrintf("  %zu. ", i + 1);
+        dst->appendFormat("%s", prefix.c_str());
+        valueAt(i)->dump(dst, prefix.size());
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b209a88..546f56b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -675,7 +675,7 @@
 
 void AudioPolicyMixCollection::dump(String8 *dst) const
 {
-    dst->append("\nAudio Policy Mix:\n");
+    dst->append("\n Audio Policy Mix:\n");
     for (size_t i = 0; i < size(); i++) {
         itemAt(i)->dump(dst, 2, i);
     }
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
index 866417e..53cc473 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioRoute.cpp
@@ -25,15 +25,16 @@
 
 void AudioRoute::dump(String8 *dst, int spaces) const
 {
-    dst->appendFormat("%*s- Type: %s\n", spaces, "", mType == AUDIO_ROUTE_MUX ? "Mux" : "Mix");
-    dst->appendFormat("%*s- Sink: %s\n", spaces, "", mSink->getTagName().c_str());
+    dst->appendFormat("%s; Sink: \"%s\"\n",
+            mType == AUDIO_ROUTE_MUX ? "Mux" : "Mix", mSink->getTagName().c_str());
     if (mSources.size() != 0) {
-        dst->appendFormat("%*s- Sources: \n", spaces, "");
+        dst->appendFormat("%*sSources: ", spaces, "");
         for (size_t i = 0; i < mSources.size(); i++) {
-            dst->appendFormat("%*s%s \n", spaces + 4, "", mSources[i]->getTagName().c_str());
+            dst->appendFormat("\"%s\"", mSources[i]->getTagName().c_str());
+            if (i + 1 < mSources.size()) dst->append(", ");
         }
+        dst->append("\n");
     }
-    dst->append("\n");
 }
 
 bool AudioRoute::supportsPatch(const sp<PolicyAudioPort> &srcPort,
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index afc4d01..713b0ac 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -18,9 +18,12 @@
 //#define LOG_NDEBUG 0
 
 #include <sstream>
+
+#include <android-base/stringprintf.h>
+#include <TypeConverter.h>
 #include <utils/Log.h>
 #include <utils/String8.h>
-#include <TypeConverter.h>
+
 #include "AudioOutputDescriptor.h"
 #include "AudioPatch.h"
 #include "AudioPolicyMix.h"
@@ -39,35 +42,36 @@
     return ss.str();
 }
 
-void ClientDescriptor::dump(String8 *dst, int spaces, int index) const
+void ClientDescriptor::dump(String8 *dst, int spaces) const
 {
-    dst->appendFormat("%*sClient %d:\n", spaces, "", index+1);
-    dst->appendFormat("%*s- Port Id: %d Session Id: %d UID: %d\n", spaces, "",
-             mPortId, mSessionId, mUid);
-    dst->appendFormat("%*s- Format: %08x Sampling rate: %d Channels: %08x\n", spaces, "",
-             mConfig.format, mConfig.sample_rate, mConfig.channel_mask);
-    dst->appendFormat("%*s- Attributes: %s\n", spaces, "", toString(mAttributes).c_str());
-    dst->appendFormat("%*s- Preferred Device Id: %08x\n", spaces, "", mPreferredDeviceId);
-    dst->appendFormat("%*s- State: %s\n", spaces, "", mActive ? "Active" : "Inactive");
+    dst->appendFormat("Port ID: %d; Session ID: %d; uid %d; State: %s\n",
+            mPortId, mSessionId, mUid, mActive ? "Active" : "Inactive");
+    dst->appendFormat("%*s%s; %d; Channel mask: 0x%x\n", spaces, "",
+            audio_format_to_string(mConfig.format), mConfig.sample_rate, mConfig.channel_mask);
+    dst->appendFormat("%*sAttributes: %s\n", spaces, "", toString(mAttributes).c_str());
+    if (mPreferredDeviceId != AUDIO_PORT_HANDLE_NONE) {
+        dst->appendFormat("%*sPreferred Device Port ID: %d;\n", spaces, "", mPreferredDeviceId);
+    }
 }
 
-void TrackClientDescriptor::dump(String8 *dst, int spaces, int index) const
+void TrackClientDescriptor::dump(String8 *dst, int spaces) const
 {
-    ClientDescriptor::dump(dst, spaces, index);
-    dst->appendFormat("%*s- Stream: %d flags: %08x\n", spaces, "", mStream, mFlags);
-    dst->appendFormat("%*s- Refcount: %d\n", spaces, "", mActivityCount);
-    dst->appendFormat("%*s- DAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
-    dst->appendFormat("%*s- DAP Secondary Outputs:\n", spaces, "");
-    for (auto desc : mSecondaryOutputs) {
-        dst->appendFormat("%*s  - %d\n", spaces, "",
-                desc.promote() == nullptr ? 0 : desc.promote()->mIoHandle);
+    ClientDescriptor::dump(dst, spaces);
+    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d\n", spaces, "",
+            mStream, mFlags, mActivityCount);
+    dst->appendFormat("%*sDAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
+    if (!mSecondaryOutputs.empty()) {
+        dst->appendFormat("%*sDAP Secondary Outputs: ", spaces - 2, "");
+        for (auto desc : mSecondaryOutputs) {
+            dst->appendFormat("%d, ", desc.promote() == nullptr ? 0 : desc.promote()->mIoHandle);
+        }
+        dst->append("\n");
     }
 }
 
 std::string TrackClientDescriptor::toShortString() const
 {
     std::stringstream ss;
-
     ss << ClientDescriptor::toShortString() << " Stream: " << mStream;
     return ss.str();
 }
@@ -81,10 +85,11 @@
     }
 }
 
-void RecordClientDescriptor::dump(String8 *dst, int spaces, int index) const
+void RecordClientDescriptor::dump(String8 *dst, int spaces) const
 {
-    ClientDescriptor::dump(dst, spaces, index);
-    dst->appendFormat("%*s- Source: %d flags: %08x\n", spaces, "", mSource, mFlags);
+    ClientDescriptor::dump(dst, spaces);
+    dst->appendFormat("%*sSource: %d; Flags: %08x; is soundtrigger: %d\n",
+            spaces, "", mSource, mFlags, mIsSoundTrigger);
     mEnabledEffects.dump(dst, spaces + 2 /*spaces*/, false /*verbose*/);
 }
 
@@ -95,7 +100,8 @@
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
-        {} /* Sources do not support secondary outputs*/, nullptr), mSrcDevice(srcDevice)
+        {} /* Sources do not support secondary outputs*/, nullptr),
+    mSrcDevice(srcDevice)
 {
 }
 
@@ -109,18 +115,21 @@
     mHwOutput = hwOutput;
 }
 
-void SourceClientDescriptor::dump(String8 *dst, int spaces, int index) const
+void SourceClientDescriptor::dump(String8 *dst, int spaces) const
 {
-    TrackClientDescriptor::dump(dst, spaces, index);
-    dst->appendFormat("%*s- Device:\n", spaces, "");
-    mSrcDevice->dump(dst, 2, 0);
+    TrackClientDescriptor::dump(dst, spaces);
+    const std::string prefix = base::StringPrintf("%*sDevice: ", spaces, "");
+    dst->appendFormat("%s", prefix.c_str());
+    mSrcDevice->dump(dst, prefix.size());
 }
 
 void SourceClientCollection::dump(String8 *dst) const
 {
-    dst->append("\nAudio sources:\n");
+    dst->appendFormat("\n Audio sources (%zu):\n", size());
     for (size_t i = 0; i < size(); i++) {
-        valueAt(i)->dump(dst, 2, i);
+        const std::string prefix = base::StringPrintf("  %zu. ", i + 1);
+        dst->appendFormat("%s", prefix.c_str());
+        valueAt(i)->dump(dst, prefix.size());
     }
 }
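
The collection dumps above converge on a numbered-prefix idiom: the caller prints a prefix such as "  1. " and the item then indents its continuation lines by the prefix length so they line up under the first field. A minimal standalone sketch of that idiom, assuming nothing beyond standard C++ (std::string and std::snprintf stand in for android::String8 and base::StringPrintf; the item fields are illustrative, not the actual APM classes):

    #include <cstdio>
    #include <string>
    #include <vector>

    // Item dump: the first line continues the caller's prefix, later lines
    // are indented by the prefix width passed in as 'spaces'.
    static void dumpItem(std::string* dst, int spaces, const std::string& name) {
        dst->append(name + "\n");
        char buf[64];
        std::snprintf(buf, sizeof(buf), "%*sState: Active\n", spaces, "");
        dst->append(buf);
    }

    int main() {
        const std::vector<std::string> items{"source A", "source B"};
        std::string out;
        char buf[64];
        std::snprintf(buf, sizeof(buf), " Audio sources (%zu):\n", items.size());
        out.append(buf);
        for (size_t i = 0; i < items.size(); ++i) {
            std::snprintf(buf, sizeof(buf), "  %zu. ", i + 1);
            const std::string prefix = buf;
            out.append(prefix);
            dumpItem(&out, static_cast<int>(prefix.size()), items[i]);
        }
        std::fputs(out.c_str(), stdout);
        return 0;
    }
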
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 1722032..a909331 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -19,10 +19,11 @@
 
 #include <set>
 
-#include <AudioPolicyInterface.h>
+#include <android-base/stringprintf.h>
 #include <audio_utils/string.h>
 #include <media/AudioParameter.h>
 #include <media/TypeConverter.h>
+#include <AudioPolicyInterface.h>
 #include "DeviceDescriptor.h"
 #include "TypeConverter.h"
 #include "HwModule.h"
@@ -54,19 +55,10 @@
 DeviceDescriptor::DeviceDescriptor(const AudioDeviceTypeAddr &deviceTypeAddr,
                                    const std::string &tagName,
                                    const FormatVector &encodedFormats) :
-        DeviceDescriptorBase(deviceTypeAddr), mTagName(tagName), mEncodedFormats(encodedFormats),
+        DeviceDescriptorBase(deviceTypeAddr, encodedFormats), mTagName(tagName),
         mDeclaredAddress(DeviceDescriptorBase::address())
 {
     mCurrentEncodedFormat = AUDIO_FORMAT_DEFAULT;
-    /* If framework runs against a pre 5.0 Audio HAL, encoded formats are absent from the config.
-     * FIXME: APM should know the version of the HAL and don't add the formats for V5.0.
-     * For now, the workaround to remove AC3 and IEC61937 support on HDMI is to declare
-     * something like 'encodedFormats="AUDIO_FORMAT_PCM_16_BIT"' on the HDMI devicePort.
-     */
-    if (mDeviceTypeAddr.mType == AUDIO_DEVICE_OUT_HDMI && mEncodedFormats.empty()) {
-        mEncodedFormats.push_back(AUDIO_FORMAT_AC3);
-        mEncodedFormats.push_back(AUDIO_FORMAT_IEC61937);
-    }
 }
 
 void DeviceDescriptor::attach(const sp<HwModule>& module)
@@ -118,20 +110,6 @@
     return (mCurrentEncodedFormat != AUDIO_FORMAT_DEFAULT);
 }
 
-bool DeviceDescriptor::supportsFormat(audio_format_t format)
-{
-    if (mEncodedFormats.empty()) {
-        return true;
-    }
-
-    for (const auto& devFormat : mEncodedFormats) {
-        if (devFormat == format) {
-            return true;
-        }
-    }
-    return false;
-}
-
 status_t DeviceDescriptor::applyAudioPortConfig(const struct audio_port_config *config,
                                                 audio_port_config *backupConfig)
 {
@@ -141,7 +119,6 @@
     toAudioPortConfig(&localBackupConfig);
     if ((status = validationBeforeApplyConfig(config)) == NO_ERROR) {
         AudioPortConfig::applyAudioPortConfig(config, backupConfig);
-        applyPolicyAudioPortConfig(config);
     }
 
     if (backupConfig != NULL) {
@@ -154,8 +131,6 @@
                                          const struct audio_port_config *srcConfig) const
 {
     DeviceDescriptorBase::toAudioPortConfig(dstConfig, srcConfig);
-    toPolicyAudioPortConfig(dstConfig, srcConfig);
-
     dstConfig->ext.device.hw_module = getModuleHandle();
 }
 
@@ -202,15 +177,15 @@
     }
 }
 
-void DeviceDescriptor::dump(String8 *dst, int spaces, int index, bool verbose) const
+void DeviceDescriptor::dump(String8 *dst, int spaces, bool verbose) const
 {
     String8 extraInfo;
     if (!mTagName.empty()) {
-        extraInfo.appendFormat("%*s- tag name: %s\n", spaces, "", mTagName.c_str());
+        extraInfo.appendFormat("\"%s\"", mTagName.c_str());
     }
 
     std::string descBaseDumpStr;
-    DeviceDescriptorBase::dump(&descBaseDumpStr, spaces, index, extraInfo.string(), verbose);
+    DeviceDescriptorBase::dump(&descBaseDumpStr, spaces, extraInfo.string(), verbose);
     dst->append(descBaseDumpStr.c_str());
 }
 
@@ -473,9 +448,11 @@
     if (isEmpty()) {
         return;
     }
-    dst->appendFormat("%*s- %s devices:\n", spaces, "", tag.string());
+    dst->appendFormat("%*s%s devices (%zu):\n", spaces, "", tag.string(), size());
     for (size_t i = 0; i < size(); i++) {
-        itemAt(i)->dump(dst, spaces + 2, i, verbose);
+        const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", i + 1);
+        dst->appendFormat("%s", prefix.c_str());
+        itemAt(i)->dump(dst, prefix.size(), verbose);
     }
 }
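
With the encoded formats now handed to DeviceDescriptorBase in the constructor, DeviceDescriptor::supportsFormat() is dropped here; the equivalent check presumably lives in the base class. A standalone sketch of the check the removed method implemented, with a plain integer standing in for audio_format_t:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    using Format = uint32_t;  // stand-in for audio_format_t

    // An empty encoded-format list means the device accepts any format;
    // otherwise the format must be listed explicitly.
    bool supportsFormat(const std::vector<Format>& encodedFormats, Format format) {
        return encodedFormats.empty() ||
               std::find(encodedFormats.begin(), encodedFormats.end(), format) !=
                       encodedFormats.end();
    }
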
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 843f5da..3f9c8b0 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "APM::EffectDescriptor"
 //#define LOG_NDEBUG 0
 
+#include <android-base/stringprintf.h>
 #include "EffectDescriptor.h"
 #include <utils/String8.h>
 
@@ -24,13 +25,11 @@
 
 void EffectDescriptor::dump(String8 *dst, int spaces) const
 {
-    dst->appendFormat("%*sID: %d\n", spaces, "", mId);
-    dst->appendFormat("%*sI/O: %d\n", spaces, "", mIo);
-    dst->appendFormat("%*sMusic Effect: %s\n", spaces, "", isMusicEffect()? "yes" : "no");
-    dst->appendFormat("%*sSession: %d\n", spaces, "", mSession);
-    dst->appendFormat("%*sName: %s\n", spaces, "",  mDesc.name);
-    dst->appendFormat("%*s%s\n", spaces, "",  mEnabled ? "Enabled" : "Disabled");
-    dst->appendFormat("%*s%s\n", spaces, "",  mSuspended ? "Suspended" : "Active");
+    dst->appendFormat("Effect ID: %d; Attached to I/O handle: %d; Session: %d;\n",
+            mId, mIo, mSession);
+    dst->appendFormat("%*sMusic Effect? %s; \"%s\"; %s; %s\n", spaces, "",
+            isMusicEffect()? "yes" : "no", mDesc.name,
+            mEnabled ? "Enabled" : "Disabled", mSuspended ? "Suspended" : "Active");
 }
 
 EffectDescriptorCollection::EffectDescriptorCollection() :
@@ -237,10 +236,14 @@
             mTotalEffectsMemory,
             mTotalEffectsMemoryMaxUsed);
     }
-    dst->appendFormat("%*sEffects:\n", spaces, "");
-    for (size_t i = 0; i < size(); i++) {
-        dst->appendFormat("%*s- Effect %d:\n", spaces, "", keyAt(i));
-        valueAt(i)->dump(dst, spaces + 2);
+    if (size() > 0) {
+        if (spaces > 1) spaces -= 2;
+        dst->appendFormat("%*s- Effects (%zu):\n", spaces, "", size());
+        for (size_t i = 0; i < size(); i++) {
+            const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", i + 1);
+            dst->appendFormat("%s", prefix.c_str());
+            valueAt(i)->dump(dst, prefix.size());
+        }
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 3a143b0..418b7eb 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -17,11 +17,13 @@
 #define LOG_TAG "APM::HwModule"
 //#define LOG_NDEBUG 0
 
-#include "HwModule.h"
-#include "IOProfile.h"
+#include <android-base/stringprintf.h>
 #include <policy.h>
 #include <system/audio.h>
 
+#include "HwModule.h"
+#include "IOProfile.h"
+
 namespace android {
 
 HwModule::HwModule(const char *name, uint32_t halVersionMajor, uint32_t halVersionMinor)
@@ -247,28 +249,28 @@
     return false;
 }
 
-void HwModule::dump(String8 *dst) const
+void HwModule::dump(String8 *dst, int spaces) const
 {
-    dst->appendFormat("  - name: %s\n", getName());
-    dst->appendFormat("  - handle: %d\n", mHandle);
-    dst->appendFormat("  - version: %u.%u\n", getHalVersionMajor(), getHalVersionMinor());
+    dst->appendFormat("Handle: %d; \"%s\"\n", mHandle, getName());
     if (mOutputProfiles.size()) {
-        dst->append("  - outputs:\n");
+        dst->appendFormat("%*s- Output MixPorts (%zu):\n", spaces - 2, "", mOutputProfiles.size());
         for (size_t i = 0; i < mOutputProfiles.size(); i++) {
-            dst->appendFormat("    output %zu:\n", i);
-            mOutputProfiles[i]->dump(dst);
+            const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", i + 1);
+            dst->append(prefix.c_str());
+            mOutputProfiles[i]->dump(dst, prefix.size());
         }
     }
     if (mInputProfiles.size()) {
-        dst->append("  - inputs:\n");
+        dst->appendFormat("%*s- Input MixPorts (%zu):\n", spaces - 2, "", mInputProfiles.size());
         for (size_t i = 0; i < mInputProfiles.size(); i++) {
-            dst->appendFormat("    input %zu:\n", i);
-            mInputProfiles[i]->dump(dst);
+            const std::string prefix = base::StringPrintf("%*s %zu. ", spaces, "", i + 1);
+            dst->append(prefix.c_str());
+            mInputProfiles[i]->dump(dst, prefix.size());
         }
     }
-    mDeclaredDevices.dump(dst, String8("Declared"), 2, true);
-    mDynamicDevices.dump(dst, String8("Dynamic"),  2, true);
-    dumpAudioRouteVector(mRoutes, dst, 2);
+    mDeclaredDevices.dump(dst, String8("- Declared"), spaces - 2, true);
+    mDynamicDevices.dump(dst, String8("- Dynamic"),  spaces - 2, true);
+    dumpAudioRouteVector(mRoutes, dst, spaces);
 }
 
 sp <HwModule> HwModuleCollection::getModuleFromName(const char *name) const
@@ -462,10 +464,11 @@
 
 void HwModuleCollection::dump(String8 *dst) const
 {
-    dst->append("\nHW Modules dump:\n");
+    dst->appendFormat("\n Hardware modules (%zu):\n", size());
     for (size_t i = 0; i < size(); i++) {
-        dst->appendFormat("- HW Module %zu:\n", i + 1);
-        itemAt(i)->dump(dst);
+        const std::string prefix = base::StringPrintf("  %zu. ", i + 1);
+        dst->append(prefix.c_str());
+        itemAt(i)->dump(dst, prefix.size());
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 09b614d..21f2018 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -116,27 +116,30 @@
                 return device == deviceDesc && deviceDesc->hasCurrentEncodedFormat(); }) == 1;
 }
 
-void IOProfile::dump(String8 *dst) const
+void IOProfile::dump(String8 *dst, int spaces) const
 {
-    std::string portStr;
-    AudioPort::dump(&portStr, 4);
-    dst->append(portStr.c_str());
-
-    dst->appendFormat("    - flags: 0x%04x", getFlags());
+    String8 extraInfo;
+    extraInfo.appendFormat("0x%04x", getFlags());
     std::string flagsLiteral =
             getRole() == AUDIO_PORT_ROLE_SINK ?
             toString(static_cast<audio_input_flags_t>(getFlags())) :
             getRole() == AUDIO_PORT_ROLE_SOURCE ?
             toString(static_cast<audio_output_flags_t>(getFlags())) : "";
     if (!flagsLiteral.empty()) {
-        dst->appendFormat(" (%s)", flagsLiteral.c_str());
+        extraInfo.appendFormat(" (%s)", flagsLiteral.c_str());
     }
-    dst->append("\n");
-    mSupportedDevices.dump(dst, String8("Supported"), 4, false);
-    dst->appendFormat("\n    - maxOpenCount: %u - curOpenCount: %u\n",
-             maxOpenCount, curOpenCount);
-    dst->appendFormat("    - maxActiveCount: %u - curActiveCount: %u\n",
-             maxActiveCount, curActiveCount);
+
+    std::string portStr;
+    AudioPort::dump(&portStr, spaces, extraInfo.c_str());
+    dst->append(portStr.c_str());
+
+    mSupportedDevices.dump(dst, String8("- Supported"), spaces - 2, false);
+    dst->appendFormat("%*s- maxOpenCount: %u; curOpenCount: %u\n",
+            spaces - 2, "", maxOpenCount, curOpenCount);
+    dst->appendFormat("%*s- maxActiveCount: %u; curActiveCount: %u\n",
+            spaces - 2, "", maxActiveCount, curActiveCount);
+    dst->appendFormat("%*s- recommendedMuteDurationMs: %u ms\n",
+            spaces - 2, "", recommendedMuteDurationMs);
 }
 
 void IOProfile::log()
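
IOProfile::dump() now folds the flags into the extraInfo string ("0x%04x" plus the literal when one can be derived for the port role) and lets AudioPort::dump() print it on the port's first line. A small standalone sketch of that formatting step, with std::string in place of String8 and the literal lookup left to the caller:

    #include <cstdint>
    #include <cstdio>
    #include <string>

    // Build "0xNNNN (LITERAL)" or just "0xNNNN" when no literal is known.
    std::string formatFlagsInfo(uint32_t flags, const std::string& flagsLiteral) {
        char buf[16];
        std::snprintf(buf, sizeof(buf), "0x%04x", static_cast<unsigned>(flags));
        std::string extraInfo = buf;
        if (!flagsLiteral.empty()) {
            extraInfo += " (" + flagsLiteral + ")";
        }
        return extraInfo;
    }
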
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index 8c61b90..ce8178f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -30,9 +30,9 @@
 // --- PolicyAudioPort class implementation
 void PolicyAudioPort::attach(const sp<HwModule>& module)
 {
+    mModule = module;
     ALOGV("%s: attaching module %s to port %s",
             __FUNCTION__, getModuleName(), asAudioPort()->getName().c_str());
-    mModule = module;
 }
 
 void PolicyAudioPort::detach()
@@ -87,7 +87,7 @@
     // For direct outputs, pick minimum sampling rate: this helps ensuring that the
     // channel count / sampling rate combination chosen will be supported by the connected
     // sink
-    if (isDirectOutput()) {
+    if (asAudioPort()->isDirectOutput()) {
         uint32_t samplingRate = UINT_MAX;
         for (const auto rate : samplingRates) {
             if ((rate < samplingRate) && (rate > 0)) {
@@ -122,7 +122,7 @@
     // For direct outputs, pick minimum channel count: this helps ensuring that the
     // channel count / sampling rate combination chosen will be supported by the connected
     // sink
-    if (isDirectOutput()) {
+    if (asAudioPort()->isDirectOutput()) {
         uint32_t channelCount = UINT_MAX;
         for (const auto channelMask : channelMasks) {
             uint32_t cnlCount;
@@ -236,7 +236,7 @@
     audio_format_t bestFormat = sPcmFormatCompareTable[ARRAY_SIZE(sPcmFormatCompareTable) - 1];
     // For mixed output and inputs, use best mixer output format.
     // Do not limit format otherwise
-    if ((asAudioPort()->getType() != AUDIO_PORT_TYPE_MIX) || isDirectOutput()) {
+    if ((asAudioPort()->getType() != AUDIO_PORT_TYPE_MIX) || asAudioPort()->isDirectOutput()) {
         bestFormat = AUDIO_FORMAT_INVALID;
     }
 
@@ -266,29 +266,4 @@
             asAudioPort()->getName().c_str(), samplingRate, channelMask, format);
 }
 
-// --- PolicyAudioPortConfig class implementation
-
-status_t PolicyAudioPortConfig::validationBeforeApplyConfig(
-        const struct audio_port_config *config) const
-{
-    sp<PolicyAudioPort> policyAudioPort = getPolicyAudioPort();
-    return policyAudioPort ? policyAudioPort->checkExactAudioProfile(config) : NO_INIT;
-}
-
-void PolicyAudioPortConfig::toPolicyAudioPortConfig(struct audio_port_config *dstConfig,
-                                                    const struct audio_port_config *srcConfig) const
-{
-    if (dstConfig->config_mask & AUDIO_PORT_CONFIG_FLAGS) {
-        if ((srcConfig != nullptr) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_FLAGS)) {
-            dstConfig->flags = srcConfig->flags;
-        } else {
-            dstConfig->flags = mFlags;
-        }
-    } else {
-        dstConfig->flags = { AUDIO_INPUT_FLAG_NONE };
-    }
-}
-
-
-
 } // namespace android
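
The pickSamplingRate()/pickChannels() call sites above now reach isDirectOutput() through asAudioPort(); the selection rule itself is unchanged: for direct outputs, take the smallest positive sampling rate so the rate/channel combination is likely to be accepted by the connected sink. A standalone sketch of that selection, under the assumption that zero entries denote dynamic rates and are skipped:

    #include <climits>
    #include <cstdint>
    #include <vector>

    // Pick the smallest strictly positive sampling rate; return 0 if none.
    uint32_t pickMinimumRate(const std::vector<uint32_t>& samplingRates) {
        uint32_t samplingRate = UINT_MAX;
        for (const uint32_t rate : samplingRates) {
            if (rate > 0 && rate < samplingRate) {
                samplingRate = rate;
            }
        }
        return samplingRate == UINT_MAX ? 0 : samplingRate;
    }
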
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 84ed656..d446e96 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -123,6 +123,7 @@
         static constexpr const char *flags = "flags";
         static constexpr const char *maxOpenCount = "maxOpenCount";
         static constexpr const char *maxActiveCount = "maxActiveCount";
+        static constexpr const char *recommendedMuteDurationMs = "recommendedMuteDurationMs";
     };
 
     // Children: GainTraits
@@ -482,7 +483,14 @@
     if (!flags.empty()) {
         // Source role
         if (portRole == AUDIO_PORT_ROLE_SOURCE) {
-            mixPort->setFlags(OutputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str()));
+            // TODO: b/193496180 use spatializer flag at audio HAL when available. Until then,
+            // use the DEEP_BUFFER+FAST flag combo to indicate the spatializer output profile.
+            uint32_t intFlags =
+                    OutputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str());
+            if (intFlags == (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER)) {
+                intFlags = AUDIO_OUTPUT_FLAG_SPATIALIZER;
+            }
+            mixPort->setFlags(intFlags);
         } else {
             // Sink role
             mixPort->setFlags(InputFlagConverter::maskFromString(flags, mFlagsSeparator.c_str()));
@@ -496,6 +504,13 @@
     if (!maxActiveCount.empty()) {
         convertTo(maxActiveCount, mixPort->maxActiveCount);
     }
+
+    std::string recommendedmuteDurationMsLiteral =
+            getXmlAttribute(child, Attributes::recommendedMuteDurationMs);
+    if (!recommendedmuteDurationMsLiteral.empty()) {
+        convertTo(recommendedmuteDurationMsLiteral, mixPort->recommendedMuteDurationMs);
+    }
+
     // Deserialize children
     AudioGainTraits::Collection gains;
     status = deserializeCollection<AudioGainTraits>(child, &gains, NULL);
@@ -861,10 +876,10 @@
         ALOGE("%s: No version found in root node %s", __func__, rootName);
         return BAD_VALUE;
     }
-    if (version == "7.0") {
+    if (version == "7.0" || version == "7.1") {
         mChannelMasksSeparator = mSamplingRatesSeparator = mFlagsSeparator = " ";
     } else if (version != "1.0") {
-        ALOGE("%s: Version does not match; expected \"1.0\" or \"7.0\" got \"%s\"",
+        ALOGE("%s: Version does not match; expected \"1.0\", \"7.0\", or \"7.1\" got \"%s\"",
                 __func__, version.c_str());
         return BAD_VALUE;
     }
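
Until a dedicated spatializer flag is available in the audio HAL (b/193496180), the serializer rewrites the DEEP_BUFFER+FAST combination on a source mixPort to AUDIO_OUTPUT_FLAG_SPATIALIZER, and it also reads the new recommendedMuteDurationMs attribute. A standalone sketch of the flag remap; the numeric constants below are illustrative stand-ins, not the real audio.h values:

    #include <cstdint>

    constexpr uint32_t kFlagFast        = 0x0004;  // stand-in for AUDIO_OUTPUT_FLAG_FAST
    constexpr uint32_t kFlagDeepBuffer  = 0x0008;  // stand-in for AUDIO_OUTPUT_FLAG_DEEP_BUFFER
    constexpr uint32_t kFlagSpatializer = 0x4000;  // stand-in for AUDIO_OUTPUT_FLAG_SPATIALIZER

    // Only the exact DEEP_BUFFER|FAST combination is rewritten; any other
    // flag set is passed through untouched.
    uint32_t remapSourceMixPortFlags(uint32_t flags) {
        return flags == (kFlagFast | kFlagDeepBuffer) ? kFlagSpatializer : flags;
    }
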
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 98415b7..ce78eb0 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -10,18 +10,6 @@
                      samplingRates="24000,16000"
                      channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
-        <!-- Le Audio Audio Ports -->
-        <mixPort name="le audio output" role="source">
-            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
-                     samplingRates="8000,16000,24000,32000,44100,48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
-            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
-                     samplingRates="8000,16000,24000,32000,44100,48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
-            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
-                     samplingRates="8000,16000,24000,32000,44100,48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
-        </mixPort>
     </mixPorts>
     <devicePorts>
         <!-- A2DP Audio Ports -->
@@ -42,13 +30,6 @@
         </devicePort>
         <!-- Hearing AIDs Audio Ports -->
         <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
-        <!-- BLE Audio Ports -->
-        <!-- Note that these device types are not valid in HAL versions < 7. Any device
-             running pre-V7 HAL and using this file will not pass VTS. Need to use
-             bluetooth_audio_policy_configuration_7_0.xml instead.
-        -->
-        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
-        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -59,9 +40,5 @@
                sources="a2dp output"/>
         <route type="mix" sink="BT Hearing Aid Out"
                sources="hearing aid output"/>
-        <route type="mix" sink="BLE Headset Out"
-               sources="le audio output"/>
-        <route type="mix" sink="BLE Speaker Out"
-               sources="le audio output"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index fbe7571..2dffe02 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -10,18 +10,6 @@
                      samplingRates="24000 16000"
                      channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
-        <!-- Le Audio Audio Ports -->
-        <mixPort name="le audio output" role="source">
-            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
-                     samplingRates="8000 16000 24000 32000 44100 48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
-            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
-                     samplingRates="8000 16000 24000 32000 44100 48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
-            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
-                     samplingRates="8000 16000 24000 32000 44100 48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
-        </mixPort>
     </mixPorts>
     <devicePorts>
         <!-- A2DP Audio Ports -->
@@ -42,9 +30,6 @@
         </devicePort>
         <!-- Hearing AIDs Audio Ports -->
         <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
-        <!-- BLE Audio Ports -->
-        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
-        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -55,9 +40,5 @@
                sources="a2dp output"/>
         <route type="mix" sink="BT Hearing Aid Out"
                sources="hearing aid output"/>
-        <route type="mix" sink="BLE Headset Out"
-               sources="le audio output"/>
-        <route type="mix" sink="BLE Speaker Out"
-               sources="le audio output"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration.xml
new file mode 100644
index 0000000..d34cca0
--- /dev/null
+++ b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Audio HAL Audio Policy Configuration file -->
+<module name="bluetooth" halVersion="2.0">
+    <mixPorts>
+        <!-- A2DP Audio Ports -->
+        <mixPort name="a2dp output" role="source"/>
+        <!-- Hearing AIDs Audio Ports -->
+        <mixPort name="hearing aid output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="24000,16000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <!-- Le Audio Audio Ports -->
+        <mixPort name="le audio output" role="source"/>
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <!-- A2DP Audio Ports -->
+        <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100,48000,88200,96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100,48000,88200,96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100,48000,88200,96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <!-- Hearing AIDs Audio Ports -->
+        <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+        <!-- BLE Audio Ports -->
+        <!-- Note that these device types are not valid in HAL versions < 7. Any device
+             running a pre-V7 HAL and using this file will not pass VTS. Use
+             bluetooth_with_le_audio_policy_configuration_7_0.xml instead.
+        -->
+        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
+        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT A2DP Out"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Headphones"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Speaker"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT Hearing Aid Out"
+               sources="hearing aid output"/>
+        <route type="mix" sink="BLE Headset Out"
+               sources="le audio output"/>
+        <route type="mix" sink="le audio input"
+               sources="BLE Headset In"/>
+        <route type="mix" sink="BLE Speaker Out"
+               sources="le audio output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
new file mode 100644
index 0000000..e7908eb
--- /dev/null
+++ b/services/audiopolicy/config/bluetooth_with_le_audio_policy_configuration_7_0.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Bluetooth Audio HAL Audio Policy Configuration file -->
+<module name="bluetooth" halVersion="2.0">
+    <mixPorts>
+        <!-- A2DP Audio Ports -->
+        <mixPort name="a2dp output" role="source"/>
+        <!-- Hearing AIDs Audio Ports -->
+        <mixPort name="hearing aid output" role="source">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="24000 16000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </mixPort>
+        <!-- Le Audio Audio Ports -->
+        <mixPort name="le audio output" role="source" />
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+            <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000 16000 24000 32000 44100 48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
+    </mixPorts>
+    <devicePorts>
+        <!-- A2DP Audio Ports -->
+        <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                     samplingRates="44100 48000 88200 96000"
+                     channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+        </devicePort>
+        <!-- Hearing AIDs Audio Ports -->
+        <devicePort tagName="BT Hearing Aid Out" type="AUDIO_DEVICE_OUT_HEARING_AID" role="sink"/>
+        <!-- BLE Audio Ports -->
+        <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
+        <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
+    </devicePorts>
+    <routes>
+        <route type="mix" sink="BT A2DP Out"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Headphones"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT A2DP Speaker"
+               sources="a2dp output"/>
+        <route type="mix" sink="BT Hearing Aid Out"
+               sources="hearing aid output"/>
+        <route type="mix" sink="BLE Headset Out"
+               sources="le audio output"/>
+        <route type="mix" sink="le audio input"
+               sources="BLE Headset In"/>
+        <route type="mix" sink="BLE Speaker Out"
+               sources="le audio output"/>
+    </routes>
+</module>
diff --git a/services/audiopolicy/config/le_audio_policy_configuration.xml b/services/audiopolicy/config/le_audio_policy_configuration.xml
index a3dc72b..dcdd805 100644
--- a/services/audiopolicy/config/le_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/le_audio_policy_configuration.xml
@@ -7,13 +7,20 @@
                      samplingRates="8000,16000,24000,32000,44100,48000"
                      channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT,AUDIO_FORMAT_PCM_24_BIT,AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BLE Headset Out" sources="le audio output"/>
         <route type="mix" sink="BLE Speaker Out" sources="le audio output"/>
+        <route type="mix" sink="le audio input" sources="BLE Headset In"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 0f8b0a5..7d21ae0 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -87,10 +87,17 @@
 
     status_t listAudioVolumeGroups(AudioVolumeGroupVector &groups) const override;
 
+    /**
+     * Get the list of currently connected removable device types ordered from most recently
+     * connected to least recently connected.
+     * @param group the device group to consider: wired, a2dp... If none, consider all groups.
+     * @param excludedDevices list of device types to ignore
+     * @return a potentially empty ordered list of connected removable devices.
+     */
     std::vector<audio_devices_t> getLastRemovableMediaDevices(
-            device_out_group_t group = GROUP_NONE) const
-    {
-        return mLastRemovableMediaDevices.getLastRemovableMediaDevices(group);
+            device_out_group_t group = GROUP_NONE,
+            std::vector<audio_devices_t> excludedDevices = {}) const {
+        return mLastRemovableMediaDevices.getLastRemovableMediaDevices(group, excludedDevices);
     }
 
     void dump(String8 *dst) const override;
diff --git a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
index d7f8b1e..2662cd3 100644
--- a/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
+++ b/services/audiopolicy/engine/common/include/LastRemovableMediaDevices.h
@@ -35,7 +35,8 @@
 public:
     void setRemovableMediaDevices(sp<DeviceDescriptor> desc, audio_policy_dev_state_t state);
     std::vector<audio_devices_t> getLastRemovableMediaDevices(
-            device_out_group_t group = GROUP_NONE) const;
+            device_out_group_t group = GROUP_NONE,
+            std::vector<audio_devices_t> excludedDevices = {}) const;
     sp<DeviceDescriptor> getLastRemovableMediaDevice(
             const DeviceVector& excludedDevices, device_out_group_t group = GROUP_NONE) const;
 
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 150a9a8..99507ee 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -71,7 +71,13 @@
                                               audio_policy_dev_state_t state)
 {
     audio_devices_t deviceType = devDesc->type();
-    if ((deviceType != AUDIO_DEVICE_NONE) && audio_is_output_device(deviceType)) {
+    if ((deviceType != AUDIO_DEVICE_NONE) && audio_is_output_device(deviceType)
+            && deviceType != AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET
+            && deviceType != AUDIO_DEVICE_OUT_BLE_BROADCAST) {
+        // USB dock does not follow the "last connected removable device wins" rule;
+        // it is only used if no removable device is connected or if set as preferred device.
+        // LE audio broadcast device has a specific policy depending on active strategies and
+        // devices and does not follow the rule of last connected removable device.
         mLastRemovableMediaDevices.setRemovableMediaDevices(devDesc, state);
     }
 
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index d39eff6..b036e12 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -69,12 +69,6 @@
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
                AUDIO_FLAG_NONE, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_EVENT,
                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
           }
@@ -128,8 +122,12 @@
     {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER",
      {
          {"", AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
-          {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
-            AUDIO_FLAG_BEACON, ""}}
+          {
+              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+                AUDIO_FLAG_BEACON, ""},
+              {AUDIO_CONTENT_TYPE_ULTRASOUND, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+                AUDIO_FLAG_NONE, ""}
+          }
          }
      },
     }
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
index b3f8947..93122e0 100644
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -44,12 +44,15 @@
 }
 
 std::vector<audio_devices_t> LastRemovableMediaDevices::getLastRemovableMediaDevices(
-        device_out_group_t group) const
+        device_out_group_t group, std::vector<audio_devices_t> excludedDevices) const
 {
     std::vector<audio_devices_t> ret;
     for (auto iter = mMediaDevices.begin(); iter != mMediaDevices.end(); ++iter) {
-        if ((group == GROUP_NONE) || (group == getDeviceOutGroup((iter->desc)->type()))) {
-            ret.push_back((iter->desc)->type());
+        audio_devices_t type = (iter->desc)->type();
+        if ((group == GROUP_NONE || group == getDeviceOutGroup(type))
+                && std::find(excludedDevices.begin(), excludedDevices.end(), type) ==
+                                       excludedDevices.end()) {
+            ret.push_back(type);
         }
     }
     return ret;
@@ -85,6 +88,7 @@
     case AUDIO_DEVICE_OUT_HEARING_AID:
     case AUDIO_DEVICE_OUT_BLE_HEADSET:
     case AUDIO_DEVICE_OUT_BLE_SPEAKER:
+    case AUDIO_DEVICE_OUT_BLE_BROADCAST:
         return GROUP_BT_A2DP;
     default:
         return GROUP_NONE;
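
getLastRemovableMediaDevices() keeps its most-recently-connected-first ordering and group filter, and now also skips any type listed in excludedDevices. A standalone sketch of the exclusion step, with the group filter omitted and plain integers standing in for audio_devices_t:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    using DeviceType = uint32_t;  // stand-in for audio_devices_t

    // Preserve the most-recently-connected-first order, dropping excluded types.
    std::vector<DeviceType> lastRemovableDevices(
            const std::vector<DeviceType>& mostRecentFirst,
            const std::vector<DeviceType>& excludedDevices) {
        std::vector<DeviceType> ret;
        for (const DeviceType type : mostRecentFirst) {
            if (std::find(excludedDevices.begin(), excludedDevices.end(), type) ==
                    excludedDevices.end()) {
                ret.push_back(type);
            }
        }
        return ret;
    }
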
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index b3d144f..fbfcf72 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -150,12 +150,8 @@
 void ProductStrategy::dump(String8 *dst, int spaces) const
 {
     dst->appendFormat("\n%*s-%s (id: %d)\n", spaces, "", mName.c_str(), mId);
-    std::string deviceLiteral;
-    if (!deviceTypesToString(mApplicableDevices, deviceLiteral)) {
-        ALOGE("%s: failed to convert device %s",
-              __FUNCTION__, dumpDeviceTypes(mApplicableDevices).c_str());
-    }
-    dst->appendFormat("%*sSelected Device: {type:%s, @:%s}\n", spaces + 2, "",
+    std::string deviceLiteral = deviceTypesToString(mApplicableDevices);
+    dst->appendFormat("%*sSelected Device: {%s, @:%s}\n", spaces + 2, "",
                        deviceLiteral.c_str(), mDeviceAddress.c_str());
 
     for (const auto &attr : mAttributesVector) {
@@ -333,4 +329,3 @@
     dst->appendFormat("\n");
 }
 }
-
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index c565926..2ebb7df 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -70,7 +70,7 @@
 
 using ProductStrategies = std::vector<ProductStrategy>;
 
-using ValuePair = std::pair<uint32_t, std::string>;
+using ValuePair = std::tuple<uint64_t, uint32_t, std::string>;
 using ValuePairs = std::vector<ValuePair>;
 
 struct CriterionType
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 81e803f..6f560d5 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -80,6 +80,7 @@
     struct Attributes {
         static constexpr const char *literal = "literal";
         static constexpr const char *numerical = "numerical";
+        static constexpr const char *androidType = "android_type";
     };
 
     static android::status_t deserialize(_xmlDoc *doc, const _xmlNode *root,
@@ -349,7 +350,16 @@
         ALOGE("%s: No attribute %s found", __FUNCTION__, Attributes::literal);
         return BAD_VALUE;
     }
-    uint32_t numerical = 0;
+    uint32_t androidType = 0;
+    std::string androidTypeliteral = getXmlAttribute(child, Attributes::androidType);
+    if (!androidTypeliteral.empty()) {
+        ALOGV("%s: androidType %s", __FUNCTION__, androidTypeliteral.c_str());
+        if (!convertTo(androidTypeliteral, androidType)) {
+            ALOGE("%s: : Invalid typeset value(%s)", __FUNCTION__, androidTypeliteral.c_str());
+            return BAD_VALUE;
+        }
+    }
+    uint64_t numerical = 0;
     std::string numericalTag = getXmlAttribute(child, Attributes::numerical);
     if (numericalTag.empty()) {
         ALOGE("%s: No attribute %s found", __FUNCTION__, Attributes::literal);
@@ -359,7 +369,7 @@
         ALOGE("%s: : Invalid value(%s)", __FUNCTION__, numericalTag.c_str());
         return BAD_VALUE;
     }
-    values.push_back({numerical, literal});
+    values.push_back({numerical, androidType, literal});
     return NO_ERROR;
 }
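
ValuePair is widened from a pair<uint32_t, string> to a tuple<uint64_t, uint32_t, string>: the 64-bit criterion value, the corresponding Android device type, and the literal. A hypothetical consumer of the new shape (the printing loop below is illustrative, not engine code):

    #include <cstdint>
    #include <cstdio>
    #include <string>
    #include <tuple>
    #include <vector>

    using ValuePair = std::tuple<uint64_t, uint32_t, std::string>;
    using ValuePairs = std::vector<ValuePair>;

    void printValuePairs(const ValuePairs& values) {
        for (const auto& [numerical, androidType, literal] : values) {
            // The criterion value may use bits above 32, hence the 64-bit field.
            std::printf("%s: criterion=0x%llx android_type=0x%x\n", literal.c_str(),
                    static_cast<unsigned long long>(numerical), androidType);
        }
    }
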
 
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index a747822..dc8d9cf 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -41,8 +41,9 @@
         "libaudiopolicyengineconfigurable_pfwwrapper",
 
     ],
-    shared_libs: [
+  shared_libs: [
         "libaudiofoundation",
+        "libbase",
         "liblog",
         "libcutils",
         "libutils",
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index bc32416..0ddf66d 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -57,9 +57,6 @@
     <ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
         <AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
         </AttributesGroup>
     </ProductStrategy>
diff --git a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
index 1fc2264..9fd8b8e 100644
--- a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
+++ b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
@@ -77,12 +77,12 @@
      * Set the input device to be used by an input source.
      *
      * @param[in] inputSource: name of the input source for which the device to use has to be set
-     * @param[in] devices; mask of devices to be used for the given input source.
+     * @param[in] devices: mask of devices to be used for the given input source.
      *
     * @return true if the devices were set correctly for this input source, false otherwise.
      */
     virtual bool setDeviceForInputSource(const audio_source_t &inputSource,
-                                         audio_devices_t device) = 0;
+                                         uint64_t device) = 0;
 
     virtual void setDeviceAddressForProductStrategy(product_strategy_t strategy,
                                                     const std::string &address) = 0;
@@ -91,12 +91,12 @@
      * Set the device to be used by a product strategy.
      *
      * @param[in] strategy: name of the product strategy for which the device to use has to be set
-     * @param[in] devices; mask of devices to be used for the given strategy.
+     * @param[in] devices: mask of devices to be used for the given strategy.
      *
     * @return true if the devices were set correctly for this strategy, false otherwise.
      */
     virtual bool setDeviceTypesForProductStrategy(product_strategy_t strategy,
-                                                  audio_devices_t devices) = 0;
+                                                  uint64_t devices) = 0;
 
     virtual product_strategy_t getProductStrategyByName(const std::string &address) = 0;
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/common/Structure/PolicySubsystem-CommonTypes.xml.in b/services/audiopolicy/engineconfigurable/parameter-framework/examples/common/Structure/PolicySubsystem-CommonTypes.xml.in
index 2e9f37e..2c4c7b5 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/common/Structure/PolicySubsystem-CommonTypes.xml.in
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/common/Structure/PolicySubsystem-CommonTypes.xml.in
@@ -10,8 +10,8 @@
      <!--#################### GLOBAL COMPONENTS END ####################-->
 
     <!-- Automatically filled from audio-base.h file -->
-    <ComponentType Name="OutputDevicesMask" Description="32th bit is not allowed as dedicated for input devices detection">
-        <BitParameterBlock Name="mask" Size="32">
+    <ComponentType Name="OutputDevicesMask" Description="64bit representation of devices">
+        <BitParameterBlock Name="mask" Size="64">
         </BitParameterBlock>
     </ComponentType>
 
@@ -19,8 +19,8 @@
     profile. It must match with the Input device enum parameter.
     -->
     <!-- Automatically filled from audio-base.h file -->
-    <ComponentType Name="InputDevicesMask">
-        <BitParameterBlock Name="mask" Size="32">
+    <ComponentType Name="InputDevicesMask" Description="64bit representation of devices">
+        <BitParameterBlock Name="mask" Size="64">
         </BitParameterBlock>
     </ComponentType>
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
index f8a6fc0..df4e3e9 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/InputSource.cpp
@@ -45,7 +45,7 @@
 
 bool InputSource::sendToHW(string & /*error*/)
 {
-    audio_devices_t applicableInputDevice;
+    uint64_t applicableInputDevice;
     blackboardRead(&applicableInputDevice, sizeof(applicableInputDevice));
     return mPolicyPluginInterface->setDeviceForInputSource(mId, applicableInputDevice);
 }
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
index 6c8eb65..e65946e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.h
@@ -32,7 +32,7 @@
 
     struct Device
     {
-        audio_devices_t applicableDevice; /**< applicable device for this strategy. */
+        uint64_t applicableDevice; /**< applicable device for this strategy. */
         char deviceAddress[mMaxStringSize]; /**< device address associated with this strategy. */
     } __attribute__((packed));
 
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index b0c376a..3d74920 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -36,6 +36,8 @@
 
 #include <media/TypeConverter.h>
 
+#include <cinttypes>
+
 using std::string;
 using std::map;
 
@@ -166,16 +168,13 @@
 status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
                                           audio_policy_dev_state_t state)
 {
-    mPolicyParameterMgr->setDeviceConnectionState(
-                device->type(), device->address().c_str(), state);
+    mPolicyParameterMgr->setDeviceConnectionState(device->type(), device->address(), state);
     if (audio_is_output_device(device->type())) {
-        // FIXME: Use DeviceTypeSet when the interface is ready
         return mPolicyParameterMgr->setAvailableOutputDevices(
-                    deviceTypesToBitMask(getApmObserver()->getAvailableOutputDevices().types()));
+                    getApmObserver()->getAvailableOutputDevices().types());
     } else if (audio_is_input_device(device->type())) {
-        // FIXME: Use DeviceTypeSet when the interface is ready
         return mPolicyParameterMgr->setAvailableInputDevices(
-                    deviceTypesToBitMask(getApmObserver()->getAvailableInputDevices().types()));
+                    getApmObserver()->getAvailableInputDevices().types());
     }
     return EngineBase::setDeviceConnectionState(device, state);
 }
@@ -299,8 +298,13 @@
     if (device != nullptr) {
         return DeviceVector(device);
     }
+    return fromCache? getCachedDevices(strategy) : getDevicesForProductStrategy(strategy);
+}
 
-    return fromCache? mDevicesForStrategies.at(strategy) : getDevicesForProductStrategy(strategy);
+DeviceVector Engine::getCachedDevices(product_strategy_t ps) const
+{
+    return mDevicesForStrategies.find(ps) != mDevicesForStrategies.end() ?
+                mDevicesForStrategies.at(ps) : DeviceVector{};
 }
 
 DeviceVector Engine::getOutputDevicesForStream(audio_stream_type_t stream, bool fromCache) const
@@ -369,17 +373,28 @@
     getProductStrategies().at(strategy)->setDeviceAddress(address);
 }
 
-bool Engine::setDeviceTypesForProductStrategy(product_strategy_t strategy, audio_devices_t devices)
+bool Engine::setDeviceTypesForProductStrategy(product_strategy_t strategy, uint64_t devices)
 {
     if (getProductStrategies().find(strategy) == getProductStrategies().end()) {
-        ALOGE("%s: set device %d on invalid strategy %d", __FUNCTION__, devices, strategy);
+        ALOGE("%s: set device %" PRId64 " on invalid strategy %d", __FUNCTION__, devices, strategy);
         return false;
     }
-    // FIXME: stop using deviceTypesFromBitMask when the interface is ready
-    getProductStrategies().at(strategy)->setDeviceTypes(deviceTypesFromBitMask(devices));
+    // Here the device matches the criterion value; we need to rebuild the Android device types.
+    DeviceTypeSet types =
+            mPolicyParameterMgr->convertDeviceCriterionValueToDeviceTypes(devices, true /*isOut*/);
+    getProductStrategies().at(strategy)->setDeviceTypes(types);
     return true;
 }
 
+bool Engine::setDeviceForInputSource(const audio_source_t &inputSource, uint64_t device)
+{
+    DeviceTypeSet types = mPolicyParameterMgr->convertDeviceCriterionValueToDeviceTypes(
+                device, false /*isOut*/);
+    ALOG_ASSERT(types.size() <= 1, "one input device expected at most");
+    audio_devices_t deviceType = types.empty() ? AUDIO_DEVICE_IN_DEFAULT : *types.begin();
+    return setPropertyForKey<audio_devices_t, audio_source_t>(deviceType, inputSource);
+}
+
 template <>
 EngineInterface *Engine::queryInterface()
 {
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index d8e2742..4b559f0 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -82,15 +82,12 @@
     bool setVolumeProfileForStream(const audio_stream_type_t &stream,
                                    const audio_stream_type_t &volumeProfile) override;
 
-    bool setDeviceForInputSource(const audio_source_t &inputSource, audio_devices_t device) override
-    {
-        return setPropertyForKey<audio_devices_t, audio_source_t>(device, inputSource);
-    }
+    bool setDeviceForInputSource(const audio_source_t &inputSource, uint64_t device) override;
+
     void setDeviceAddressForProductStrategy(product_strategy_t strategy,
                                                     const std::string &address) override;
 
-    bool setDeviceTypesForProductStrategy(product_strategy_t strategy,
-                                                  audio_devices_t devices) override;
+    bool setDeviceTypesForProductStrategy(product_strategy_t strategy, uint64_t devices) override;
 
     product_strategy_t getProductStrategyByName(const std::string &name) override
     {
@@ -126,6 +123,7 @@
     status_t loadAudioPolicyEngineConfig();
 
     DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
+    DeviceVector getCachedDevices(product_strategy_t ps) const;
 
     /**
      * Policy Parameter Manager hidden through a wrapper.
diff --git a/services/audiopolicy/engineconfigurable/src/InputSource.cpp b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
index f4645e6..6fd2b70 100644
--- a/services/audiopolicy/engineconfigurable/src/InputSource.cpp
+++ b/services/audiopolicy/engineconfigurable/src/InputSource.cpp
@@ -26,7 +26,8 @@
 status_t Element<audio_source_t>::setIdentifier(audio_source_t identifier)
 {
     if (identifier > AUDIO_SOURCE_MAX && identifier != AUDIO_SOURCE_HOTWORD
-        && identifier != AUDIO_SOURCE_FM_TUNER && identifier != AUDIO_SOURCE_ECHO_REFERENCE) {
+        && identifier != AUDIO_SOURCE_FM_TUNER && identifier != AUDIO_SOURCE_ECHO_REFERENCE
+        && identifier != AUDIO_SOURCE_ULTRASOUND) {
         return BAD_VALUE;
     }
     mIdentifier = identifier;
@@ -46,12 +47,6 @@
 template <>
 status_t Element<audio_source_t>::set(audio_devices_t devices)
 {
-    if (devices == AUDIO_DEVICE_NONE) {
-        // Reset
-        mApplicableDevices = devices;
-        return NO_ERROR;
-    }
-    devices = static_cast<audio_devices_t>(devices | AUDIO_DEVICE_BIT_IN);
     if (!audio_is_input_device(devices)) {
         ALOGE("%s: trying to set an invalid device 0x%X for input source %s",
               __FUNCTION__, devices, getName().c_str());
diff --git a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
index 43b3dd2..86ac76f 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildCommonTypesStructureFile.py
@@ -55,7 +55,7 @@
     while i < decimal:
         i = i << 1
         pos = pos + 1
-        if pos == 32:
+        if pos == 64:
             return -1
 
     # TODO: b/168065706. This is just to fix the build. That the problem of devices with
@@ -132,6 +132,9 @@
 
     logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
 
+    multi_bit_output_device_shift = 32
+    multi_bit_input_device_shift = 32
+
     for line_number, line in enumerate(androidaudiobaseheaderFile):
         match = criteria_pattern.match(line)
         if match:
@@ -143,16 +146,36 @@
 
             component_type_numerical_value = match.groupdict()['values']
 
-            # for AUDIO_DEVICE_IN: need to remove sign bit / rename default to stub
+            # for AUDIO_DEVICE_IN: rename default to stub
             if component_type_name == "InputDevicesMask":
-                component_type_numerical_value = str(int(component_type_numerical_value, 0) & ~2147483648)
+                component_type_numerical_value = str(int(component_type_numerical_value, 0))
                 if component_type_literal == "default":
                     component_type_literal = "stub"
 
+                string_int = int(component_type_numerical_value, 0)
+                num_bits = bin(string_int).count("1")
+                if num_bits > 1:
+                    logging.info("The value {} for criterion {} (binary {}) has {} bits set"
+                        .format(component_type_numerical_value, component_type_name, bin(string_int), num_bits))
+                    string_int = 2**multi_bit_input_device_shift
+                    logging.info("Remapped value is {} ({})".format(string_int, bin(string_int)))
+                    multi_bit_input_device_shift += 1
+                    component_type_numerical_value = str(string_int)
+
             if component_type_name == "OutputDevicesMask":
                 if component_type_literal == "default":
                     component_type_literal = "stub"
 
+                string_int = int(component_type_numerical_value, 0)
+                num_bits = bin(string_int).count("1")
+                if num_bits > 1:
+                    logging.info("The value {} for criterion {} (binary {}) has {} bits set"
+                        .format(component_type_numerical_value, component_type_name, bin(string_int), num_bits))
+                    string_int = 2**multi_bit_output_device_shift
+                    logging.info("Remapped value is {} ({})".format(string_int, bin(string_int)))
+                    multi_bit_output_device_shift += 1
+                    component_type_numerical_value = str(string_int)
+
             # Remove duplicated numerical values
             if int(component_type_numerical_value, 0) in all_component_types[component_type_name].values():
                 logging.info("The value {}:{} is duplicated for criterion {}, KEEPING LATEST".format(component_type_numerical_value, component_type_literal, component_type_name))
diff --git a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
index 76c35c1..a15a6ba 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
@@ -85,6 +85,9 @@
     return argparser.parse_args()
 
 
+output_devices_type_value = {}
+input_devices_type_value = {}
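+# The dicts above map each device criterion literal to its original android framework value,
+# emitted as the "android_type" attribute on the generated criterion type values.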
+
 def generateXmlCriterionTypesFile(criterionTypes, addressCriteria, criterionTypesFile, outputFile):
 
     logging.info("Importing criterionTypesFile {}".format(criterionTypesFile))
@@ -102,6 +105,11 @@
                     value_node.set('numerical', str(value))
                     value_node.set('literal', key)
 
+                    if criterion_type.get('name') == "OutputDevicesMaskType":
+                        value_node.set('android_type', output_devices_type_value[key])
+                    if criterion_type.get('name') == "InputDevicesMaskType":
+                        value_node.set('android_type', input_devices_type_value[key])
+
     if addressCriteria:
         for criterion_name, values_list in addressCriteria.items():
             for criterion_type in criterion_types_root.findall('criterion_type'):
@@ -200,10 +208,8 @@
     #
     ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
 
-    #
-    # Reaching 32 bit limit for inclusive criterion out devices: removing
-    #
-    ignored_output_device_values = ['BleSpeaker', 'BleHeadset']
+    multi_bit_outputdevice_shift = 32
+    multi_bit_inputdevice_shift = 32
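+    # The two counters above track the next free criterion bit (starting at 32) assigned to
+    # device masks that have more than one bit set, so each literal keeps a unique bit.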
 
     criteria_pattern = re.compile(
         r"\s*V\((?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
@@ -223,28 +229,59 @@
                 ''.join((w.capitalize() for w in match.groupdict()['literal'].split('_')))
             criterion_numerical_value = match.groupdict()['values']
 
-            # for AUDIO_DEVICE_IN: need to remove sign bit / rename default to stub
+            # for AUDIO_DEVICE_IN: rename default to stub
             if criterion_name == "InputDevicesMaskType":
                 if criterion_literal == "Default":
                     criterion_numerical_value = str(int("0x40000000", 0))
+                    input_devices_type_value[criterion_literal] = "0xC0000000"
                 else:
                     try:
                         string_int = int(criterion_numerical_value, 0)
+                        # Set the AUDIO_DEVICE_BIT_IN bit (0x80000000) for the android_type tag
+                        input_devices_type_value[criterion_literal] = hex(string_int | 2147483648)
+
+                        num_bits = bin(string_int).count("1")
+                        if num_bits > 1:
+                            logging.info("The value {}:{} for criterion {} (binary {}) has {} bits set"
+                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
+                            string_int = 2**multi_bit_inputdevice_shift
+                            logging.info("Remapped value is {} ({})".format(string_int, bin(string_int)))
+                            multi_bit_inputdevice_shift += 1
+                            criterion_numerical_value = str(string_int)
+
                     except ValueError:
                         # Handle the exception
                         logging.info("value {}:{} for criterion {} is not a number, ignoring"
                             .format(criterion_numerical_value, criterion_literal, criterion_name))
                         continue
-                    criterion_numerical_value = str(int(criterion_numerical_value, 0) & ~2147483648)
 
             if criterion_name == "OutputDevicesMaskType":
                 if criterion_literal == "Default":
                     criterion_numerical_value = str(int("0x40000000", 0))
-                if criterion_literal in ignored_output_device_values:
-                    logging.info("OutputDevicesMaskType skipping {}".format(criterion_literal))
-                    continue
+                    output_devices_type_value[criterion_literal] = "0x40000000"
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                        output_devices_type_value[criterion_literal] = criterion_numerical_value
+
+                        num_bits = bin(string_int).count("1")
+                        if num_bits > 1:
+                            logging.info("The value {}:{} for criterion {} (binary {}) has {} bits set"
+                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
+                            string_int = 2**multi_bit_outputdevice_shift
+                            logging.info("Remapped value is {} ({})".format(string_int, bin(string_int)))
+                            multi_bit_outputdevice_shift += 1
+                            criterion_numerical_value = str(string_int)
+
+                    except ValueError:
+                        # Handle the exception
+                        logging.info("The value {}:{} for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+
             try:
                 string_int = int(criterion_numerical_value, 0)
+
             except ValueError:
                 # Handle the exception
                 logging.info("The value {}:{} is for criterion {} is not a number, ignoring"
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 3e04b68..0ef0b82 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -19,6 +19,7 @@
     header_libs: [
         "libbase_headers",
         "libaudiopolicycommon",
+        "libaudiofoundation_headers",
     ],
     shared_libs: [
         "liblog",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 63990ac..099d55d 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -23,6 +23,7 @@
 #include <SelectionCriterionInterface.h>
 #include <media/convert.h>
 #include <algorithm>
+#include <cutils/bitops.h>
 #include <cutils/config_utils.h>
 #include <cutils/misc.h>
 #include <fstream>
@@ -31,6 +32,7 @@
 #include <string>
 #include <vector>
 #include <stdint.h>
+#include <cinttypes>
 #include <cmath>
 #include <utils/Log.h>
 
@@ -124,9 +126,22 @@
 
     for (auto pair : pairs) {
         std::string error;
-        ALOGV("%s: Adding pair %d,%s for criterionType %s", __FUNCTION__, pair.first,
-              pair.second.c_str(), name.c_str());
-        criterionType->addValuePair(pair.first, pair.second, error);
+        ALOGV("%s: Adding pair %" PRIu64", %s for criterionType %s", __func__, std::get<0>(pair),
+              std::get<2>(pair).c_str(), name.c_str());
+        criterionType->addValuePair(std::get<0>(pair), std::get<2>(pair), error);
+
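+        // Remember the android device type -> criterion value mapping so that device types can
+        // later be converted to and from parameter-framework criterion values.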
+        if (name == gOutputDeviceCriterionName) {
+            ALOGV("%s: Adding mOutputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
+                  __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
+            audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
+            mOutputDeviceToCriterionTypeMap[androidType] = std::get<0>(pair);
+        }
+        if (name == gInputDeviceCriterionName) {
+            ALOGV("%s: Adding mInputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
+                  __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
+            audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
+            mInputDeviceToCriterionTypeMap[androidType] = std::get<0>(pair);
+        }
     }
     ALOG_ASSERT(mPolicyCriteria.find(name) == mPolicyCriteria.end(),
                 "%s: Criterion %s already added", __FUNCTION__, name.c_str());
@@ -135,7 +150,7 @@
     mPolicyCriteria[name] = criterion;
 
     if (not defaultValue.empty()) {
-        int numericalValue = 0;
+        uint64_t numericalValue = 0;
         if (not criterionType->getNumericalValue(defaultValue.c_str(), numericalValue)) {
             ALOGE("%s; trying to apply invalid default literal value (%s)", __FUNCTION__,
                   defaultValue.c_str());
@@ -263,7 +278,7 @@
 }
 
 status_t ParameterManagerWrapper::setDeviceConnectionState(
-        audio_devices_t type, const std::string address, audio_policy_dev_state_t state)
+        audio_devices_t type, const std::string &address, audio_policy_dev_state_t state)
 {
     std::string criterionName = audio_is_output_device(type) ?
                 gOutputDeviceAddressCriterionName : gInputDeviceAddressCriterionName;
@@ -279,7 +294,7 @@
     }
 
     auto criterionType = criterion->getCriterionType();
-    int deviceAddressId;
+    uint64_t deviceAddressId;
     if (not criterionType->getNumericalValue(address.c_str(), deviceAddressId)) {
         ALOGW("%s: unknown device address reported (%s) for criterion %s", __FUNCTION__,
               address.c_str(), criterionName.c_str());
@@ -296,28 +311,28 @@
     return NO_ERROR;
 }
 
-status_t ParameterManagerWrapper::setAvailableInputDevices(audio_devices_t inputDevices)
+status_t ParameterManagerWrapper::setAvailableInputDevices(const DeviceTypeSet &types)
 {
     ISelectionCriterionInterface *criterion =
             getElement<ISelectionCriterionInterface>(gInputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gInputDeviceCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__, gInputDeviceCriterionName);
         return DEAD_OBJECT;
     }
-    criterion->setCriterionState(inputDevices & ~AUDIO_DEVICE_BIT_IN);
+    criterion->setCriterionState(convertDeviceTypesToCriterionValue(types));
     applyPlatformConfiguration();
     return NO_ERROR;
 }
 
-status_t ParameterManagerWrapper::setAvailableOutputDevices(audio_devices_t outputDevices)
+status_t ParameterManagerWrapper::setAvailableOutputDevices(const DeviceTypeSet &types)
 {
     ISelectionCriterionInterface *criterion =
             getElement<ISelectionCriterionInterface>(gOutputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gOutputDeviceCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__, gOutputDeviceCriterionName);
         return DEAD_OBJECT;
     }
-    criterion->setCriterionState(outputDevices);
+    criterion->setCriterionState(convertDeviceTypesToCriterionValue(types));
     applyPlatformConfiguration();
     return NO_ERROR;
 }
@@ -327,5 +342,45 @@
     mPfwConnector->applyConfigurations();
 }
 
+uint64_t ParameterManagerWrapper::convertDeviceTypeToCriterionValue(audio_devices_t type) const {
+    bool isOut = audio_is_output_devices(type);
+    uint32_t typeMask = isOut ? type : (type & ~AUDIO_DEVICE_BIT_IN);
+
+    const auto &adapters = isOut ? mOutputDeviceToCriterionTypeMap : mInputDeviceToCriterionTypeMap;
+    // Only multibit devices need adaptation.
+    if (popcount(typeMask) > 1) {
+        const auto &adapter = adapters.find(type);
+        if (adapter != adapters.end()) {
+            ALOGV("%s: multibit device %d converted to criterion %" PRIu64, __func__, type,
+                  adapter->second);
+            return adapter->second;
+        }
+        ALOGE("%s: failed to find map for multibit device %d", __func__, type);
+        return 0;
+    }
+    return typeMask;
+}
+
+uint64_t ParameterManagerWrapper::convertDeviceTypesToCriterionValue(
+        const DeviceTypeSet &types) const {
+    uint64_t criterionValue = 0;
+    for (const auto &type : types) {
+        criterionValue += convertDeviceTypeToCriterionValue(type);
+    }
+    return criterionValue;
+}
+
+DeviceTypeSet ParameterManagerWrapper::convertDeviceCriterionValueToDeviceTypes(
+        uint64_t criterionValue, bool isOut) const {
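+    // Expand a criterion bitfield back into the set of device types whose criterion bits are all
+    // contained in the given value.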
+    DeviceTypeSet deviceTypes;
+    const auto &adapters = isOut ? mOutputDeviceToCriterionTypeMap : mInputDeviceToCriterionTypeMap;
+    for (const auto &adapter : adapters) {
+        if ((adapter.second & criterionValue) == adapter.second) {
+            deviceTypes.insert(adapter.first);
+        }
+    }
+    return deviceTypes;
+}
+
 } // namespace audio_policy
 } // namespace android
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index 62b129a..fa4ae1e 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include <media/AudioContainers.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
 #include <utils/Errors.h>
@@ -35,7 +36,8 @@
 namespace android {
 namespace audio_policy {
 
-using ValuePair = std::pair<uint32_t, std::string>;
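+// ValuePair fields: <criterion numerical value, android audio_devices_t type, literal name>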
+using ValuePair = std::tuple<uint64_t, uint32_t, std::string>;
+using DeviceToCriterionTypeAdapter = std::map<audio_devices_t, uint64_t>;
 using ValuePairs = std::vector<ValuePair>;
 
 class ParameterManagerWrapper
@@ -105,7 +107,7 @@
      *
      * @return NO_ERROR if devices criterion updated correctly, error code otherwise.
      */
-    status_t setAvailableInputDevices(audio_devices_t inputDevices);
+    status_t setAvailableInputDevices(const DeviceTypeSet &inputDeviceTypes);
 
     /**
      * Set the available output devices i.e. set the associated policy parameter framework criterion
@@ -114,7 +116,7 @@
      *
      * @return NO_ERROR if devices criterion updated correctly, error code otherwise.
      */
-    status_t setAvailableOutputDevices(audio_devices_t outputDevices);
+    status_t setAvailableOutputDevices(const DeviceTypeSet &outputDeviceTypes);
 
     /**
      * @brief setDeviceConnectionState propagates a state event on a given device(s)
@@ -124,7 +126,7 @@
      * @return NO_ERROR if new state corretly propagated to Engine Parameter-Framework, error
      * code otherwise.
      */
-    status_t setDeviceConnectionState(audio_devices_t type, const std::string address,
+    status_t setDeviceConnectionState(audio_devices_t type, const std::string &address,
                                       audio_policy_dev_state_t state);
 
     /**
@@ -138,6 +140,13 @@
     status_t addCriterion(const std::string &name, bool isInclusive, ValuePairs pairs,
                           const std::string &defaultValue);
 
+    uint64_t convertDeviceTypeToCriterionValue(audio_devices_t type) const;
+
+    uint64_t convertDeviceTypesToCriterionValue(const DeviceTypeSet &types) const;
+
+    DeviceTypeSet convertDeviceCriterionValueToDeviceTypes(
+            uint64_t criterionValue, bool isOut) const;
+
 private:
     /**
      * Apply the configuration of the platform on the policy parameter manager.
@@ -211,6 +220,9 @@
     template <typename T>
     struct parameterManagerElementSupported;
 
+    DeviceToCriterionTypeAdapter mOutputDeviceToCriterionTypeMap;
+    DeviceToCriterionTypeAdapter mInputDeviceToCriterionTypeMap;
+
     static const char *const mPolicyPfwDefaultConfFileName; /**< Default Policy PFW top file name.*/
     static const char *const mPolicyPfwVendorConfFileName; /**< Vendor Policy PFW top file name.*/
 };
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7f9c0ac..4671fe9 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -31,6 +31,7 @@
     ],
     shared_libs: [
         "libaudiofoundation",
+        "libbase",
         "liblog",
         "libcutils",
         "libutils",
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index bc32416..0ddf66d 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -57,9 +57,6 @@
     <ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
         <AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
         </AttributesGroup>
     </ProductStrategy>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index c73c17d..d4d514d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -270,9 +270,10 @@
         devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
         if (!devices.isEmpty()) break;
         devices = availableOutputDevices.getFirstDevicesFromTypes(
-                                          getLastRemovableMediaDevices());
+                        getLastRemovableMediaDevices(GROUP_NONE, {AUDIO_DEVICE_OUT_BLE_HEADSET}));
         if (!devices.isEmpty()) break;
-        devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_EARPIECE);
+        devices = availableOutputDevices.getFirstDevicesFromTypes({
+                AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE});
     } break;
 
     case STRATEGY_SONIFICATION:
@@ -343,6 +344,30 @@
             (getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) == AUDIO_POLICY_FORCE_SPEAKER)) {
             devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
         }
+
+        // LE audio broadcast device is only used if:
+        // - No call is active
+        // - either MEDIA or SONIFICATION_RESPECTFUL is the highest priority active strategy
+        //   OR the LE audio unicast device is not active
+        if (devices2.isEmpty() && !isInCall()
+                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL)) {
+            legacy_strategy topActiveStrategy = STRATEGY_NONE;
+            for (const auto &ps : getOrderedProductStrategies()) {
+                if (outputs.isStrategyActive(ps)) {
+                    topActiveStrategy = mLegacyStrategyMap.find(ps) != end(mLegacyStrategyMap) ?
+                            mLegacyStrategyMap.at(ps) : STRATEGY_NONE;
+                    break;
+                }
+            }
+
+            if (topActiveStrategy == STRATEGY_NONE || topActiveStrategy == STRATEGY_MEDIA
+                    || topActiveStrategy == STRATEGY_SONIFICATION_RESPECTFUL
+                    || !outputs.isAnyDeviceTypeActive(getAudioDeviceOutLeAudioUnicastSet())) {
+                devices2 =
+                        availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_BLE_BROADCAST);
+            }
+        }
+
         if (devices2.isEmpty() && (getLastRemovableMediaDevices().size() > 0)) {
             if ((getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA) != AUDIO_POLICY_FORCE_NO_BT_A2DP)) {
                 // Get the last connected device of wired and bluetooth a2dp
@@ -364,7 +389,8 @@
                     AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET);
         }
         if (devices2.isEmpty()) {
-            devices2 = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
+            devices2 = availableOutputDevices.getFirstDevicesFromTypes({
+                        AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_SPEAKER});
         }
         DeviceVector devices3;
         if (strategy == STRATEGY_MEDIA) {
@@ -460,6 +486,7 @@
         case AUDIO_SOURCE_HOTWORD:
         case AUDIO_SOURCE_CAMCORDER:
         case AUDIO_SOURCE_VOICE_PERFORMANCE:
+        case AUDIO_SOURCE_ULTRASOUND:
             inputSource = AUDIO_SOURCE_VOICE_COMMUNICATION;
             break;
         default:
@@ -482,7 +509,7 @@
             if (device != nullptr) break;
         }
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_WIRED_HEADSET,
                 AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
                 AUDIO_DEVICE_IN_BLUETOOTH_BLE, AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
@@ -536,7 +563,7 @@
         // because sometimes user want to do voice search by bt remote
         // even if BUILDIN_MIC is available.
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_WIRED_HEADSET,
                 AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
                 AUDIO_DEVICE_IN_BLUETOOTH_BLE, AUDIO_DEVICE_IN_BUILTIN_MIC});
 
@@ -552,7 +579,7 @@
             if (device != nullptr) break;
         }
         device = availableDevices.getFirstExistingDevice({
-                AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+                AUDIO_DEVICE_IN_WIRED_HEADSET,
                 AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
                 AUDIO_DEVICE_IN_BUILTIN_MIC});
         break;
@@ -586,6 +613,10 @@
         device = availableDevices.getDevice(
                 AUDIO_DEVICE_IN_ECHO_REFERENCE, String8(""), AUDIO_FORMAT_DEFAULT);
         break;
+    case AUDIO_SOURCE_ULTRASOUND:
+        device = availableDevices.getFirstExistingDevice({
+                AUDIO_DEVICE_IN_BUILTIN_MIC, AUDIO_DEVICE_IN_BACK_MIC});
+        break;
     default:
         ALOGW("getDeviceForInputSource() invalid input source %d", inputSource);
         break;
@@ -645,7 +676,7 @@
         // there is a preferred device, is it available?
         preferredAvailableDevVec =
                 availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
-        if (preferredAvailableDevVec.size() == preferredAvailableDevVec.size()) {
+        if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
             ALOGVV("%s using pref device %s for strategy %u",
                    __func__, preferredAvailableDevVec.toString().c_str(), strategy);
             return preferredAvailableDevVec;
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index faf15d6..9f6b703 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -62,4 +62,7 @@
         "libaudiopolicymanager_interface_headers",
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
+    fuzz_config: {
+       cc: ["mnaganov@google.com"],
+    },
 }
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 8584702..48f7410 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -30,6 +30,7 @@
 #include <libxml/parser.h>
 #include <libxml/xinclude.h>
 #include <media/AudioPolicy.h>
+#include <media/AudioProfile.h>
 #include <media/PatchBuilder.h>
 #include <media/RecordingActivityTracker.h>
 
@@ -126,13 +127,20 @@
     return result;
 }();
 
+/**
+ * AUDIO_SOURCE_VOICE_COMMUNICATION and AUDIO_SOURCE_HOTWORD are excluded from
+ * kAudioSources[] to avoid the abort that Engine::getDeviceForInputSource()
+ * triggers for these two AudioSource types.
+ */
 static const std::vector<audio_source_t> kAudioSources = [] {
     std::vector<audio_source_t> result;
     for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
         audio_source_t audioSourceHal;
         std::string audioSource = toString(enumVal);
-        if (audio_source_from_string(audioSource.c_str(), &audioSourceHal)) {
-            result.push_back(audioSourceHal);
+        if (enumVal != xsd::AudioSource::AUDIO_SOURCE_VOICE_COMMUNICATION &&
+            enumVal != xsd::AudioSource::AUDIO_SOURCE_HOTWORD &&
+            audio_source_from_string(audioSource.c_str(), &audioSourceHal)) {
+            result.push_back(audioSourceHal);
         }
     }
     return result;
@@ -250,13 +258,14 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
 
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource;
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
-            &config, &flags, selectedDeviceId, portId, {}, &outputType) != OK) {
+            &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized) != OK) {
         return false;
     }
     if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
@@ -841,6 +850,9 @@
         : AudioPolicyManagerFuzzerWithConfigurationFile(fdp){};
     void process() override;
 
+    void fuzzGetDirectPlaybackSupport();
+    void fuzzGetDirectProfilesForAttributes();
+
    protected:
     void setDeviceConnectionState();
     void explicitlyRoutingAfterConnection();
@@ -891,10 +903,41 @@
     }
 }
 
+void AudioPolicyManagerFuzzerDeviceConnection::fuzzGetDirectPlaybackSupport() {
+    const uint32_t numTestCases = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (uint32_t i = 0; i < numTestCases; ++i) {
+        audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+        attr.content_type = getValueFromVector<audio_content_type_t>(mFdp, kAudioContentTypes);
+        attr.usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
+        attr.source = getValueFromVector<audio_source_t>(mFdp, kAudioSources);
+        attr.flags = getValueFromVector<audio_flags_mask_t>(mFdp, kAudioFlagMasks);
+        audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+        config.channel_mask = getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks);
+        config.format = getValueFromVector<audio_format_t>(mFdp, kAudioFormats);
+        config.sample_rate = getValueFromVector<uint32_t>(mFdp, kSamplingRates);
+        mManager->getDirectPlaybackSupport(&attr, &config);
+    }
+}
+
+void AudioPolicyManagerFuzzerDeviceConnection::fuzzGetDirectProfilesForAttributes() {
+    const uint32_t numTestCases = mFdp->ConsumeIntegralInRange<uint32_t>(1, 10);
+    for (uint32_t i = 0; i < numTestCases; ++i) {
+        AudioProfileVector audioProfiles;
+        audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+        attr.content_type = getValueFromVector<audio_content_type_t>(mFdp, kAudioContentTypes);
+        attr.usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
+        attr.source = getValueFromVector<audio_source_t>(mFdp, kAudioSources);
+        attr.flags = getValueFromVector<audio_flags_mask_t>(mFdp, kAudioFlagMasks);
+        mManager->getDirectProfilesForAttributes(&attr, audioProfiles);
+    }
+}
+
 void AudioPolicyManagerFuzzerDeviceConnection::process() {
     if (initialize()) {
         setDeviceConnectionState();
         explicitlyRoutingAfterConnection();
+        fuzzGetDirectPlaybackSupport();
+        fuzzGetDirectProfilesForAttributes();
         fuzzPatchCreation();
     }
 }
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 0165dc8..4b4817e 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -36,6 +36,8 @@
         "libaudiopolicyenginedefault",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "android.media.audio.common.types-V1-cpp",
+        "audioclient-types-aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 5a18762..744609f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -38,6 +38,7 @@
 #include <vector>
 
 #include <Serializer.h>
+#include <android/media/audio/common/AudioPort.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioParameter.h>
@@ -53,6 +54,10 @@
 
 namespace android {
 
+using android::media::audio::common::AudioDevice;
+using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioPortDeviceExt;
+using android::media::audio::common::AudioPortExt;
 using content::AttributionSourceState;
 
 //FIXME: workaround for truncated touch sounds
@@ -97,44 +102,80 @@
 // AudioPolicyInterface implementation
 // ----------------------------------------------------------------------------
 
-status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device,
-                                                      audio_policy_dev_state_t state,
-                                                      const char *device_address,
-                                                      const char *device_name,
-                                                      audio_format_t encodedFormat)
-{
-    status_t status = setDeviceConnectionStateInt(device, state, device_address,
-                                                  device_name, encodedFormat);
+status_t AudioPolicyManager::setDeviceConnectionState(audio_policy_dev_state_t state,
+        const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat) {
+    status_t status = setDeviceConnectionStateInt(state, port, encodedFormat);
     nextAudioPortGeneration();
     return status;
 }
 
+status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device,
+                                                      audio_policy_dev_state_t state,
+                                                      const char* device_address,
+                                                      const char* device_name,
+                                                      audio_format_t encodedFormat) {
+    media::AudioPort aidlPort;
+    if (status_t status = deviceToAudioPort(device, device_address, device_name, &aidlPort);
+        status == OK) {
+        return setDeviceConnectionState(state, aidlPort.hal, encodedFormat);
+    } else {
+        ALOGE("Failed to convert to AudioPort Parcelable: %s", statusToString(status).c_str());
+        return status;
+    }
+}
+
 void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
                                                         audio_policy_dev_state_t state)
 {
-    AudioParameter param(String8(device->address().c_str()));
-    const String8 key(state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE ?
-                AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
-    param.addInt(key, device->type());
-    mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString());
+    audio_port_v7 devicePort;
+    device->toAudioPort(&devicePort);
+    if (status_t status = mpClientInterface->setDeviceConnectedState(
+                    &devicePort, state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+            status != OK) {
+        ALOGE("Error %d while setting connected state for device %s", status,
+                device->getDeviceTypeAddr().toString(false).c_str());
+    }
+}
+
+status_t AudioPolicyManager::setDeviceConnectionStateInt(
+        audio_policy_dev_state_t state, const android::media::audio::common::AudioPort& port,
+        audio_format_t encodedFormat) {
+    // TODO: b/211601178 Forward 'port' to Audio HAL via mHwModules. For now, only device_type,
+    // device_address and device_name are forwarded.
+    if (port.ext.getTag() != AudioPortExt::device) {
+        return BAD_VALUE;
+    }
+    audio_devices_t device_type;
+    std::string device_address;
+    if (status_t status = aidl2legacy_AudioDevice_audio_device(
+                port.ext.get<AudioPortExt::device>().device, &device_type, &device_address);
+        status != OK) {
+        return status;
+    };
+    const char* device_name = port.name.c_str();
+    // connect/disconnect only 1 device at a time
+    if (!audio_is_output_device(device_type) && !audio_is_input_device(device_type))
+        return BAD_VALUE;
+
+    sp<DeviceDescriptor> device = mHwModules.getDeviceDescriptor(
+            device_type, device_address.c_str(), device_name, encodedFormat,
+            state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+    return device ? setDeviceConnectionStateInt(device, state) : INVALID_OPERATION;
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t deviceType,
                                                          audio_policy_dev_state_t state,
-                                                         const char *device_address,
-                                                         const char *device_name,
-                                                         audio_format_t encodedFormat)
-{
-    ALOGV("setDeviceConnectionStateInt() device: 0x%X, state %d, address %s name %s format 0x%X",
-            deviceType, state, device_address, device_name, encodedFormat);
-
-    // connect/disconnect only 1 device at a time
-    if (!audio_is_output_device(deviceType) && !audio_is_input_device(deviceType)) return BAD_VALUE;
-
-    sp<DeviceDescriptor> device =
-            mHwModules.getDeviceDescriptor(deviceType, device_address, device_name, encodedFormat,
-                                           state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
-    return device ? setDeviceConnectionStateInt(device, state) : INVALID_OPERATION;
+                                                         const char* device_address,
+                                                         const char* device_name,
+                                                         audio_format_t encodedFormat) {
+    media::AudioPort aidlPort;
+    if (status_t status = deviceToAudioPort(deviceType, device_address, device_name, &aidlPort);
+        status == OK) {
+        return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat);
+    } else {
+        ALOGE("Failed to convert to AudioPort Parcelable: %s", statusToString(status).c_str());
+        return status;
+    }
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(const sp<DeviceDescriptor> &device,
@@ -149,6 +190,9 @@
         // save a copy of the opened output descriptors before any output is opened or closed
         // by checkOutputsForDevice(). This will be needed by checkOutputForAllStrategies()
         mPreviousOutputs = mOutputs;
+
+        bool wasLeUnicastActive = isLeUnicastActive();
+
         switch (state)
         {
         // handle output device connection
@@ -246,9 +290,12 @@
                     sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
                     // close unused outputs after device disconnection or direct outputs that have
                     // been opened by checkOutputsForDevice() to query dynamic parameters
+                    // "outputs" vector never contains duplicated outputs
                     if ((state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)
                             || (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) &&
-                                (desc->mDirectOpenCount == 0))) {
+                                (desc->mDirectOpenCount == 0))
+                            || (((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) &&
+                                !isOutputOnlyAvailableRouteToSomeDevice(desc))) {
                         clearAudioSourcesForOutput(output);
                         closeOutput(output);
                     }
@@ -312,6 +359,8 @@
             cleanUpForDevice(device);
         }
 
+        checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, 0);
+
         mpClientInterface->onAudioPortListUpdate();
         return NO_ERROR;
     }  // end if is output device
@@ -399,6 +448,14 @@
     return BAD_VALUE;
 }
 
+status_t AudioPolicyManager::deviceToAudioPort(audio_devices_t device, const char* device_address,
+                                               const char* device_name,
+                                               media::AudioPort* aidlPort) {
+    DeviceDescriptorBase devDescr(device, device_address);
+    devDescr.setName(device_name);
+    return devDescr.writeToParcelable(aidlPort);
+}
+
 void AudioPolicyManager::setEngineDeviceConnectionState(const sp<DeviceDescriptor> device,
                                       audio_policy_dev_state_t state) {
 
@@ -525,10 +582,10 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getHwOffloadEncodingFormatsSupportedForA2DP(
-                                    std::vector<audio_format_t> *formats)
+status_t AudioPolicyManager::getHwOffloadFormatsSupportedForBluetoothMedia(
+                                    audio_devices_t device, std::vector<audio_format_t> *formats)
 {
-    ALOGV("getHwOffloadEncodingFormatsSupportedForA2DP()");
+    ALOGV("getHwOffloadFormatsSupportedForBluetoothMedia()");
     status_t status = NO_ERROR;
     std::unordered_set<audio_format_t> formatSet;
     sp<HwModule> primaryModule =
@@ -537,8 +594,23 @@
         ALOGE("%s() unable to get primary module", __func__);
         return NO_INIT;
     }
+
+    DeviceTypeSet audioDeviceSet;
+
+    switch (device) {
+    case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        audioDeviceSet = getAudioDeviceOutAllA2dpSet();
+        break;
+    case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        audioDeviceSet = getAudioDeviceOutAllBleSet();
+        break;
+    default:
+        ALOGE("%s() device type 0x%08x not supported", __func__, device);
+        return BAD_VALUE;
+    }
+
     DeviceVector declaredDevices = primaryModule->getDeclaredDevices().getDevicesFromTypes(
-            getAudioDeviceOutAllA2dpSet());
+            audioDeviceSet);
     for (const auto& device : declaredDevices) {
         formatSet.insert(device->encodedFormats().begin(), device->encodedFormats().end());
     }
@@ -600,12 +672,8 @@
     ALOGV("%s device rxDevice %s txDevice %s", __func__,
           rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
 
-    disconnectTelephonyRxAudioSource();
-    // release TX patch if any
-    if (mCallTxPatch != 0) {
-        releaseAudioPatchInternal(mCallTxPatch->getHandle());
-        mCallTxPatch.clear();
-    }
+    disconnectTelephonyAudioSource(mCallRxSourceClient);
+    disconnectTelephonyAudioSource(mCallTxSourceClient);
 
     auto telephonyRxModule =
         mHwModules.getModuleForDeviceType(AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
@@ -663,7 +731,7 @@
                 closeActiveClients(activeDesc);
             }
         }
-        mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
+        connectTelephonyTxAudioSource(txSourceDevice, txSinkDevice, delayMs);
     }
     if (waitMs != nullptr) {
         *waitMs = muteWaitMs;
@@ -671,36 +739,6 @@
     return NO_ERROR;
 }
 
-sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
-        bool isRx, const sp<DeviceDescriptor> &device, uint32_t delayMs) {
-    PatchBuilder patchBuilder;
-
-    if (device == nullptr) {
-        return nullptr;
-    }
-
-    // @TODO: still ignoring the address, or not dealing platform with multiple telephony devices
-    if (isRx) {
-        patchBuilder.addSink(device).
-                addSource(mAvailableInputDevices.getDevice(
-                    AUDIO_DEVICE_IN_TELEPHONY_RX, String8(), AUDIO_FORMAT_DEFAULT));
-    } else {
-        patchBuilder.addSource(device).
-                addSink(mAvailableOutputDevices.getDevice(
-                    AUDIO_DEVICE_OUT_TELEPHONY_TX, String8(), AUDIO_FORMAT_DEFAULT));
-    }
-
-    audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
-    status_t status =
-            createAudioPatchInternal(patchBuilder.patch(), &patchHandle, mUidCached, delayMs);
-    ssize_t index = mAudioPatches.indexOfKey(patchHandle);
-    if (status != NO_ERROR || index < 0) {
-        ALOGW("%s() error %d creating %s audio patch", __func__, status, isRx ? "RX" : "TX");
-        return nullptr;
-    }
-    return mAudioPatches.valueAt(index);
-}
-
 bool AudioPolicyManager::isDeviceOfModule(
         const sp<DeviceDescriptor>& devDesc, const char *moduleId) const {
     sp<HwModule> module = mHwModules.getModuleFromName(moduleId);
@@ -715,20 +753,55 @@
 
 void AudioPolicyManager::connectTelephonyRxAudioSource()
 {
-    disconnectTelephonyRxAudioSource();
+    disconnectTelephonyAudioSource(mCallRxSourceClient);
     const struct audio_port_config source = {
         .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-    status_t status = startAudioSource(&source, &aa, &mCallRxSourceClientPort, 0/*uid*/);
-    ALOGE_IF(status != NO_ERROR, "%s failed to start Telephony Rx AudioSource", __func__);
+    mCallRxSourceClient = startAudioSourceInternal(&source, &aa, 0/*uid*/);
+    ALOGE_IF(mCallRxSourceClient == nullptr,
+             "%s failed to start Telephony Rx AudioSource", __func__);
 }
 
-void AudioPolicyManager::disconnectTelephonyRxAudioSource()
+void AudioPolicyManager::disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc)
 {
-    stopAudioSource(mCallRxSourceClientPort);
-    mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
+    if (clientDesc == nullptr) {
+        return;
+    }
+    ALOGW_IF(stopAudioSource(clientDesc->portId()) != NO_ERROR,
+            "%s error stopping audio source", __func__);
+    clientDesc.clear();
+}
+
+void AudioPolicyManager::connectTelephonyTxAudioSource(
+        const sp<DeviceDescriptor> &srcDevice, const sp<DeviceDescriptor> &sinkDevice,
+        uint32_t delayMs)
+{
+    disconnectTelephonyAudioSource(mCallTxSourceClient);
+    if (srcDevice == nullptr || sinkDevice == nullptr) {
+        ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
+        return;
+    }
+    PatchBuilder patchBuilder;
+    patchBuilder.addSource(srcDevice).addSink(sinkDevice);
+    ALOGV("%s between source %s and sink %s", __func__,
+            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+    auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
+    const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+    struct audio_port_config source = {};
+    srcDevice->toAudioPortConfig(&source);
+    mCallTxSourceClient = new InternalSourceClientDescriptor(
+                callTxSourceClientPortId, mUidCached, aa, source, srcDevice, sinkDevice,
+                mCommunnicationStrategy, toVolumeSource(aa));
+    audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
+    status_t status = connectAudioSourceToSink(
+                mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
+                delayMs);
+    ALOGE_IF(status != NO_ERROR, "%s() error %d creating TX audio patch", __func__, status);
+    if (status == NO_ERROR) {
+        mAudioSources.add(callTxSourceClientPortId, mCallTxSourceClient);
+    }
 }
 
 void AudioPolicyManager::setPhoneState(audio_mode_t state)
@@ -736,6 +809,7 @@
     ALOGV("setPhoneState() state %d", state);
     // store previous phone state for management of sonification strategy below
     int oldState = mEngine->getPhoneState();
+    bool wasLeUnicastActive = isLeUnicastActive();
 
     if (mEngine->setPhoneState(state) != NO_ERROR) {
         ALOGW("setPhoneState() invalid or same state %d", state);
@@ -796,11 +870,8 @@
                 rxDevices = mPrimaryOutput->devices();
             }
             if (oldState == AUDIO_MODE_IN_CALL) {
-                disconnectTelephonyRxAudioSource();
-                if (mCallTxPatch != 0) {
-                    releaseAudioPatchInternal(mCallTxPatch->getHandle());
-                    mCallTxPatch.clear();
-                }
+                disconnectTelephonyAudioSource(mCallRxSourceClient);
+                disconnectTelephonyAudioSource(mCallTxSourceClient);
             }
             setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
         }
@@ -810,11 +881,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
-        if (state != AUDIO_MODE_IN_CALL || desc != mPrimaryOutput) {
-            setOutputDevices(desc, newDevices, !newDevices.isEmpty(), 0 /*delayMs*/);
+        if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
+            bool forceRouting = !newDevices.isEmpty();
+            setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
+                             true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
     }
 
+    checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
+
     if (isStateInCall(state)) {
         ALOGV("setPhoneState() in call state management: new state is %d", state);
         // force reevaluating accessibility routing when call starts
@@ -871,6 +946,32 @@
     ALOGV("setSystemProperty() property %s, value %s", property, value);
 }
 
+// Find an MSD output profile compatible with the parameters passed.
+// When "directOnly" is set, restrict search to profiles for direct outputs.
+sp<IOProfile> AudioPolicyManager::getMsdProfileForOutput(
+                                                   const DeviceVector& devices,
+                                                   uint32_t samplingRate,
+                                                   audio_format_t format,
+                                                   audio_channel_mask_t channelMask,
+                                                   audio_output_flags_t flags,
+                                                   bool directOnly)
+{
+    flags = getRelevantFlags(flags, directOnly);
+
+    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    if (msdModule != nullptr) {
+        // for the msd module check if there are patches to the output devices
+        if (msdHasPatchesToAllDevices(devices.toTypeAddrVector())) {
+            HwModuleCollection modules;
+            modules.add(msdModule);
+            return searchCompatibleProfileHwModules(
+                    modules, getMsdAudioOutDevices(), samplingRate, format, channelMask,
+                    flags, directOnly);
+        }
+    }
+    return nullptr;
+}
+
 // Find an output profile compatible with the parameters passed. When "directOnly" is set, restrict
 // search to profiles for direct outputs.
 sp<IOProfile> AudioPolicyManager::getProfileForOutput(
@@ -881,45 +982,65 @@
                                                    audio_output_flags_t flags,
                                                    bool directOnly)
 {
+    flags = getRelevantFlags(flags, directOnly);
+
+    return searchCompatibleProfileHwModules(
+            mHwModules, devices, samplingRate, format, channelMask, flags, directOnly);
+}
+
+audio_output_flags_t AudioPolicyManager::getRelevantFlags(
+                                            audio_output_flags_t flags, bool directOnly) {
     if (directOnly) {
-        // only retain flags that will drive the direct output profile selection
-        // if explicitly requested
-        static const uint32_t kRelevantFlags =
+         // only retain flags that will drive the direct output profile selection
+         // if explicitly requested
+         static const uint32_t kRelevantFlags =
                 (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
-                 AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
-        flags =
-            (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
+                AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
+         flags = (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
     }
+    return flags;
+}
 
+sp<IOProfile> AudioPolicyManager::searchCompatibleProfileHwModules(
+                                        const HwModuleCollection& hwModules,
+                                        const DeviceVector& devices,
+                                        uint32_t samplingRate,
+                                        audio_format_t format,
+                                        audio_channel_mask_t channelMask,
+                                        audio_output_flags_t flags,
+                                        bool directOnly) {
     sp<IOProfile> profile;
-
-    for (const auto& hwModule : mHwModules) {
+    for (const auto& hwModule : hwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-            if (!curProfile->isCompatibleProfile(devices,
-                    samplingRate, NULL /*updatedSamplingRate*/,
-                    format, NULL /*updatedFormat*/,
-                    channelMask, NULL /*updatedChannelMask*/,
-                    flags)) {
+             if (!curProfile->isCompatibleProfile(devices,
+                     samplingRate, NULL /*updatedSamplingRate*/,
+                     format, NULL /*updatedFormat*/,
+                     channelMask, NULL /*updatedChannelMask*/,
+                     flags)) {
+                 continue;
+             }
+             // reject profiles not corresponding to a device currently available
+             if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
+                 continue;
+             }
+             // reject profiles if connected device does not support codec
+             if (!curProfile->devicesSupportEncodedFormats(devices.types())) {
+                 continue;
+             }
+             if (!directOnly) {
+                return curProfile;
+             }
+
+             // when searching for direct outputs, if several profiles are compatible, give priority
+             // to one with offload capability
+             if (profile != 0 &&
+                 ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
                 continue;
-            }
-            // reject profiles not corresponding to a device currently available
-            if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
-                continue;
-            }
-            // reject profiles if connected device does not support codec
-            if (!curProfile->devicesSupportEncodedFormats(devices.types())) {
-                continue;
-            }
-            if (!directOnly) return curProfile;
-            // when searching for direct outputs, if several profiles are compatible, give priority
-            // to one with offload capability
-            if (profile != 0 && ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
-                continue;
-            }
-            profile = curProfile;
-            if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
-                break;
-            }
+             }
+             profile = curProfile;
+             if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+                 break;
+             }
         }
     }
     return profile;
@@ -1012,7 +1133,8 @@
         audio_port_handle_t *selectedDeviceId,
         bool *isRequestedDeviceForExclusiveUse,
         std::vector<sp<AudioPolicyMix>> *secondaryMixes,
-        output_type_t *outputType)
+        output_type_t *outputType,
+        bool *isSpatialized)
 {
     DeviceVector outputDevices;
     const audio_port_handle_t requestedPortId = *selectedDeviceId;
@@ -1021,6 +1143,8 @@
         mAvailableOutputDevices.getDeviceFromId(requestedPortId);
 
     *outputType = API_OUTPUT_INVALID;
+    *isSpatialized = false;
+
     status_t status = getAudioAttributes(resultAttr, attr, *stream);
     if (status != NO_ERROR) {
         return status;
@@ -1120,7 +1244,7 @@
 
     *output = AUDIO_IO_HANDLE_NONE;
     if (!msdDevices.isEmpty()) {
-        *output = getOutputForDevices(msdDevices, session, resultAttr, config, flags);
+        *output = getOutputForDevices(msdDevices, session, resultAttr, config, flags, isSpatialized);
         if (*output != AUDIO_IO_HANDLE_NONE && setMsdOutputPatches(&outputDevices) == NO_ERROR) {
             ALOGV("%s() Using MSD devices %s instead of devices %s",
                   __func__, msdDevices.toString().c_str(), outputDevices.toString().c_str());
@@ -1130,7 +1254,7 @@
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
-                flags, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+                flags, isSpatialized, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         return INVALID_OPERATION;
@@ -1165,7 +1289,8 @@
                                               audio_port_handle_t *selectedDeviceId,
                                               audio_port_handle_t *portId,
                                               std::vector<audio_io_handle_t> *secondaryOutputs,
-                                              output_type_t *outputType)
+                                              output_type_t *outputType,
+                                              bool *isSpatialized)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1187,7 +1312,7 @@
 
     status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
             config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-            secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType);
+            secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized);
     if (status != NO_ERROR) {
         return status;
     }
@@ -1330,6 +1455,7 @@
         const audio_attributes_t *attr,
         const audio_config_t *config,
         audio_output_flags_t *flags,
+        bool *isSpatialized,
         bool forceMutingHaptic)
 {
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
@@ -1371,9 +1497,15 @@
         ALOGV("Set VoIP and Direct output flags for PCM format");
     }
 
+    // Attach the Ultrasound flag for AUDIO_CONTENT_TYPE_ULTRASOUND content
+    if (attr->content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
+        *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_ULTRASOUND);
+    }
+
+    *isSpatialized = false;
     if (mSpatializerOutput != nullptr
-            && canBeSpatializedInt(attr, config,
-                    devices.toTypeAddrVector(), false /* allowCurrentOutputReconfig */)) {
+            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())) {
+        *isSpatialized = true;
         return mSpatializerOutput->mIoHandle;
     }
 
@@ -1441,6 +1573,27 @@
     return msdPatches;
 }
 
+bool AudioPolicyManager::isMsdPatch(const audio_patch_handle_t &handle) const {
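+    // A patch is considered an MSD patch if its first sink is one of the MSD output devices,
+    // or if it is tracked in the collection of patches created towards the MSD output devices.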
+    ssize_t index = mAudioPatches.indexOfKey(handle);
+    if (index < 0) {
+        return false;
+    }
+    const sp<AudioPatch> patch = mAudioPatches.valueAt(index);
+    sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    if (msdModule == nullptr) {
+        return false;
+    }
+    const struct audio_port_config *sink = &patch->mPatch.sinks[0];
+    if (getMsdAudioOutDevices().contains(mAvailableOutputDevices.getDeviceFromId(sink->id))) {
+        return true;
+    }
+    index = getMsdOutputPatches().indexOfKey(handle);
+    if (index < 0) {
+        return false;
+    }
+    return true;
+}
+
 status_t AudioPolicyManager::getMsdProfiles(bool hwAvSync,
                                             const InputProfileCollection &inputProfiles,
                                             const OutputProfileCollection &outputProfiles,
@@ -1640,6 +1793,28 @@
     }
 }
 
+bool AudioPolicyManager::msdHasPatchesToAllDevices(const AudioDeviceTypeAddrVector& devices) {
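+    // Start from the requested devices and remove every device that is already the sink of an
+    // MSD output patch; all devices are reachable through MSD only if the set ends up empty.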
+    DeviceVector devicesToCheck = mOutputDevicesAll.getDevicesFromDeviceTypeAddrVec(devices);
+    AudioPatchCollection msdPatches = getMsdOutputPatches();
+    for (size_t i = 0; i < msdPatches.size(); i++) {
+        const auto& patch = msdPatches[i];
+        for (size_t j = 0; j < patch->mPatch.num_sinks; ++j) {
+            const struct audio_port_config *sink = &patch->mPatch.sinks[j];
+            if (sink->type == AUDIO_PORT_TYPE_DEVICE) {
+                const auto& foundDevice = devicesToCheck.getDevice(
+                    sink->ext.device.type, String8(sink->ext.device.address), AUDIO_FORMAT_DEFAULT);
+                if (foundDevice != nullptr) {
+                    devicesToCheck.remove(foundDevice);
+                    if (devicesToCheck.isEmpty()) {
+                        return true;
+                    }
+                }
+            }
+        }
+    }
+    return false;
+}
+
 audio_io_handle_t AudioPolicyManager::selectOutput(const SortedVector<audio_io_handle_t>& outputs,
                                                    audio_output_flags_t flags,
                                                    audio_format_t format,
@@ -1669,7 +1844,8 @@
     // other criteria
     static const audio_output_flags_t kFunctionalFlags = (audio_output_flags_t)
         (AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_INCALL_MUSIC |
-            AUDIO_OUTPUT_FLAG_TTS | AUDIO_OUTPUT_FLAG_DIRECT_PCM);
+            AUDIO_OUTPUT_FLAG_TTS | AUDIO_OUTPUT_FLAG_DIRECT_PCM | AUDIO_OUTPUT_FLAG_ULTRASOUND |
+            AUDIO_OUTPUT_FLAG_SPATIALIZER);
     // Flags expressing a performance request: have lower priority than serving
     // requested sampling rate or channel mask
     static const audio_output_flags_t kPerformanceFlags = (audio_output_flags_t)
@@ -1688,6 +1864,8 @@
     // The priority is as follows:
     // 1: the output supporting haptic playback when requesting haptic playback
     // 2: the output with the highest number of requested functional flags
+    //    with tiebreak preferring the minimum number of extra functional flags
+    //    (see b/200293124, the incorrect selection of AUDIO_OUTPUT_FLAG_VOIP_RX).
     // 3: the output supporting the exact channel mask
     // 4: the output with a higher channel count than requested
     // 5: the output with a higher sampling rate than requested
@@ -1729,7 +1907,12 @@
         }
 
         // functional flags match
-        currentMatchCriteria[1] = popcount(outputDesc->mFlags & functionalFlags);
+        const int matchingFunctionalFlags =
+                __builtin_popcount(outputDesc->mFlags & functionalFlags);
+        const int totalFunctionalFlags =
+                __builtin_popcount(outputDesc->mFlags & kFunctionalFlags);
+        // Prefer matching functional flags, but subtract unnecessary functional flags.
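+        // e.g. an output exposing only one of the requested functional flags scores
+        // 100 * 2 - 1 = 199 and is preferred over an output exposing that flag plus an extra
+        // functional flag, which scores 100 * 2 - 2 = 198.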
+        currentMatchCriteria[1] = 100 * (matchingFunctionalFlags + 1) - totalFunctionalFlags;
 
         // channel mask and channel count match
         uint32_t outputChannelCount = audio_channel_count_from_out_mask(
@@ -1813,6 +1996,22 @@
     return status;
 }
 
+bool AudioPolicyManager::isLeUnicastActive() const {
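+    // A call in progress counts as unicast activity; otherwise check whether any output is
+    // currently active on an LE unicast device.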
+    if (isInCall()) {
+        return true;
+    }
+    return isAnyDeviceTypeActive(getAudioDeviceOutLeAudioUnicastSet());
+}
+
+bool AudioPolicyManager::isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const {
+    if (mAvailableOutputDevices.getDevicesFromTypes(deviceTypes).isEmpty()) {
+        return false;
+    }
+    bool active = mOutputs.isAnyDeviceTypeActive(deviceTypes);
+    ALOGV("%s active %d", __func__, active);
+    return active;
+}
+
 status_t AudioPolicyManager::startSource(const sp<SwAudioOutputDescriptor>& outputDesc,
                                          const sp<TrackClientDescriptor>& client,
                                          uint32_t *delayMs)
@@ -1828,7 +2027,7 @@
     if (stream == AUDIO_STREAM_TTS) {
         ALOGV("\t found BEACON stream");
         if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(
-                                    toVolumeSource(AUDIO_STREAM_TTS) /*sourceToIgnore*/)) {
+                                    toVolumeSource(AUDIO_STREAM_TTS, false) /*sourceToIgnore*/)) {
             return INVALID_OPERATION;
         } else {
             beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
@@ -1840,8 +2039,7 @@
 
     // force device change if the output is inactive and no audio patch is already present.
     // check active before incrementing usage count
-    bool force = !outputDesc->isActive() &&
-            (outputDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE);
+    bool force = !outputDesc->isActive() && !outputDesc->isRouted();
 
     DeviceVector devices;
     sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
@@ -1865,6 +2063,7 @@
     // and muting would result in unnecessary delay and dropped audio.
     const uint32_t outputLatencyMs = outputDesc->latency();
     bool requiresMuteCheck = outputDesc->isActive(outputLatencyMs * 2);  // account for drain
+    bool wasLeUnicastActive = isLeUnicastActive();
 
     // increment usage count for this stream on the requested output:
     // NOTE that the usage count is the same for duplicated output and hardware output which is
@@ -1943,11 +2142,15 @@
 
         // apply volume rules for current stream and device if necessary
         auto &curves = getVolumeCurves(client->attributes());
-        checkAndSetVolume(curves, client->volumeSource(),
+        if (NO_ERROR != checkAndSetVolume(curves, client->volumeSource(),
                           curves.getVolumeIndex(outputDesc->devices().types()),
                           outputDesc,
                           outputDesc->devices().types(), 0 /*delay*/,
-                          outputDesc->useHwGain() /*force*/);
+                          outputDesc->useHwGain() /*force*/)) {
+            // request AudioService to reinitialize the volume curves asynchronously
+            ALOGE("checkAndSetVolume failed, requesting volume range init");
+            mpClientInterface->onVolumeRangeInitRequest();
+        }
 
         // update the outputs if starting an output with a stream that can affect notification
         // routing
@@ -1988,9 +2191,38 @@
                                     AUDIO_FORMAT_DEFAULT);
     }
 
+    checkLeBroadcastRoutes(wasLeUnicastActive, outputDesc, *delayMs);
+
     return NO_ERROR;
 }
 
+void AudioPolicyManager::checkLeBroadcastRoutes(bool wasUnicastActive,
+        sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs) {
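+    // The caller passes the output it has just (re)routed as ignoredOutput so that it is
+    // skipped below, or nullptr to consider all outputs.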
+    bool isUnicastActive = isLeUnicastActive();
+
+    if (wasUnicastActive != isUnicastActive) {
+        // reroute all outputs routed to LE broadcast if LE unicast activity changed on any output
+        for (size_t i = 0; i < mOutputs.size(); i++) {
+            sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+            if (desc != ignoredOutput && desc->isActive()
+                    && ((isUnicastActive &&
+                            !desc->devices().
+                                    getDevicesFromType(AUDIO_DEVICE_OUT_BLE_BROADCAST).isEmpty())
+                        || (wasUnicastActive &&
+                            !desc->devices().getDevicesFromTypes(
+                                    getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
+                DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
+                bool force = desc->devices() != newDevices;
+                setOutputDevices(desc, newDevices, force, delayMs);
+                // re-apply device specific volume if not done by setOutputDevice()
+                if (!force) {
+                    applyStreamVolumes(desc, newDevices.types(), delayMs);
+                }
+            }
+        }
+    }
+}
+
 status_t AudioPolicyManager::stopOutput(audio_port_handle_t portId)
 {
     ALOGV("%s portId %d", __FUNCTION__, portId);
@@ -2019,6 +2251,7 @@
     // always handle stream stop, check which stream type is stopping
     audio_stream_type_t stream = client->stream();
     auto clientVolSrc = client->volumeSource();
+    bool wasLeUnicastActive = isLeUnicastActive();
 
     handleEventForBeacon(stream == AUDIO_STREAM_TTS ? STOPPING_BEACON : STOPPING_OUTPUT);
 
@@ -2050,12 +2283,20 @@
         if (outputDesc->getActivityCount(clientVolSrc) == 0 || forceDeviceUpdate) {
             outputDesc->setStopTime(client, systemTime());
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
+
+            // If the routing does not change, but the output is routed on a device using HwGain
+            // (aka setAudioPortConfig) and there are still active clients following different
+            // volume group(s), force reapplying the volume
+            bool requiresVolumeCheck = outputDesc->getActivityCount(clientVolSrc) == 0 &&
+                    outputDesc->useHwGain() && outputDesc->isAnyActive(VOLUME_SOURCE_NONE);
+
             // delay the device switch by twice the latency because stopOutput() is executed when
             // the track stop() command is received and at that time the audio track buffer can
             // still contain data that needs to be drained. The latency only covers the audio HAL
             // and kernel buffers. Also the latency does not always include additional delay in the
             // audio path (audio DSP, CODEC ...)
-            setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2);
+            setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2,
+                             nullptr, true /*requiresMuteCheck*/, requiresVolumeCheck);
 
             // force restoring the device selection on other active outputs if it differs from the
             // one being selected for this output
@@ -2089,6 +2330,9 @@
         if (followsSameRouting(client->attributes(), attributes_initializer(AUDIO_USAGE_MEDIA))) {
             selectOutputForMusicEffects();
         }
+
+        checkLeBroadcastRoutes(wasLeUnicastActive, outputDesc, outputDesc->latency()*2);
+
         return NO_ERROR;
     } else {
         ALOGW("stopOutput() refcount is already 0");
@@ -2159,11 +2403,11 @@
                                              audio_port_handle_t *portId)
 {
     ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
-          "flags %#x attributes=%s", __func__, attr->source, config->sample_rate,
-          config->format, config->channel_mask, session, flags, toString(*attr).c_str());
+          "flags %#x attributes=%s requested device ID %d",
+          __func__, attr->source, config->sample_rate, config->format, config->channel_mask,
+          session, flags, toString(*attr).c_str(), *selectedDeviceId);
 
     status_t status = NO_ERROR;
-    audio_source_t halInputSource;
     audio_attributes_t attributes = *attr;
     sp<AudioPolicyMix> policyMix;
     sp<DeviceDescriptor> device;
@@ -2183,7 +2427,7 @@
     }
 
     // Explicit routing?
-    sp<DeviceDescriptor> explicitRoutingDevice = 
+    sp<DeviceDescriptor> explicitRoutingDevice =
             mAvailableInputDevices.getDeviceFromId(*selectedDeviceId);
 
     // special case for mmap capture: if an input IO handle is specified, we reuse this input if
@@ -2234,8 +2478,6 @@
     *input = AUDIO_IO_HANDLE_NONE;
     *inputType = API_INPUT_INVALID;
 
-    halInputSource = attributes.source;
-
     if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
             strncmp(attributes.tags, "addr=", strlen("addr=")) == 0) {
         status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
@@ -2349,6 +2591,10 @@
         flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_VOIP_TX);
     }
 
+    if (attributes.source == AUDIO_SOURCE_ULTRASOUND) {
+        flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_ULTRASOUND);
+    }
+
     // find a compatible input profile (not necessarily identical in parameters)
     sp<IOProfile> profile;
     // sampling rate and flags may be updated by getInputProfile
@@ -2365,11 +2611,11 @@
             break; // success
         } else if (profileFlags & AUDIO_INPUT_FLAG_RAW) {
             profileFlags = (audio_input_flags_t) (profileFlags & ~AUDIO_INPUT_FLAG_RAW); // retry
-        } else if (profileFlags != AUDIO_INPUT_FLAG_NONE) {
+        } else if (profileFlags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(config->format)) {
             profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
         } else { // fail
             ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
-                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(), 
+                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
                   config->sample_rate, config->format, config->channel_mask, flags);
             return input;
         }
@@ -2811,6 +3057,8 @@
         // HW Gain management, do not change the volume
         if (desc->useHwGain()) {
             applyVolume = false;
+            // If the volume source is active with a higher priority source, ensure it is at
+            // least SW muted
+            desc->setSwMute((index == 0), vs, curves.getStreamTypes(), curDevices, 0 /*delayMs*/);
             for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
                 auto activeClients = desc->clientsList(true /*activeOnly*/, productStrategy,
                                                        false /*preferredDevice*/);
@@ -2850,7 +3098,7 @@
         // handled by system UI
         status_t volStatus = checkAndSetVolume(
                     curves, vs, index, desc, curDevices,
-                    ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM))?
+                    ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM, false))?
                          TOUCH_SOUND_FIXED_DELAY_MS : 0));
         if (volStatus != NO_ERROR) {
             status = volStatus;
@@ -3053,12 +3301,14 @@
 
 bool AudioPolicyManager::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
 {
-    return mOutputs.isActive(toVolumeSource(stream), inPastMs);
+    auto vs = toVolumeSource(stream, false);
+    return vs != VOLUME_SOURCE_NONE ? mOutputs.isActive(vs, inPastMs) : false;
 }
 
 bool AudioPolicyManager::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
 {
-    return mOutputs.isActiveRemotely(toVolumeSource(stream), inPastMs);
+    auto vs = toVolumeSource(stream, false);
+    return vs != VOLUME_SOURCE_NONE ? mOutputs.isActiveRemotely(vs, inPastMs) : false;
 }
 
 bool AudioPolicyManager::isSourceActive(audio_source_t source) const
@@ -3389,6 +3639,7 @@
 void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
 {
     uint32_t waitMs = 0;
+    bool wasLeUnicastActive = isLeUnicastActive();
     if (updateCallRouting(true /*fromCache*/, delayMs, &waitMs) == NO_ERROR) {
         // Only apply special touch sound delay once
         delayMs = 0;
@@ -3396,11 +3647,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(outputDesc, true /*fromCache*/);
-        if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) || (outputDesc != mPrimaryOutput)) {
+        if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
+                (outputDesc != mPrimaryOutput && !isTelephonyRxOrTx(outputDesc))) {
             // As done in setDeviceConnectionState, we could also fix default device issue by
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
-            waitMs = setOutputDevices(outputDesc, newDevices, !newDevices.isEmpty(), delayMs);
+            bool forceRouting = !newDevices.isEmpty();
+            waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
+                                      true /*requiresMuteCheck*/,
+                                      !forceRouting /*requiresVolumeCheck*/);
             // Only apply special touch sound delay once
             delayMs = 0;
         }
@@ -3408,6 +3663,7 @@
             applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
         }
     }
+    checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
 }
 
 void AudioPolicyManager::updateInputRouting() {
@@ -3569,7 +3825,7 @@
 void AudioPolicyManager::dump(String8 *dst) const
 {
     dst->appendFormat("\nAudioPolicyManager Dump: %p\n", this);
-    dst->appendFormat(" Primary Output: %d\n",
+    dst->appendFormat(" Primary Output I/O handle: %d\n",
              hasPrimaryOutput() ? mPrimaryOutput->mIoHandle : AUDIO_IO_HANDLE_NONE);
     std::string stateLiteral;
     AudioModeConverter::toString(mEngine->getPhoneState(), stateLiteral);
@@ -3591,15 +3847,17 @@
     }
     dst->appendFormat(" TTS output %savailable\n", mTtsOutputAvailable ? "" : "not ");
     dst->appendFormat(" Master mono: %s\n", mMasterMono ? "on" : "off");
-    dst->appendFormat(" Communnication Strategy: %d\n", mCommunnicationStrategy);
+    dst->appendFormat(" Communication Strategy id: %d\n", mCommunnicationStrategy);
     dst->appendFormat(" Config source: %s\n", mConfig.getSource().c_str()); // getConfig not const
 
-    mAvailableOutputDevices.dump(dst, String8("Available output"));
-    mAvailableInputDevices.dump(dst, String8("Available input"));
+    dst->append("\n");
+    mAvailableOutputDevices.dump(dst, String8("Available output"), 1);
+    dst->append("\n");
+    mAvailableInputDevices.dump(dst, String8("Available input"), 1);
     mHwModulesAll.dump(dst);
     mOutputs.dump(dst);
     mInputs.dump(dst);
-    mEffects.dump(dst);
+    mEffects.dump(dst, 1);
     mAudioPatches.dump(dst);
     mPolicyMixes.dump(dst);
     mAudioSources.dump(dst);
@@ -3639,53 +3897,7 @@
      offloadInfo.stream_type, offloadInfo.bit_rate, offloadInfo.duration_us,
      offloadInfo.has_video);
 
-    if (mMasterMono) {
-        return AUDIO_OFFLOAD_NOT_SUPPORTED; // no offloading if mono is set.
-    }
-
-    // Check if offload has been disabled
-    if (property_get_bool("audio.offload.disable", false /* default_value */)) {
-        ALOGV("%s: offload disabled by audio.offload.disable", __func__);
-        return AUDIO_OFFLOAD_NOT_SUPPORTED;
-    }
-
-    // Check if stream type is music, then only allow offload as of now.
-    if (offloadInfo.stream_type != AUDIO_STREAM_MUSIC)
-    {
-        ALOGV("%s: stream_type != MUSIC, returning false", __func__);
-        return AUDIO_OFFLOAD_NOT_SUPPORTED;
-    }
-
-    //TODO: enable audio offloading with video when ready
-    const bool allowOffloadWithVideo =
-            property_get_bool("audio.offload.video", false /* default_value */);
-    if (offloadInfo.has_video && !allowOffloadWithVideo) {
-        ALOGV("%s: has_video == true, returning false", __func__);
-        return AUDIO_OFFLOAD_NOT_SUPPORTED;
-    }
-
-    //If duration is less than minimum value defined in property, return false
-    const int min_duration_secs = property_get_int32(
-            "audio.offload.min.duration.secs", -1 /* default_value */);
-    if (min_duration_secs >= 0) {
-        if (offloadInfo.duration_us < min_duration_secs * 1000000LL) {
-            ALOGV("%s: Offload denied by duration < audio.offload.min.duration.secs(=%d)",
-                    __func__, min_duration_secs);
-            return AUDIO_OFFLOAD_NOT_SUPPORTED;
-        }
-    } else if (offloadInfo.duration_us < OFFLOAD_DEFAULT_MIN_DURATION_SECS * 1000000) {
-        ALOGV("%s: Offload denied by duration < default min(=%u)",
-                __func__, OFFLOAD_DEFAULT_MIN_DURATION_SECS);
-        return AUDIO_OFFLOAD_NOT_SUPPORTED;
-    }
-
-    // Do not allow offloading if one non offloadable effect is enabled. This prevents from
-    // creating an offloaded track and tearing it down immediately after start when audioflinger
-    // detects there is an active non offloadable effect.
-    // FIXME: We should check the audio session here but we do not have it in this context.
-    // This may prevent offloading in rare situations where effects are left active by apps
-    // in the background.
-    if (mEffects.isNonOffloadableEffectEnabled()) {
+    if (!isOffloadPossible(offloadInfo)) {
         return AUDIO_OFFLOAD_NOT_SUPPORTED;
     }
 
@@ -3713,7 +3925,8 @@
                                                  const audio_attributes_t& attributes) {
     audio_output_flags_t output_flags = AUDIO_OUTPUT_FLAG_NONE;
     audio_flags_to_audio_output_flags(attributes.flags, &output_flags);
-    sp<IOProfile> profile = getProfileForOutput(DeviceVector() /*ignore device */,
+    DeviceVector outputDevices = mEngine->getOutputDevicesForAttributes(attributes);
+    sp<IOProfile> profile = getProfileForOutput(outputDevices,
                                             config.sample_rate,
                                             config.format,
                                             config.channel_mask,
@@ -3724,7 +3937,197 @@
         __FUNCTION__, profile != 0 ? "" : "NOT ",
         (profile != 0 ? profile->getTagName().c_str() : "null"),
         config.sample_rate, config.format, config.channel_mask, output_flags);
-    return (profile != 0);
+
+    // also try the MSD module if compatible profile not found
+    if (profile == nullptr) {
+        profile = getMsdProfileForOutput(outputDevices,
+                                              config.sample_rate,
+                                              config.format,
+                                              config.channel_mask,
+                                              output_flags,
+                                              true /* directOnly */);
+        ALOGV("%s() MSD profile %sfound with name: %s, "
+            "sample rate: %u, format: 0x%x, channel_mask: 0x%x, output flags: 0x%x",
+            __FUNCTION__, profile != 0 ? "" : "NOT ",
+            (profile != 0 ? profile->getTagName().c_str() : "null"),
+            config.sample_rate, config.format, config.channel_mask, output_flags);
+    }
+    return (profile != nullptr);
+}
+
+bool AudioPolicyManager::isOffloadPossible(const audio_offload_info_t &offloadInfo,
+                                           bool durationIgnored) {
+    if (mMasterMono) {
+        return false; // no offloading if mono is set.
+    }
+
+    // Check if offload has been disabled
+    if (property_get_bool("audio.offload.disable", false /* default_value */)) {
+        ALOGV("%s: offload disabled by audio.offload.disable", __func__);
+        return false;
+    }
+
+    // For now, only allow offload for the MUSIC stream type.
+    if (offloadInfo.stream_type != AUDIO_STREAM_MUSIC) {
+        ALOGV("%s: stream_type != MUSIC, returning false", __func__);
+        return false;
+    }
+
+    //TODO: enable audio offloading with video when ready
+    const bool allowOffloadWithVideo =
+            property_get_bool("audio.offload.video", false /* default_value */);
+    if (offloadInfo.has_video && !allowOffloadWithVideo) {
+        ALOGV("%s: has_video == true, returning false", __func__);
+        return false;
+    }
+
+    //If duration is less than minimum value defined in property, return false
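+    // Callers that do not know the track duration yet (e.g. getDirectPlaybackSupport) pass
+    // durationIgnored to skip this check.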
+    const int min_duration_secs = property_get_int32(
+            "audio.offload.min.duration.secs", -1 /* default_value */);
+    if (!durationIgnored) {
+        if (min_duration_secs >= 0) {
+            if (offloadInfo.duration_us < min_duration_secs * 1000000LL) {
+                ALOGV("%s: Offload denied by duration < audio.offload.min.duration.secs(=%d)",
+                      __func__, min_duration_secs);
+                return false;
+            }
+        } else if (offloadInfo.duration_us < OFFLOAD_DEFAULT_MIN_DURATION_SECS * 1000000) {
+            ALOGV("%s: Offload denied by duration < default min(=%u)",
+                  __func__, OFFLOAD_DEFAULT_MIN_DURATION_SECS);
+            return false;
+        }
+    }
+
+    // Do not allow offloading if one non offloadable effect is enabled. This prevents
+    // creating an offloaded track and tearing it down immediately after start when audioflinger
+    // detects there is an active non offloadable effect.
+    // FIXME: We should check the audio session here but we do not have it in this context.
+    // This may prevent offloading in rare situations where effects are left active by apps
+    // in the background.
+    if (mEffects.isNonOffloadableEffectEnabled()) {
+        return false;
+    }
+
+    return true;
+}
+
+audio_direct_mode_t AudioPolicyManager::getDirectPlaybackSupport(const audio_attributes_t *attr,
+                                                                 const audio_config_t *config) {
+    audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+    offloadInfo.format = config->format;
+    offloadInfo.sample_rate = config->sample_rate;
+    offloadInfo.channel_mask = config->channel_mask;
+    offloadInfo.stream_type = mEngine->getStreamTypeForAttributes(*attr);
+    offloadInfo.has_video = false;
+    offloadInfo.is_streaming = false;
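+    // Duration is unknown at this point, so the offload policy is checked with durationIgnored.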
+    const bool offloadPossible = isOffloadPossible(offloadInfo, true /*durationIgnored*/);
+
+    audio_direct_mode_t directMode = AUDIO_DIRECT_NOT_SUPPORTED;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+    audio_flags_to_audio_output_flags(attr->flags, &flags);
+    // only retain flags that will drive compressed offload or passthrough
+    uint32_t relevantFlags = AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
+    if (offloadPossible) {
+        relevantFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+    }
+    flags = (audio_output_flags_t)((flags & relevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
+
+    DeviceVector engineOutputDevices = mEngine->getOutputDevicesForAttributes(*attr);
+    for (const auto& hwModule : mHwModules) {
+        DeviceVector outputDevices = engineOutputDevices;
+        // the MSD module checks for different conditions and output devices
+        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+            if (!msdHasPatchesToAllDevices(engineOutputDevices.toTypeAddrVector())) {
+                continue;
+            }
+            outputDevices = getMsdAudioOutDevices();
+        }
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (!curProfile->isCompatibleProfile(outputDevices,
+                    config->sample_rate, nullptr /*updatedSamplingRate*/,
+                    config->format, nullptr /*updatedFormat*/,
+                    config->channel_mask, nullptr /*updatedChannelMask*/,
+                    flags)) {
+                continue;
+            }
+            // reject profiles not corresponding to a device currently available
+            if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
+                continue;
+            }
+            if ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)
+                        != AUDIO_OUTPUT_FLAG_NONE) {
+                if ((directMode & AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED)
+                        != AUDIO_DIRECT_NOT_SUPPORTED) {
+                    // Already reports offload gapless supported. No need to report offload support.
+                    continue;
+                }
+                if ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD)
+                        != AUDIO_OUTPUT_FLAG_NONE) {
+                    // If offload gapless is reported, no need to report offload support.
+                    directMode = (audio_direct_mode_t) ((directMode &
+                            ~AUDIO_DIRECT_OFFLOAD_SUPPORTED) |
+                            AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED);
+                } else {
+                    directMode = (audio_direct_mode_t)(directMode | AUDIO_DIRECT_OFFLOAD_SUPPORTED);
+                }
+            } else {
+                directMode = (audio_direct_mode_t) (directMode | AUDIO_DIRECT_BITSTREAM_SUPPORTED);
+            }
+        }
+    }
+    return directMode;
+}
+
+status_t AudioPolicyManager::getDirectProfilesForAttributes(const audio_attributes_t* attr,
+                                                AudioProfileVector& audioProfilesVector) {
+    AudioDeviceTypeAddrVector devices;
+    status_t status = getDevicesForAttributes(*attr, &devices, false /* forVolume */);
+    if (status != OK) {
+        return status;
+    }
+    ALOGV("%s: found %zu output devices for attributes.", __func__, devices.size());
+    if (devices.empty()) {
+        return OK; // no output devices for the attributes
+    }
+
+    for (const auto& hwModule : mHwModules) {
+        // the MSD module checks for different conditions
+        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+            continue;
+        }
+        for (const auto& outputProfile : hwModule->getOutputProfiles()) {
+            if (!outputProfile->asAudioPort()->isDirectOutput()) {
+                continue;
+            }
+            // allow only profiles that support all the available and routed devices
+            if (outputProfile->getSupportedDevices().getDevicesFromDeviceTypeAddrVec(devices).size()
+                    != devices.size()) {
+                continue;
+            }
+            audioProfilesVector.addAllValidProfiles(
+                    outputProfile->asAudioPort()->getAudioProfiles());
+        }
+    }
+
+    // add the direct profiles from MSD if present and has audio patches to all the output(s)
+    const auto& msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    if (msdModule != nullptr) {
+        if (msdHasPatchesToAllDevices(devices)) {
+            ALOGV("%s: MSD audio patches set to all output devices.", __func__);
+            for (const auto& outputProfile : msdModule->getOutputProfiles()) {
+                if (!outputProfile->asAudioPort()->isDirectOutput()) {
+                    continue;
+                }
+                audioProfilesVector.addAllValidProfiles(
+                        outputProfile->asAudioPort()->getAudioProfiles());
+            }
+        } else {
+            ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
+        }
+    }
+
+    return NO_ERROR;
 }
 
 status_t AudioPolicyManager::listAudioPorts(audio_port_role_t role,
@@ -3824,17 +4227,15 @@
     return BAD_VALUE;
 }
 
-status_t AudioPolicyManager::createAudioPatchInternal(const struct audio_patch *patch,
-                                                      audio_patch_handle_t *handle,
-                                                      uid_t uid, uint32_t delayMs,
-                                                      const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch,
+                                              audio_patch_handle_t *handle,
+                                              uid_t uid)
 {
     ALOGV("%s", __func__);
     if (handle == NULL || patch == NULL) {
         return BAD_VALUE;
     }
     ALOGV("%s num sources %d num sinks %d", __func__, patch->num_sources, patch->num_sinks);
-
     if (!audio_patch_is_valid(patch)) {
         return BAD_VALUE;
     }
@@ -3842,7 +4243,6 @@
     if (patch->num_sources > 1) {
         return INVALID_OPERATION;
     }
-
     if (patch->sources[0].role != AUDIO_PORT_ROLE_SOURCE) {
         return INVALID_OPERATION;
     }
@@ -3852,6 +4252,86 @@
         }
     }
 
+    sp<DeviceDescriptor> srcDevice = mAvailableInputDevices.getDeviceFromId(patch->sources[0].id);
+    sp<DeviceDescriptor> sinkDevice = mAvailableOutputDevices.getDeviceFromId(patch->sinks[0].id);
+    if (srcDevice == nullptr || sinkDevice == nullptr) {
+        ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s between source %s and sink %s", __func__,
+            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+    audio_port_handle_t portId = PolicyAudioPort::getNextUniqueId();
+    // Default attributes, default volume priority, not to interfere with non raw audio patches.
+    audio_attributes_t attributes = attributes_initializer(AUDIO_USAGE_MEDIA);
+    const struct audio_port_config *source = &patch->sources[0];
+    sp<SourceClientDescriptor> sourceDesc =
+            new InternalSourceClientDescriptor(
+                portId, uid, attributes, *source, srcDevice, sinkDevice,
+                mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes));
+
+    status_t status =
+            connectAudioSourceToSink(sourceDesc, sinkDevice, patch, *handle, uid, 0 /* delayMs */);
+
+    if (status != NO_ERROR) {
+        return INVALID_OPERATION;
+    }
+    mAudioSources.add(portId, sourceDesc);
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::connectAudioSourceToSink(
+        const sp<SourceClientDescriptor>& sourceDesc, const sp<DeviceDescriptor> &sinkDevice,
+        const struct audio_patch *patch,
+        audio_patch_handle_t &handle,
+        uid_t uid, uint32_t delayMs)
+{
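+    // Create the device to device patch first. For non MSD patches, also start the software
+    // bridge output associated with the source and attach the source as a client, unwinding
+    // through the goto labels below if any step fails.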
+    status_t status = createAudioPatchInternal(patch, &handle, uid, delayMs, sourceDesc);
+    if (status != NO_ERROR || mAudioPatches.indexOfKey(handle) < 0) {
+        ALOGW("%s patch panel could not connect device patch, error %d", __func__, status);
+        return INVALID_OPERATION;
+    }
+    sourceDesc->connect(handle, sinkDevice);
+    if (isMsdPatch(handle)) {
+        return NO_ERROR;
+    }
+    // SW Bridge? (@todo: HW bridge, keep track of HwOutput for device selection "reconsideration")
+    sp<SwAudioOutputDescriptor> swOutput = sourceDesc->swOutput().promote();
+    ALOG_ASSERT(swOutput != nullptr, "%s: a swOutput shall always be associated", __func__);
+    if (swOutput->getClient(sourceDesc->portId()) != nullptr) {
+        ALOGW("%s source portId has already been attached to outputDesc", __func__);
+        goto FailurePatchAdded;
+    }
+    status = swOutput->start();
+    if (status != NO_ERROR) {
+        goto FailureSourceAdded;
+    }
+    swOutput->addClient(sourceDesc);
+    status = startSource(swOutput, sourceDesc, &delayMs);
+    if (status != NO_ERROR) {
+        ALOGW("%s failed to start source, error %d", __FUNCTION__, status);
+        goto FailureSourceActive;
+    }
+    if (delayMs != 0) {
+        usleep(delayMs * 1000);
+    }
+    return NO_ERROR;
+
+FailureSourceActive:
+    swOutput->stop();
+    releaseOutput(sourceDesc->portId());
+FailureSourceAdded:
+    sourceDesc->setSwOutput(nullptr);
+FailurePatchAdded:
+    releaseAudioPatchInternal(handle);
+    return INVALID_OPERATION;
+}
+
+status_t AudioPolicyManager::createAudioPatchInternal(const struct audio_patch *patch,
+                                                      audio_patch_handle_t *handle,
+                                                      uid_t uid, uint32_t delayMs,
+                                                      const sp<SourceClientDescriptor>& sourceDesc)
+{
+    ALOGV("%s num sources %d num sinks %d", __func__, patch->num_sources, patch->num_sinks);
     sp<AudioPatch> patchDesc;
     ssize_t index = mAudioPatches.indexOfKey(*handle);
 
@@ -4040,7 +4520,7 @@
                 // in config XML to reach the sink so that is can be declared as available.
                 audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
                 sp<SwAudioOutputDescriptor> outputDesc = nullptr;
-                if (sourceDesc != nullptr) {
+                if (!sourceDesc->isInternal()) {
                     // take care of dynamic routing for SwOutput selection,
                     audio_attributes_t attributes = sourceDesc->attributes();
                     audio_stream_type_t stream = sourceDesc->stream();
@@ -4053,10 +4533,11 @@
                     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
                     bool isRequestedDeviceForExclusiveUse = false;
                     output_type_t outputType;
+                    bool isSpatialized;
                     getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
                                         &stream, sourceDesc->uid(), &config, &flags,
                                         &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
-                                        nullptr, &outputType);
+                                        nullptr, &outputType, &isSpatialized);
                     if (output == AUDIO_IO_HANDLE_NONE) {
                         ALOGV("%s no output for device %s",
                               __FUNCTION__, sinkDevice->toString().c_str());
@@ -4068,44 +4549,49 @@
                         return INVALID_OPERATION;
                     }
                     sourceDesc->setSwOutput(outputDesc);
+                } else {
+                    // Same for "raw patches", i.e. patches created via the createAudioPatch API
+                    SortedVector<audio_io_handle_t> outputs =
+                            getOutputsForDevices(DeviceVector(sinkDevice), mOutputs);
+                    // if the sink device is reachable via an opened output stream, request to
+                    // go via this output stream by adding a second source to the patch
+                    // description
+                    output = selectOutput(outputs);
+                    if (output == AUDIO_IO_HANDLE_NONE) {
+                        ALOGE("%s no output available for internal patch sink", __func__);
+                        return INVALID_OPERATION;
+                    }
+                    outputDesc = mOutputs.valueFor(output);
+                    if (outputDesc->isDuplicated()) {
+                        ALOGV("%s output for device %s is duplicated",
+                              __func__, sinkDevice->toString().c_str());
+                        return INVALID_OPERATION;
+                    }
+                    sourceDesc->setSwOutput(outputDesc);
                 }
                 // create a software bridge in PatchPanel if:
                 // - source and sink devices are on different HW modules OR
                 // - audio HAL version is < 3.0
                 // - audio HAL version is >= 3.0 but no route has been declared between devices
-                // - called from startAudioSource (aka sourceDesc != nullptr) and source device does
-                //   not have a gain controller
+                // - called from startAudioSource (aka sourceDesc is not internal) and source device
+                //   does not have a gain controller
                 if (!srcDevice->hasSameHwModuleAs(sinkDevice) ||
                         (srcDevice->getModuleVersionMajor() < 3) ||
                         !srcDevice->getModule()->supportsPatch(srcDevice, sinkDevice) ||
-                        (sourceDesc != nullptr &&
+                        (!sourceDesc->isInternal() &&
                          srcDevice->getAudioPort()->getGains().size() == 0)) {
                     // support only one sink device for now to simplify output selection logic
                     if (patch->num_sinks > 1) {
                         return INVALID_OPERATION;
                     }
-                    if (sourceDesc == nullptr) {
-                        SortedVector<audio_io_handle_t> outputs =
-                                getOutputsForDevices(DeviceVector(sinkDevice), mOutputs);
-                        // if the sink device is reachable via an opened output stream, request to
-                        // go via this output stream by adding a second source to the patch
-                        // description
-                        output = selectOutput(outputs);
-                        if (output != AUDIO_IO_HANDLE_NONE) {
-                            outputDesc = mOutputs.valueFor(output);
-                            if (outputDesc->isDuplicated()) {
-                                ALOGV("%s output for device %s is duplicated",
-                                      __FUNCTION__, sinkDevice->toString().c_str());
-                                return INVALID_OPERATION;
-                            }
-                        }
-                    }
+                    sourceDesc->setUseSwBridge();
                     if (outputDesc != nullptr) {
                         audio_port_config srcMixPortConfig = {};
-                        outputDesc->toAudioPortConfig(&srcMixPortConfig, &patch->sources[0]);
+                        outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
-                        srcMixPortConfig.ext.mix.usecase.stream = sourceDesc != nullptr ?
-                                    sourceDesc->stream() : AUDIO_STREAM_PATCH;
+                        srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+                                    mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
+                                    AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
                     }
                 }
@@ -4128,11 +4614,9 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::releaseAudioPatch(audio_patch_handle_t handle,
-                                                  uid_t uid)
+status_t AudioPolicyManager::releaseAudioPatch(audio_patch_handle_t handle, uid_t uid)
 {
-    ALOGV("releaseAudioPatch() patch %d", handle);
-
+    ALOGV("%s patch %d", __func__, handle);
     ssize_t index = mAudioPatches.indexOfKey(handle);
 
     if (index < 0) {
@@ -4144,11 +4628,21 @@
     if (patchDesc->getUid() != mUidCached && uid != patchDesc->getUid()) {
         return INVALID_OPERATION;
     }
-    return releaseAudioPatchInternal(handle);
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    for (size_t i = 0; i < mAudioSources.size(); i++)  {
+        sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
+        if (sourceDesc != nullptr && sourceDesc->getPatchHandle() == handle) {
+            portId = sourceDesc->portId();
+            break;
+        }
+    }
+    return portId != AUDIO_PORT_HANDLE_NONE ?
+                stopAudioSource(portId) : releaseAudioPatchInternal(handle);
 }
 
 status_t AudioPolicyManager::releaseAudioPatchInternal(audio_patch_handle_t handle,
-                                                       uint32_t delayMs)
+                                                       uint32_t delayMs,
+                                                       const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s patch %d", __func__, handle);
     if (mAudioPatches.indexOfKey(handle) < 0) {
@@ -4189,26 +4683,29 @@
             removeAudioPatch(patchDesc->getHandle());
             nextAudioPortGeneration();
             mpClientInterface->onAudioPatchListUpdate();
-            // SW Bridge
+            // SW or HW Bridge
+            sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+            audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
             if (patch->num_sources > 1 && patch->sources[1].type == AUDIO_PORT_TYPE_MIX) {
-                sp<SwAudioOutputDescriptor> outputDesc =
-                        mOutputs.getOutputFromId(patch->sources[1].id);
-                if (outputDesc == NULL) {
-                    ALOGW("%s output not found for id %d", __func__, patch->sources[0].id);
-                    // releaseOutput has already called closeOuput in case of direct output
-                    return NO_ERROR;
-                }
-                if (patchDesc->getHandle() != outputDesc->getPatchHandle()) {
-                    // force SwOutput patch removal as AF counter part patch has already gone.
-                    ALOGV("%s reset patch handle on Output as different from SWBridge", __func__);
-                    removeAudioPatch(outputDesc->getPatchHandle());
-                }
-                outputDesc->setPatchHandle(AUDIO_PATCH_HANDLE_NONE);
+                outputDesc = mOutputs.getOutputFromId(patch->sources[1].id);
+            } else if (patch->num_sources == 1 && sourceDesc != nullptr) {
+                outputDesc = sourceDesc->swOutput().promote();
+            }
+            if (outputDesc == nullptr) {
+                ALOGW("%s no output for id %d", __func__, patch->sources[0].id);
+                // releaseOutput has already called closeOutput in case of direct output
+                return NO_ERROR;
+            }
+            if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
+                resetOutputDevice(outputDesc);
+            } else {
+                // Reuse patch handle if still valid / do not force rerouting if still routed
+                patchHandle = outputDesc->getPatchHandle();
                 setOutputDevices(outputDesc,
                                  getNewOutputDevices(outputDesc, true /*fromCache*/),
-                                 true, /*force*/
+                                 patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
                                  0,
-                                 NULL);
+                                 patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
             }
         } else {
             return BAD_VALUE;
@@ -4454,6 +4951,18 @@
     return status;
 }
 
+sp<SourceClientDescriptor> AudioPolicyManager::startAudioSourceInternal(
+        const struct audio_port_config *source, const audio_attributes_t *attributes, uid_t uid)
+{
+    ALOGV("%s", __FUNCTION__);
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+
+    status_t status = startAudioSource(source, attributes, &portId, uid);
+    ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
+    return mAudioSources.valueFor(portId);
+}
+
+
 status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
@@ -4478,52 +4987,9 @@
     PatchBuilder patchBuilder;
     patchBuilder.addSink(sinkDevice).addSource(srcDevice);
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
-    status_t status =
-            createAudioPatchInternal(patchBuilder.patch(), &handle, mUidCached, 0, sourceDesc);
-    if (status != NO_ERROR || mAudioPatches.indexOfKey(handle) < 0) {
-        ALOGW("%s patch panel could not connect device patch, error %d", __func__, status);
-        return INVALID_OPERATION;
-    }
-    sourceDesc->connect(handle, sinkDevice);
-    // SW Bridge? (@todo: HW bridge, keep track of HwOutput for device selection "reconsideration")
-    sp<SwAudioOutputDescriptor> swOutput = sourceDesc->swOutput().promote();
-    if (swOutput != 0) {
-        status = swOutput->start();
-        if (status != NO_ERROR) {
-            goto FailureSourceAdded;
-        }
-        if (swOutput->getClient(sourceDesc->portId()) != nullptr) {
-            ALOGW("%s source portId has already been attached to outputDesc", __func__);
-            goto FailureReleasePatch;
-        }
-        swOutput->addClient(sourceDesc);
-        uint32_t delayMs = 0;
-        status = startSource(swOutput, sourceDesc, &delayMs);
-        if (status != NO_ERROR) {
-            ALOGW("%s failed to start source, error %d", __FUNCTION__, status);
-            goto FailureSourceActive;
-        }
-        if (delayMs != 0) {
-            usleep(delayMs * 1000);
-        }
-    } else {
-        sp<HwAudioOutputDescriptor> hwOutputDesc = sourceDesc->hwOutput().promote();
-        if (hwOutputDesc != 0) {
-          //   create Hwoutput and add to mHwOutputs
-        } else {
-            ALOGW("%s source has neither SW nor HW output", __FUNCTION__);
-        }
-    }
-    return NO_ERROR;
 
-FailureSourceActive:
-    swOutput->stop();
-    releaseOutput(sourceDesc->portId());
-FailureSourceAdded:
-    sourceDesc->setSwOutput(nullptr);
-FailureReleasePatch:
-    releaseAudioPatchInternal(handle);
-    return INVALID_OPERATION;
+    return connectAudioSourceToSink(
+                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
 }
 
 status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -4786,6 +5252,37 @@
     return false;
 }
 
+bool AudioPolicyManager::isUltrasoundSupported()
+{
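+    // Ultrasound is supported only if at least one output profile and at least one input
+    // profile, possibly on different HW modules, declare the ULTRASOUND flag.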
+    bool hasUltrasoundOutput = false;
+    bool hasUltrasoundInput = false;
+    for (const auto& hwModule : mHwModules) {
+        const OutputProfileCollection &outputProfiles = hwModule->getOutputProfiles();
+        if (!hasUltrasoundOutput) {
+            for (const auto &outProfile : outputProfiles) {
+                if (outProfile->getFlags() & AUDIO_OUTPUT_FLAG_ULTRASOUND) {
+                    hasUltrasoundOutput = true;
+                    break;
+                }
+            }
+        }
+
+        const InputProfileCollection &inputProfiles = hwModule->getInputProfiles();
+        if (!hasUltrasoundInput) {
+            for (const auto &inputProfile : inputProfiles) {
+                if (inputProfile->getFlags() & AUDIO_INPUT_FLAG_ULTRASOUND) {
+                    hasUltrasoundInput = true;
+                    break;
+                }
+            }
+        }
+
+        if (hasUltrasoundOutput && hasUltrasoundInput) {
+            return true;
+        }
+    }
+    return false;
+}
+
 bool AudioPolicyManager::isCallScreenModeSupported()
 {
     return getConfig().isCallScreenModeSupported();
@@ -4818,7 +5315,7 @@
             ALOGW("%s source has neither SW nor HW output", __FUNCTION__);
         }
     }
-    status_t status = releaseAudioPatchInternal(sourceDesc->getPatchHandle());
+    status_t status = releaseAudioPatchInternal(sourceDesc->getPatchHandle(), 0, sourceDesc);
     sourceDesc->disconnect();
     return status;
 }
@@ -4839,25 +5336,9 @@
     return source;
 }
 
-/* static */
-bool AudioPolicyManager::isChannelMaskSpatialized(audio_channel_mask_t channels) {
-    switch (channels) {
-        case AUDIO_CHANNEL_OUT_5POINT1:
-        case AUDIO_CHANNEL_OUT_5POINT1POINT2:
-        case AUDIO_CHANNEL_OUT_5POINT1POINT4:
-        case AUDIO_CHANNEL_OUT_7POINT1:
-        case AUDIO_CHANNEL_OUT_7POINT1POINT2:
-        case AUDIO_CHANNEL_OUT_7POINT1POINT4:
-            return true;
-        default:
-            return false;
-    }
-}
-
 bool AudioPolicyManager::canBeSpatializedInt(const audio_attributes_t *attr,
                                       const audio_config_t *config,
-                                      const AudioDeviceTypeAddrVector &devices,
-                                      bool allowCurrentOutputReconfig)  const
+                                      const AudioDeviceTypeAddrVector &devices)  const
 {
     // The caller can have the audio attributes criteria ignored by either passing a null ptr or
     // the AUDIO_ATTRIBUTES_INITIALIZER value.
@@ -4886,20 +5367,11 @@
     // the AUDIO_CONFIG_INITIALIZER value.
     // If an audio config is specified, current policy is to only allow spatialization for
     // some positional channel masks.
-    // If the spatializer output is already opened, only channel masks included in the
-    // spatializer output mixer channel mask are allowed.
 
     if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        if (!isChannelMaskSpatialized(config->channel_mask)) {
+        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
             return false;
         }
-        if (!allowCurrentOutputReconfig && mSpatializerOutput != nullptr
-                && mSpatializerOutput->mProfile == profile) {
-            if ((config->channel_mask & mSpatializerOutput->mMixerChannelMask)
-                    != config->channel_mask) {
-                return false;
-            }
-        }
     }
     return true;
 }
@@ -4915,8 +5387,7 @@
             audio_config_base_t clientConfig = client->config();
             audio_config_t config = audio_config_initializer(&clientConfig);
             if (desc != mSpatializerOutput
-                    && canBeSpatializedInt(&attr, &config,
-                            devicesTypeAddress, false /* allowCurrentOutputReconfig */)) {
+                    && canBeSpatializedInt(&attr, &config, devicesTypeAddress)) {
                 streamsToInvalidate.insert(client->stream());
             }
         }
@@ -4927,6 +5398,29 @@
     }
 }
 
+
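+// Returns false as soon as another opened output supports one of the same devices with
+// equivalent encoded format support, i.e. returns true only if this output may be the sole
+// route to some of its supported devices.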
+bool AudioPolicyManager::isOutputOnlyAvailableRouteToSomeDevice(
+        const sp<SwAudioOutputDescriptor>& outputDesc) {
+    if (outputDesc->isDuplicated()) {
+        return false;
+    }
+    DeviceVector devices = outputDesc->supportedDevices();
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+        if (desc == outputDesc || desc->isDuplicated()) {
+            continue;
+        }
+        DeviceVector sharedDevices = desc->filterSupportedDevices(devices);
+        if (!sharedDevices.isEmpty()
+                && (desc->devicesSupportEncodedFormats(sharedDevices.types())
+                    == outputDesc->devicesSupportEncodedFormats(sharedDevices.types()))) {
+            return false;
+        }
+    }
+    return true;
+}
+
+
 status_t AudioPolicyManager::getSpatializerOutput(const audio_config_base_t *mixerConfig,
                                                         const audio_attributes_t *attr,
                                                         audio_io_handle_t *output) {
@@ -4940,82 +5434,68 @@
         config = audio_config_initializer(mixerConfig);
         configPtr = &config;
     }
-    if (!canBeSpatializedInt(
-            attr, configPtr, devicesTypeAddress)) {
-        ALOGW("%s provided attributes or mixer config cannot be spatialized", __func__);
+    if (!canBeSpatializedInt(attr, configPtr, devicesTypeAddress)) {
+        ALOGV("%s provided attributes or mixer config cannot be spatialized", __func__);
         return BAD_VALUE;
     }
 
     sp<IOProfile> profile =
             getSpatializerOutputProfile(configPtr, devicesTypeAddress);
     if (profile == nullptr) {
-        ALOGW("%s no suitable output profile for provided attributes or mixer config", __func__);
+        ALOGV("%s no suitable output profile for provided attributes or mixer config", __func__);
         return BAD_VALUE;
     }
 
-    if (mSpatializerOutput != nullptr && mSpatializerOutput->mProfile == profile
-            && configPtr != nullptr
-            && configPtr->channel_mask == mSpatializerOutput->mMixerChannelMask) {
-        *output = mSpatializerOutput->mIoHandle;
-        ALOGV("%s returns current spatializer output %d", __func__, *output);
-        return NO_ERROR;
-    }
-    mSpatializerOutput.clear();
+    std::vector<sp<SwAudioOutputDescriptor>> spatializerOutputs;
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-        if (!desc->isDuplicated() && desc->mProfile == profile) {
-            ALOGV("%s found output %d for spatializer profile", __func__, desc->mIoHandle);
-            mSpatializerOutput = desc;
-            break;
+        if (!desc->isDuplicated()
+                && (desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
+            spatializerOutputs.push_back(desc);
+            ALOGV("%s adding opened spatializer Output %d", __func__, desc->mIoHandle);
         }
     }
-    if (mSpatializerOutput == nullptr) {
-        ALOGW("%s no opened spatializer output for profile %s",
-                __func__, profile->getName().c_str());
-        return BAD_VALUE;
+    mSpatializerOutput.clear();
+    bool outputsChanged = false;
+    for (const auto& desc : spatializerOutputs) {
+        if (desc->mProfile == profile
+                && (configPtr == nullptr
+                   || configPtr->channel_mask == desc->mMixerChannelMask)) {
+            mSpatializerOutput = desc;
+            ALOGV("%s reusing current spatializer output %d", __func__, desc->mIoHandle);
+        } else {
+            ALOGV("%s closing spatializerOutput output %d to match channel mask %#x"
+                    " and devices %s", __func__, desc->mIoHandle,
+                    configPtr != nullptr ? configPtr->channel_mask : 0,
+                    devices.toString().c_str());
+            closeOutput(desc->mIoHandle);
+            outputsChanged = true;
+        }
     }
 
-    if (configPtr != nullptr
-            && configPtr->channel_mask != mSpatializerOutput->mMixerChannelMask) {
-        audio_config_base_t savedMixerConfig = {
-            .sample_rate = mSpatializerOutput->getSamplingRate(),
-            .format = mSpatializerOutput->getFormat(),
-            .channel_mask = mSpatializerOutput->mMixerChannelMask,
-        };
-        DeviceVector savedDevices = mSpatializerOutput->devices();
-
-        ALOGV("%s reopening spatializer output to match channel mask %#x (current mask %#x)",
-            __func__, configPtr->channel_mask, mSpatializerOutput->mMixerChannelMask);
-
-        closeOutput(mSpatializerOutput->mIoHandle);
-        //from now on mSpatializerOutput is null
-
+    if (mSpatializerOutput == nullptr) {
         sp<SwAudioOutputDescriptor> desc =
                 openOutputWithProfileAndDevice(profile, devices, mixerConfig);
-        if (desc == nullptr) {
-            // re open the spatializer output with previous channel mask
-            desc = openOutputWithProfileAndDevice(profile, savedDevices, &savedMixerConfig);
-            if (desc == nullptr) {
-                ALOGE("%s failed to restore mSpatializerOutput with previous config", __func__);
-            } else {
-                mSpatializerOutput = desc;
-            }
-            mPreviousOutputs = mOutputs;
-            mpClientInterface->onAudioPortListUpdate();
-            *output = AUDIO_IO_HANDLE_NONE;
-            ALOGW("%s could not open spatializer output with requested config", __func__);
-            return BAD_VALUE;
+        if (desc != nullptr) {
+            mSpatializerOutput = desc;
+            outputsChanged = true;
         }
-        mSpatializerOutput = desc;
-        mPreviousOutputs = mOutputs;
-        mpClientInterface->onAudioPortListUpdate();
     }
 
     checkVirtualizerClientRoutes();
 
+    if (outputsChanged) {
+        mPreviousOutputs = mOutputs;
+        mpClientInterface->onAudioPortListUpdate();
+    }
+
+    if (mSpatializerOutput == nullptr) {
+        ALOGV("%s could not open spatializer output with requested config", __func__);
+        return BAD_VALUE;
+    }
     *output = mSpatializerOutput->mIoHandle;
-    ALOGV("%s returns new spatializer output %d", __func__, *output);
-    return NO_ERROR;
+    ALOGV("%s returning new spatializer output %d", __func__, *output);
+    return OK;
 }
 
 status_t AudioPolicyManager::releaseSpatializerOutput(audio_io_handle_t output) {
@@ -5026,9 +5506,12 @@
         return BAD_VALUE;
     }
 
-    mSpatializerOutput.clear();
-
-    checkVirtualizerClientRoutes();
+    if (!isOutputOnlyAvailableRouteToSomeDevice(mSpatializerOutput)) {
+        ALOGV("%s closing spatializer output %d", __func__, mSpatializerOutput->mIoHandle);
+        closeOutput(mSpatializerOutput->mIoHandle);
+        //from now on mSpatializerOutput is null
+        checkVirtualizerClientRoutes();
+    }
 
     return NO_ERROR;
 }
@@ -5082,8 +5565,6 @@
         ALOGE("could not load audio policy configuration file, setting defaults");
         getConfig().setDefault();
     }
-    //TODO: b/193496180 use spatializer flag at audio HAL when available
-    getConfig().convertSpatializerFlag();
 }
 
 status_t AudioPolicyManager::initialize() {
@@ -5107,6 +5588,7 @@
         return status;
     }
 
+    mEngine->updateDeviceSelectionCache();
     mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
         mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
 
@@ -5186,9 +5668,8 @@
             continue;
         }
         mHwModules.push_back(hwModule);
-        // open all output streams needed to access attached devices
-        // except for direct output streams that are only opened when they are actually
-        // required by an app.
+        // open all output streams needed to access attached devices.
+        // Direct outputs are closed immediately after checking the availability of attached devices.
         // This also validates mAvailableOutputDevices list
         for (const auto& outProfile : hwModule->getOutputProfiles()) {
             if (!outProfile->canOpenNewIo()) {
@@ -5200,7 +5681,8 @@
                 ALOGW("Output profile contains no device on module %s", hwModule->getName());
                 continue;
             }
-            if ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_TTS) != 0) {
+            if ((outProfile->getFlags() & AUDIO_OUTPUT_FLAG_TTS) != 0 ||
+                (outProfile->getFlags() & AUDIO_OUTPUT_FLAG_ULTRASOUND) != 0) {
                 mTtsOutputAvailable = true;
             }
 
@@ -5306,6 +5788,21 @@
             inputDesc->close();
         }
     }
+
+    // Check whether spatializer outputs can be closed until they are actually needed.
+    // mOutputs vector never contains duplicated outputs at this point.
+    std::vector<audio_io_handle_t> outputsClosed;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+        if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
+                && !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
+            outputsClosed.push_back(desc->mIoHandle);
+            desc->close();
+        }
+    }
+    for (auto output : outputsClosed) {
+        removeOutput(output);
+    }
 }
 
 void AudioPolicyManager::addOutput(audio_io_handle_t output,
@@ -5585,7 +6082,7 @@
             } // endif input != 0
 
             if (input == AUDIO_IO_HANDLE_NONE) {
-                ALOGW("%s could not open input for device %s", __func__,  
+                ALOGW("%s could not open input for device %s", __func__,
                        device->toString().c_str());
                 profiles.removeAt(profile_index);
                 profile_index--;
@@ -5792,7 +6289,7 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
                 && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
-                && !isCallRxAudioSource(sourceDesc)) {
+                && !isCallRxAudioSource(sourceDesc) && !sourceDesc->isInternal()) {
             connectAudioSource(sourceDesc);
         }
     }
@@ -5905,7 +6402,7 @@
                                 newDevices.types());
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != nullptr && !isCallRxAudioSource(source)) {
+            if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
                 connectAudioSource(source);
             }
         }
@@ -5919,6 +6416,12 @@
             for (auto stream :  mEngine->getStreamTypesForProductStrategy(psId)) {
                 mpClientInterface->invalidateStream(stream);
             }
+            for (audio_io_handle_t srcOut : srcOutputs) {
+                sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
+                if (desc == nullptr) continue;
+
+                desc->setTracksInvalidatedStatusByStrategy(psId);
+            }
         }
     }
 }
@@ -5957,14 +6460,20 @@
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
                     secondaryDescs.begin(), secondaryDescs.end())) {
-                std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
-                std::vector<audio_io_handle_t> secondaryOutputIds;
-                for (const auto& secondaryDesc : secondaryDescs) {
-                    secondaryOutputIds.push_back(secondaryDesc->mIoHandle);
-                    weakSecondaryDescs.push_back(secondaryDesc);
+                if (!audio_is_linear_pcm(client->config().format)) {
+                    // If the format is not PCM, the tracks should be invalidated to get correct
+                    // behavior when the secondary output is changed.
+                    streamsToInvalidate.insert(client->stream());
+                } else {
+                    std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
+                    std::vector<audio_io_handle_t> secondaryOutputIds;
+                    for (const auto &secondaryDesc: secondaryDescs) {
+                        secondaryOutputIds.push_back(secondaryDesc->mIoHandle);
+                        weakSecondaryDescs.push_back(secondaryDesc);
+                    }
+                    trackSecondaryOutputs.emplace(client->portId(), secondaryOutputIds);
+                    client->setSecondaryOutputs(std::move(weakSecondaryDescs));
                 }
-                trackSecondaryOutputs.emplace(client->portId(), secondaryOutputIds);
-                client->setSecondaryOutputs(std::move(weakSecondaryDescs));
             }
         }
     }
@@ -5988,6 +6497,18 @@
     return false;
 }
 
+bool AudioPolicyManager::isHearingAidUsedForComm() const {
+    DeviceVector devices = mEngine->getOutputDevicesForStream(AUDIO_STREAM_VOICE_CALL,
+                                                       true /*fromCache*/);
+    for (const auto &device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_HEARING_AID) {
+            return true;
+        }
+    }
+    return false;
+}
+
+
 void AudioPolicyManager::checkA2dpSuspend()
 {
     audio_io_handle_t a2dpOutput = mOutputs.getA2dpOutput();
@@ -6078,9 +6599,10 @@
 
         auto doGetOutputDevicesForVoice = [&]() {
             return hasVoiceStream(streams) && (outputDesc == mPrimaryOutput ||
-                outputDesc->isActive(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) &&
+                outputDesc->isActive(toVolumeSource(AUDIO_STREAM_VOICE_CALL, false))) &&
                 (isInCall() ||
-                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc));
+                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) &&
+                !isStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE, 0);
         };
 
         // With low-latency playing on speaker, music on WFD, when the first low-latency
@@ -6132,11 +6654,11 @@
     uid_t uid;
     sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
     if (topClient != nullptr) {
-      attributes = topClient->attributes();
-      uid = topClient->uid();
+        attributes = topClient->attributes();
+        uid = topClient->uid();
     } else {
-      attributes = { .source = AUDIO_SOURCE_DEFAULT };
-      uid = 0;
+        attributes = { .source = AUDIO_SOURCE_DEFAULT };
+        uid = 0;
     }
 
     if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
@@ -6154,66 +6676,71 @@
     return (stream1 == stream2);
 }
 
-audio_devices_t AudioPolicyManager::getDevicesForStream(audio_stream_type_t stream) {
-    // By checking the range of stream before calling getStrategy, we avoid
-    // getOutputDevicesForStream's behavior for invalid streams.
-    // engine's getOutputDevicesForStream would fallback on its default behavior (most probably
-    // device for music stream), but we want to return the empty set.
-    if (stream < AUDIO_STREAM_MIN || stream >= AUDIO_STREAM_PUBLIC_CNT) {
-        return AUDIO_DEVICE_NONE;
-    }
-    DeviceVector activeDevices;
-    DeviceVector devices;
-    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_PUBLIC_CNT; ++i) {
-        const audio_stream_type_t curStream{static_cast<audio_stream_type_t>(i)};
-        if (!streamsMatchForvolume(stream, curStream)) {
-            continue;
-        }
-        DeviceVector curDevices = mEngine->getOutputDevicesForStream(curStream, false/*fromCache*/);
-        devices.merge(curDevices);
-        for (audio_io_handle_t output : getOutputsForDevices(curDevices, mOutputs)) {
-            sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
-            if (outputDesc->isActive(toVolumeSource(curStream))) {
-                activeDevices.merge(outputDesc->devices());
-            }
-        }
-    }
-
-    // Favor devices selected on active streams if any to report correct device in case of
-    // explicit device selection
-    if (!activeDevices.isEmpty()) {
-        devices = activeDevices;
-    }
-    /*Filter SPEAKER_SAFE out of results, as AudioService doesn't know about it
-      and doesn't really need to.*/
-    DeviceVector speakerSafeDevices = devices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
-    if (!speakerSafeDevices.isEmpty()) {
-        devices.merge(mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
-        devices.remove(speakerSafeDevices);
-    }
-    // FIXME: use DeviceTypeSet when Java layer is ready for it.
-    return deviceTypesToBitMask(devices.types());
-}
-
+// TODO - consider MSD routes b/214971780
 status_t AudioPolicyManager::getDevicesForAttributes(
-        const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices) {
+        const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices, bool forVolume) {
     if (devices == nullptr) {
         return BAD_VALUE;
     }
+
+    // Devices are determined in the following precedence:
+    //
+    // 1) Devices associated with a dynamic policy matching the attributes.  This is often
+    //    a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
+    //
+    // If no such dynamic policy then
+    // 2) Devices containing an active client using setPreferredDevice
+    //    with same strategy as the attributes.
+    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
+    //
+    // If no corresponding active client with setPreferredDevice then
+    // 3) Devices associated with the strategy determined by the attributes
+    //    (from the default Engine::getOutputDevicesForAttributes() implementation).
+    //
+    // See related getOutputForAttrInt().
+
     // check dynamic policies but only for primary descriptors (secondary not used for audible
     // audio routing, only used for duplication for playback capture)
     sp<AudioPolicyMix> policyMix;
     status_t status = mPolicyMixes.getOutputForAttr(attr, 0 /*uid unknown here*/,
-            AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr);
+            AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr /* secondaryMixes */);
     if (status != OK) {
         return status;
     }
-    if (policyMix != nullptr && policyMix->getOutput() != nullptr) {
-        AudioDeviceTypeAddr device(policyMix->mDeviceType, policyMix->mDeviceAddress.c_str());
-        devices->push_back(device);
-        return NO_ERROR;
+
+    DeviceVector curDevices;
+    if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
+            // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
+            // as they are unaffected by device/stream volume
+            // (per SwAudioOutputDescriptor::isFixedVolume()).
+            (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
+            ) {
+        sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
+                policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
+        curDevices.add(deviceDesc);
+    } else {
+        // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
+        // which selects setPreferredDevice if active.  This means forVolume call
+        // will take an active setPreferredDevice, if such exists.
+
+        curDevices = mEngine->getOutputDevicesForAttributes(
+                attr, nullptr /* preferredDevice */, false /* fromCache */);
     }
-    DeviceVector curDevices = mEngine->getOutputDevicesForAttributes(attr, nullptr, false);
+
+    if (forVolume) {
+        // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
+        // for single volume control in AudioService (such relationship should exist if
+        // SPEAKER_SAFE is present).
+        //
+        // (This is unrelated to a different device grouping as Volume::getDeviceCategory)
+        DeviceVector speakerSafeDevices =
+                curDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
+        if (!speakerSafeDevices.isEmpty()) {
+            curDevices.merge(
+                    mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
+            curDevices.remove(speakerSafeDevices);
+        }
+    }
     for (const auto& device : curDevices) {
         devices->push_back(device->getDeviceTypeAddr());
     }
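
A hedged usage sketch of the reworked query from the perspective of a caller such as AudioService resolving the devices relevant for volume; the attribute values are illustrative and error handling is simplified:

    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
    attr.usage = AUDIO_USAGE_MEDIA;
    AudioDeviceTypeAddrVector devices;
    if (getDevicesForAttributes(attr, &devices, true /* forVolume */) == NO_ERROR) {
        // With forVolume == true, SPEAKER_SAFE is reported as SPEAKER and LOOPBACK
        // remote submix mixes are skipped, per the precedence comments above.
        ALOGV("media volume applies to %zu device(s)", devices.size());
    }
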
@@ -6274,7 +6801,11 @@
         // mute/unmute AUDIO_STREAM_TTS on all outputs
         ALOGV("\t muting %d", mute);
         uint32_t maxLatency = 0;
-        auto ttsVolumeSource = toVolumeSource(AUDIO_STREAM_TTS);
+        auto ttsVolumeSource = toVolumeSource(AUDIO_STREAM_TTS, false);
+        if (ttsVolumeSource == VOLUME_SOURCE_NONE) {
+            ALOGV("\t no tts volume source available");
+            return 0;
+        }
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
@@ -6356,11 +6887,18 @@
     // different per device volumes
     if (outputDesc->isActive() && (devices != prevDevices)) {
         uint32_t tempMuteWaitMs = outputDesc->latency() * 2;
-        // temporary mute duration is conservatively set to 4 times the reported latency
-        uint32_t tempMuteDurationMs = outputDesc->latency() * 4;
+
         if (muteWaitMs < tempMuteWaitMs) {
             muteWaitMs = tempMuteWaitMs;
         }
+
+        // If the HAL reports a recommended duration, use it as the temporary mute duration to
+        // avoid truncating notifications at the start of playback; the time needed depends on
+        // how long the HAL takes to change the audio path.
+        // Otherwise, temporary mute duration is conservatively set to 4 times the reported latency.
+        uint32_t tempRecommendedMuteDuration = outputDesc->getRecommendedMuteDurationMs();
+        uint32_t tempMuteDurationMs = tempRecommendedMuteDuration > 0 ?
+                tempRecommendedMuteDuration : outputDesc->latency() * 4;
+
         for (const auto &activeVs : outputDesc->getActiveVolumeSources()) {
             // make sure that we do not start the temporary mute period too early in case of
             // delayed device change
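
A hedged restatement of the duration selection above as a standalone helper (chooseTempMuteDurationMs is hypothetical; the numbers in the comment are only an example):

    uint32_t chooseTempMuteDurationMs(const sp<SwAudioOutputDescriptor>& outputDesc) {
        // Prefer the HAL-recommended duration when one is reported; otherwise fall back to
        // 4x the reported latency (e.g. 40 ms latency -> 160 ms temporary mute).
        const uint32_t recommended = outputDesc->getRecommendedMuteDurationMs();
        return recommended > 0 ? recommended : outputDesc->latency() * 4;
    }
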
@@ -6384,7 +6922,7 @@
                                               bool force,
                                               int delayMs,
                                               audio_patch_handle_t *patchHandle,
-                                              bool requiresMuteCheck)
+                                              bool requiresMuteCheck, bool requiresVolumeCheck)
 {
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
     uint32_t muteWaitMs;
@@ -6400,6 +6938,7 @@
     // filter devices according to output selected
     DeviceVector filteredDevices = outputDesc->filterSupportedDevices(devices);
     DeviceVector prevDevices = outputDesc->devices();
+    DeviceVector availPrevDevices = mAvailableOutputDevices.filter(prevDevices);
 
     ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
 
@@ -6415,11 +6954,12 @@
         muteWaitMs = 0;
     }
 
+    bool outputRouted = outputDesc->isRouted();
+
     // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
     // output profile or if new device is not supported AND previous device(s) is(are) still
     // available (otherwise reset device must be done on the output)
-    if (!devices.isEmpty() && filteredDevices.isEmpty() &&
-            !mAvailableOutputDevices.filter(prevDevices).empty()) {
+    if (!devices.isEmpty() && filteredDevices.isEmpty() && !availPrevDevices.empty()) {
         ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
         // restore previous device after evaluating strategy mute state
         outputDesc->setDevices(prevDevices);
@@ -6432,17 +6972,20 @@
     //  AND force is not specified
     //  AND the output is connected by a valid audio patch.
     // Doing this check here allows the caller to call setOutputDevices() without conditions
-    if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) &&
-            !force && outputDesc->getPatchHandle() != 0) {
+    if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) && !force && outputRouted) {
         ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
               filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
+        if (requiresVolumeCheck && !filteredDevices.isEmpty()) {
+            ALOGV("%s setting same device on routed output, force apply volumes", __func__);
+            applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs, true /*force*/);
+        }
         return muteWaitMs;
     }
 
     ALOGV("%s changing device to %s", __func__, filteredDevices.toString().c_str());
 
     // do the routing
-    if (filteredDevices.isEmpty()) {
+    if (filteredDevices.isEmpty() || mAvailableOutputDevices.filter(filteredDevices).empty()) {
         resetOutputDevice(outputDesc, delayMs, NULL);
     } else {
         PatchBuilder patchBuilder;
@@ -6469,6 +7012,9 @@
                                                audio_patch_handle_t *patchHandle)
 {
     ssize_t index;
+    if (patchHandle == nullptr && !outputDesc->isRouted()) {
+        return INVALID_OPERATION;
+    }
     if (patchHandle) {
         index = mAudioPatches.indexOfKey(*patchHandle);
     } else {
@@ -6606,11 +7152,11 @@
     // louder than the accessibility prompt, the prompt cannot be heard, thus masking the touch
     // exploration of the dialer UI. In this situation, bring the accessibility volume closer to
     // the ringtone volume
-    const auto callVolumeSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL);
-    const auto ringVolumeSrc = toVolumeSource(AUDIO_STREAM_RING);
-    const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC);
-    const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM);
-    const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY);
+    const auto callVolumeSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
+    const auto ringVolumeSrc = toVolumeSource(AUDIO_STREAM_RING, false);
+    const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC, false);
+    const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM, false);
+    const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY, false);
 
     if (volumeSource == a11yVolumeSrc
             && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
@@ -6623,12 +7169,12 @@
     // in-call: always cap volume by voice volume + some low headroom
     if ((volumeSource != callVolumeSrc && (isInCall() ||
                                            mOutputs.isActiveLocally(callVolumeSrc))) &&
-            (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM) ||
+            (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM, false) ||
              volumeSource == ringVolumeSrc || volumeSource == musicVolumeSrc ||
              volumeSource == alarmVolumeSrc ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION) ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE) ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_DTMF) ||
+             volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION, false) ||
+             volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false) ||
+             volumeSource == toVolumeSource(AUDIO_STREAM_DTMF, false) ||
              volumeSource == a11yVolumeSrc)) {
         auto &voiceCurves = getVolumeCurves(callVolumeSrc);
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
@@ -6666,9 +7212,9 @@
              AUDIO_DEVICE_OUT_BLE_HEADSET}).empty() &&
             ((volumeSource == alarmVolumeSrc ||
               volumeSource == ringVolumeSrc) ||
-             (volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION)) ||
-             (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM)) ||
-             ((volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
+             (volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION, false)) ||
+             (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM, false)) ||
+             ((volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false)) &&
               (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_NONE))) &&
             curves.canBeMuted()) {
 
@@ -6754,19 +7300,21 @@
                outputDesc->getMuteCount(volumeSource), outputDesc->isActive(volumeSource));
         return NO_ERROR;
     }
-    VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL);
-    VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO);
-    bool isVoiceVolSrc = callVolSrc == volumeSource;
-    bool isBtScoVolSrc = btScoVolSrc == volumeSource;
+    VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
+    VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    bool isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    bool isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
     bool isScoRequested = isScoRequestedForComm();
+    bool isHAUsed = isHearingAidUsedForComm();
+
     // do not change in call volume if bluetooth is connected and vice versa
     // if sco and call follow same curves, bypass forceUseForComm
     if ((callVolSrc != btScoVolSrc) &&
             ((isVoiceVolSrc && isScoRequested) ||
-             (isBtScoVolSrc && !isScoRequested))) {
+             (isBtScoVolSrc && !(isScoRequested || isHAUsed)))) {
         ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
-             volumeSource, isScoRequested ? " " : "n ot ");
+             volumeSource, isScoRequested ? " " : " not ");
         // Do not return an error here as AudioService will always set both voice call
         // and bluetooth SCO volumes due to stream aliasing.
         return NO_ERROR;
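
With distinct voice and SCO volume curves, the guard above resolves as follows (a plain restatement, not code from the patch):

    isVoiceVolSrc && isScoRequested                  -> skip (SCO owns the comm volume)
    isBtScoVolSrc && !isScoRequested && !isHAUsed    -> skip (neither SCO nor a hearing aid in use)
    any other combination                            -> apply the volume normally
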
@@ -6775,6 +7323,11 @@
         deviceTypes = outputDesc->devices().types();
     }
 
+    if (curves.getVolumeIndexMin() < 0 || curves.getVolumeIndexMax() < 0) {
+        ALOGE("invalid volume index range");
+        return BAD_VALUE;
+    }
+
     float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
     if (outputDesc->isFixedVolume(deviceTypes) ||
             // Force VoIP volume to max for bluetooth SCO device except if muted
@@ -6782,8 +7335,9 @@
                     isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
         volumeDb = 0.0f;
     }
+    const bool muted = (index == 0) && (volumeDb != 0.0f);
     outputDesc->setVolume(
-            volumeDb, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
         float voiceVolume;
@@ -6825,8 +7379,10 @@
     for (auto attributes: mEngine->getAllAttributesForProductStrategy(strategy)) {
         ALOGVV("%s() attributes %s, mute %d, output ID %d", __func__,
                toString(attributes).c_str(), on, outputDesc->getId());
-        VolumeSource source = toVolumeSource(attributes);
-        if (std::find(begin(sourcesToMute), end(sourcesToMute), source) == end(sourcesToMute)) {
+        VolumeSource source = toVolumeSource(attributes, false);
+        if ((source != VOLUME_SOURCE_NONE) &&
+                (std::find(begin(sourcesToMute), end(sourcesToMute), source)
+                        == end(sourcesToMute))) {
             sourcesToMute.push_back(source);
         }
     }
@@ -6849,7 +7405,7 @@
     if (on) {
         if (!outputDesc->isMuted(volumeSource)) {
             if (curves.canBeMuted() &&
-                    (volumeSource != toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE) ||
+                    (volumeSource != toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false) ||
                      (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) ==
                       AUDIO_POLICY_FORCE_NONE))) {
                 checkAndSetVolume(curves, volumeSource, 0, outputDesc, deviceTypes, delayMs);
@@ -6916,13 +7472,11 @@
     return mEngine->getForceUse(usage);
 }
 
-bool AudioPolicyManager::isInCall()
-{
+bool AudioPolicyManager::isInCall() const {
     return isStateInCall(mEngine->getPhoneState());
 }
 
-bool AudioPolicyManager::isStateInCall(int state)
-{
+bool AudioPolicyManager::isStateInCall(int state) const {
     return is_state_in_call(state);
 }
 
@@ -6930,8 +7484,8 @@
 {
     audio_mode_t mode = mEngine->getPhoneState();
     return (mode == AUDIO_MODE_IN_CALL)
-            || (mode == AUDIO_MODE_IN_COMMUNICATION)
-            || (mode == AUDIO_MODE_CALL_SCREEN);
+            || (mode == AUDIO_MODE_CALL_SCREEN)
+            || (mode == AUDIO_MODE_CALL_REDIRECT);
 }
 
 void AudioPolicyManager::cleanUpForDevice(const sp<DeviceDescriptor>& deviceDesc)
@@ -7210,7 +7764,10 @@
         routedDevices.add(device);
     }
     for (const auto& client : activeClients) {
-        // TODO: b/175343099 only travel the valid client
+        if (client->isInvalid()) {
+            // No need to take care about invalidated clients.
+            continue;
+        }
         sp<DeviceDescriptor> preferredDevice =
                 mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId());
         if (mEngine->getOutputDevicesForAttributes(
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index b1454fd..db0ee15 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -35,6 +35,7 @@
 #include <media/PatchBuilder.h>
 #include "AudioPolicyInterface.h"
 
+#include <android/media/audio/common/AudioPort.h>
 #include <AudioPolicyManagerObserver.h>
 #include <AudioPolicyConfig.h>
 #include <PolicyAudioPort.h>
@@ -95,11 +96,8 @@
         virtual ~AudioPolicyManager();
 
         // AudioPolicyInterface
-        virtual status_t setDeviceConnectionState(audio_devices_t device,
-                                                          audio_policy_dev_state_t state,
-                                                          const char *device_address,
-                                                          const char *device_name,
-                                                          audio_format_t encodedFormat);
+        virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
+                const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
                                                                               const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
@@ -124,7 +122,8 @@
                                   audio_port_handle_t *selectedDeviceId,
                                   audio_port_handle_t *portId,
                                   std::vector<audio_io_handle_t> *secondaryOutputs,
-                                  output_type_t *outputType) override;
+                                  output_type_t *outputType,
+                                  bool *isSpatialized) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual bool releaseOutput(audio_port_handle_t portId);
@@ -192,12 +191,37 @@
             return mEngine->getProductStrategyForAttributes(attributes);
         }
 
-        // return the enabled output devices for the given stream type
-        virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
-
+        /**
+         * Returns a vector of devices associated with attributes.
+         *
+         * An AudioTrack opened with specified attributes should play on the returned devices.
+         * If forVolume is set to true, the caller is AudioService, determining the proper
+         * device volume to adjust.
+         *
+         * Devices are determined in the following precedence:
+         * 1) Devices associated with a dynamic policy matching the attributes.  This is often
+         *    a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.  Secondary mixes from a
+         *    dynamic policy are not included.
+         *
+         * If no such dynamic policy then
+         * 2) Devices containing an active client using setPreferredDevice
+         *    with same strategy as the attributes.
+         *    (from the default Engine::getOutputDevicesForAttributes() implementation).
+         *
+         * If no corresponding active client with setPreferredDevice then
+         * 3) Devices associated with the strategy determined by the attributes
+         *    (from the default Engine::getOutputDevicesForAttributes() implementation).
+         *
+         * @param attributes to be considered
+         * @param devices    an AudioDeviceTypeAddrVector container passed in that
+         *                   will be filled on success.
+         * @param forVolume  true if the devices are to be associated with current device volume.
+         * @return           NO_ERROR on success.
+         */
         virtual status_t getDevicesForAttributes(
                 const audio_attributes_t &attributes,
-                AudioDeviceTypeAddrVector *devices);
+                AudioDeviceTypeAddrVector *devices,
+                bool forVolume);
 
         virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc = NULL);
         virtual status_t registerEffect(const effect_descriptor_t *desc,
@@ -240,10 +264,7 @@
         virtual status_t getAudioPort(struct audio_port_v7 *port);
         virtual status_t createAudioPatch(const struct audio_patch *patch,
                                            audio_patch_handle_t *handle,
-                                           uid_t uid) {
-            return createAudioPatchInternal(patch, handle, uid);
-        }
-
+                                           uid_t uid);
         virtual status_t releaseAudioPatch(audio_patch_handle_t handle,
                                               uid_t uid);
         virtual status_t listAudioPatches(unsigned int *num_patches,
@@ -320,13 +341,15 @@
                                                     audio_format_t *surroundFormats);
         virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
-        virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                    std::vector<audio_format_t> *formats);
+        virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+                    audio_devices_t device, std::vector<audio_format_t> *formats);
 
         virtual void setAppState(audio_port_handle_t portId, app_state_t state);
 
         virtual bool isHapticPlaybackSupported();
 
+        virtual bool isUltrasoundSupported();
+
         virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
         {
             return mEngine->listAudioProductStrategies(strategies);
@@ -368,6 +391,12 @@
 
         virtual status_t releaseSpatializerOutput(audio_io_handle_t output);
 
+        virtual audio_direct_mode_t getDirectPlaybackSupport(const audio_attributes_t *attr,
+                                                             const audio_config_t *config);
+
+        virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
+                                                         AudioProfileVector& audioProfiles);
+
         bool isCallScreenModeSupported() override;
 
         void onNewAudioModulesAvailable() override;
@@ -433,13 +462,30 @@
         {
             return static_cast<VolumeSource>(volumeGroup);
         }
-        VolumeSource toVolumeSource(const audio_attributes_t &attributes) const
+        /**
+         * @brief toVolumeSource converts audio attributes into a volume source
+         * (either a legacy stream or a volume group). If fallback on default is allowed, and if
+         * the audio attributes do not match any specific product strategy rule, they are
+         * associated with the default volume source, e.g. music. Thus, any call of a volume API
+         * using this translation function may affect the default volume source.
+         * If fallback is not allowed and no matching rule is identified for the given attributes,
+         * the volume source will be undefined, and thus no volume will be altered/modified.
+         * @param attributes to be considered
+         * @param fallbackOnDefault if true, fall back on the default (music) volume source when
+         * no product strategy rule matches the attributes.
+         * @return the volume source associated with the given attributes; otherwise the default
+         * (music) source if fallbackOnDefault is set, or none.
+         */
+        VolumeSource toVolumeSource(
+            const audio_attributes_t &attributes, bool fallbackOnDefault = true) const
         {
-            return toVolumeSource(mEngine->getVolumeGroupForAttributes(attributes));
+            return toVolumeSource(mEngine->getVolumeGroupForAttributes(
+                attributes, fallbackOnDefault));
         }
-        VolumeSource toVolumeSource(audio_stream_type_t stream) const
+        VolumeSource toVolumeSource(
+            audio_stream_type_t stream, bool fallbackOnDefault = true) const
         {
-            return toVolumeSource(mEngine->getVolumeGroupForStreamType(stream));
+            return toVolumeSource(mEngine->getVolumeGroupForStreamType(
+                stream, fallbackOnDefault));
         }
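
A hedged usage sketch of the fallbackOnDefault parameter, mirroring the pattern already used by the AudioPolicyManager.cpp changes above (attributes and outputDesc are assumed to be in scope):

    VolumeSource src = toVolumeSource(attributes, false /* fallbackOnDefault */);
    if (src != VOLUME_SOURCE_NONE) {
        // Only touch volumes when the attributes map to a real volume group; otherwise the
        // request would silently land on the default (music) source.
        setVolumeSourceMute(src, true /* on */, outputDesc, 0 /* delay */, DeviceTypeSet());
    }
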
         IVolumeCurves &getVolumeCurves(VolumeSource volumeSource)
         {
@@ -465,14 +511,27 @@
         void removeOutput(audio_io_handle_t output);
         void addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc);
 
-        // change the route of the specified output. Returns the number of ms we have slept to
-        // allow new routing to take effect in certain cases.
+        /**
+         * @brief setOutputDevices change the route of the specified output.
+         * @param outputDesc to be considered
+         * @param device to be considered to route the output
+         * @param force if true, force the routing even if no change.
+         * @param delayMs if specified, delay to apply for mute/volume op when changing device
+         * @param patchHandle if specified, the patch handle this output is connected through.
+         * @param requiresMuteCheck if true, mute checks are performed, e.g. when another output
+         *        on a shared device is currently active, to allow a proper drain and avoid pops.
+         * @param requiresVolumeCheck true if the caller requires volumes to be reapplied even
+         *        when the routing did not change (but the output is still routed).
+         * @return the number of ms we have slept to allow new routing to take effect in certain
+         * cases.
+         */
         uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
                                   bool force = false,
                                   int delayMs = 0,
                                   audio_patch_handle_t *patchHandle = NULL,
-                                  bool requiresMuteCheck = true);
+                                  bool requiresMuteCheck = true,
+                                  bool requiresVolumeCheck = false);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
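
A hedged caller sketch for the new requiresVolumeCheck parameter documented above (the call follows the declaration shown; outputDesc and newDevices are assumed to be in scope):

    uint32_t waitMs = setOutputDevices(outputDesc, newDevices,
                                       false   /* force */,
                                       0       /* delayMs */,
                                       nullptr /* patchHandle */,
                                       true    /* requiresMuteCheck */,
                                       true    /* requiresVolumeCheck */);
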
@@ -539,9 +598,9 @@
         audio_mode_t getPhoneState();
 
         // true if device is in a telephony or VoIP call
-        virtual bool isInCall();
+        virtual bool isInCall() const;
         // true if given state represents a device in a telephony or VoIP call
-        virtual bool isStateInCall(int state);
+        virtual bool isStateInCall(int state) const;
         // true if playback to call TX or capture from call RX is possible
         bool isCallAudioAccessible();
 
@@ -577,13 +636,22 @@
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0);
 
         bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallRxSourceClientPort != AUDIO_PORT_HANDLE_NONE
-                && source == mAudioSources.valueFor(mCallRxSourceClientPort);
+            return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
         }
 
         void connectTelephonyRxAudioSource();
 
-        void disconnectTelephonyRxAudioSource();
+        void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
+
+        void connectTelephonyTxAudioSource(const sp<DeviceDescriptor> &srcdevice,
+                                           const sp<DeviceDescriptor> &sinkDevice,
+                                           uint32_t delayMs);
+
+        bool isTelephonyRxOrTx(const sp<SwAudioOutputDescriptor>& desc) const {
+            return (mCallRxSourceClient != nullptr && mCallRxSourceClient->belongsToOutput(desc))
+                    || (mCallTxSourceClient != nullptr
+                    &&  mCallTxSourceClient->belongsToOutput(desc));
+        }
 
         /**
          * @brief updates routing for all inputs.
@@ -701,6 +769,15 @@
                                           audio_channel_mask_t channelMask,
                                           audio_output_flags_t flags,
                                           bool directOnly);
+        /**
+         * Same as getProfileForOutput, but it looks for an MSD profile.
+         */
+        sp<IOProfile> getMsdProfileForOutput(const DeviceVector &devices,
+                                           uint32_t samplingRate,
+                                           audio_format_t format,
+                                           audio_channel_mask_t channelMask,
+                                           audio_output_flags_t flags,
+                                           bool directOnly);
 
         audio_io_handle_t selectOutputForMusicEffects();
 
@@ -790,6 +867,12 @@
         status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
         status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
 
+        status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
+                                          const sp<DeviceDescriptor> &sinkDevice,
+                                          const struct audio_patch *patch,
+                                          audio_patch_handle_t &handle,
+                                          uid_t uid, uint32_t delayMs);
+
         sp<SourceClientDescriptor> getSourceForAttributesOnOutput(audio_io_handle_t output,
                                                                   const audio_attributes_t &attr);
         void clearAudioSourcesForOutput(audio_io_handle_t output);
@@ -804,6 +887,21 @@
         void closeActiveClients(const sp<AudioInputDescriptor>& input);
         void closeClient(audio_port_handle_t portId);
 
+        /**
+         * @brief isAnyDeviceTypeActive: returns true if at least one active client is routed to
+         * one of the specified devices
+         * @param deviceTypes list of devices to consider
+         */
+        bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
+        /**
+         * @brief isLeUnicastActive: returns true if a call is active or at least one active client
+         * is routed to a LE unicast device
+         */
+        bool isLeUnicastActive() const;
+
+        void checkLeBroadcastRoutes(bool wasUnicastActive,
+                sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
+
         const uid_t mUidCached;                         // AID_AUDIOSERVER
         AudioPolicyClientInterface *mpClientInterface;  // audio policy client interface
         sp<SwAudioOutputDescriptor> mPrimaryOutput;     // primary output descriptor
@@ -840,8 +938,6 @@
 
         SoundTriggerSessionCollection mSoundTriggerSessions;
 
-        sp<AudioPatch> mCallTxPatch;
-
         HwAudioOutputCollection mHwOutputs;
         SourceClientCollection mAudioSources;
 
@@ -856,7 +952,8 @@
         uint32_t mBeaconMuteRefCount;   // ref count for stream that would mute beacon
         uint32_t mBeaconPlayingRefCount;// ref count for the playing beacon streams
         bool mBeaconMuted;              // has STREAM_TTS been muted
-        bool mTtsOutputAvailable;       // true if a dedicated output for TTS stream is available
+        // true if a dedicated output for TTS stream or Ultrasound is available
+        bool mTtsOutputAvailable;
 
         bool mMasterMono;               // true if we wish to force all outputs to mono
         AudioPolicyMixCollection mPolicyMixes; // list of registered mixes
@@ -881,7 +978,8 @@
 
         // The port handle of the hardware audio source created internally for the Call RX audio
         // end point.
-        audio_port_handle_t mCallRxSourceClientPort = AUDIO_PORT_HANDLE_NONE;
+        sp<SourceClientDescriptor> mCallRxSourceClient;
+        sp<SourceClientDescriptor> mCallTxSourceClient;
 
         // Support for Multi-Stream Decoder (MSD) module
         sp<DeviceDescriptor> getMsdAudioInDevice() const;
@@ -902,7 +1000,24 @@
         PatchBuilder buildMsdPatch(bool msdIsSource, const sp<DeviceDescriptor> &device) const;
         status_t setMsdOutputPatches(const DeviceVector *outputDevices = nullptr);
         void releaseMsdOutputPatches(const DeviceVector& devices);
+        bool msdHasPatchesToAllDevices(const AudioDeviceTypeAddrVector& devices);
+
+        // Overload of setDeviceConnectionState()
+        status_t setDeviceConnectionState(audio_devices_t deviceType,
+                                          audio_policy_dev_state_t state,
+                                          const char* device_address, const char* device_name,
+                                          audio_format_t encodedFormat);
+
+        // Called by setDeviceConnectionState()
+        status_t deviceToAudioPort(audio_devices_t deviceType, const char* device_address,
+                                   const char* device_name, media::AudioPort* aidPort);
+        bool isMsdPatch(const audio_patch_handle_t &handle) const;
+
 private:
+        sp<SourceClientDescriptor> startAudioSourceInternal(
+                const struct audio_port_config *source, const audio_attributes_t *attributes,
+                uid_t uid);
+
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
 
         // Add or remove AC3 DTS encodings based on user preferences.
@@ -942,7 +1057,8 @@
                 audio_port_handle_t *selectedDeviceId,
                 bool *isRequestedDeviceForExclusiveUse,
                 std::vector<sp<AudioPolicyMix>> *secondaryMixes,
-                output_type_t *outputType);
+                output_type_t *outputType,
+                bool *isSpatialized);
         // internal method to return the output handle for the given device and format
         audio_io_handle_t getOutputForDevices(
                 const DeviceVector &devices,
@@ -950,6 +1066,7 @@
                 const audio_attributes_t *attr,
                 const audio_config_t *config,
                 audio_output_flags_t *flags,
+                bool *isSpatialized,
                 bool forceMutingHaptic = false);
 
         // Internal method checking if a direct output can be opened matching the requested
@@ -976,25 +1093,28 @@
          * @param attr audio attributes describing the playback use case
          * @param config audio configuration describing the audio format, channels, sample rate...
          * @param devices the sink audio device selected for playback
-         * @param allowCurrentOutputReconfig if true, the result will be considering it is possible
-         *      to close and reopen an existing spatializer output stream to match the requested
-         *      criteria. If false, the criteria must be compatible with the opened sptializer
-         *      output.
          * @return true if spatialization is possible for this context, false otherwise.
          */
         virtual bool canBeSpatializedInt(const audio_attributes_t *attr,
                                       const audio_config_t *config,
-                                      const AudioDeviceTypeAddrVector &devices,
-                                      bool allowCurrentOutputReconfig = true) const;
+                                      const AudioDeviceTypeAddrVector &devices) const;
 
         sp<IOProfile> getSpatializerOutputProfile(const audio_config_t *config,
                                                   const AudioDeviceTypeAddrVector &devices) const;
 
-        static bool isChannelMaskSpatialized(audio_channel_mask_t channels);
-
         void checkVirtualizerClientRoutes();
 
         /**
+         * @brief Returns true if at least one device can only be reached via the output passed
+         * as argument. Always returns false for duplicated outputs.
+         * This can be used to decide if an output can be closed without forbidding
+         * playback to any given device.
+         * @param outputDesc the output to consider
+         * @return true if at least one device can only be reached via the output.
+         */
+        bool isOutputOnlyAvailableRouteToSomeDevice(const sp<SwAudioOutputDescriptor>& outputDesc);
+
+        /**
          * @brief getInputForDevice selects an input handle for a given input device and
          * requester context
          * @param device to be used by requester, selected by policy mix rules or engine
@@ -1021,6 +1141,9 @@
         bool     isValidAttributes(const audio_attributes_t *paa);
 
         // Called by setDeviceConnectionState().
+        status_t setDeviceConnectionStateInt(audio_policy_dev_state_t state,
+                                             const android::media::audio::common::AudioPort& port,
+                                             audio_format_t encodedFormat);
         status_t setDeviceConnectionStateInt(audio_devices_t deviceType,
                                              audio_policy_dev_state_t state,
                                              const char *device_address,
@@ -1044,21 +1167,25 @@
          * @param[out] handle patch handle to be provided if patch installed correctly
          * @param[in] uid of the client
          * @param[in] delayMs if required
-         * @param[in] sourceDesc [optional] in case of external source, source client to be
-         * configured by the patch, i.e. assigning an Output (HW or SW)
+         * @param[in] sourceDesc source client to be configured when creating the patch, i.e.
+         *            assigning an Output (HW or SW) used for volume control.
          * @return NO_ERROR if patch installed correctly, error code otherwise.
          */
         status_t createAudioPatchInternal(const struct audio_patch *patch,
                                           audio_patch_handle_t *handle,
-                                          uid_t uid, uint32_t delayMs = 0,
-                                          const sp<SourceClientDescriptor>& sourceDesc = nullptr);
+                                          uid_t uid, uint32_t delayMs,
+                                          const sp<SourceClientDescriptor>& sourceDesc);
         /**
          * @brief releaseAudioPatchInternal internal function to remove an audio patch
          * @param[in] handle of the patch to be removed
          * @param[in] delayMs if required
+         * @param[in] sourceDesc [optional] in case of external source, source client to be
+         * unrouted from the patch, i.e. releasing the Output (HW or SW) previously assigned
          * @return NO_ERROR if patch removed correctly, error code otherwise.
          */
-        status_t releaseAudioPatchInternal(audio_patch_handle_t handle, uint32_t delayMs = 0);
+        status_t releaseAudioPatchInternal(audio_patch_handle_t handle,
+                                           uint32_t delayMs = 0,
+                                           const sp<SourceClientDescriptor>& sourceDesc = nullptr);
 
         status_t installPatch(const char *caller,
                 audio_patch_handle_t *patchHandle,
@@ -1080,6 +1207,8 @@
 
         bool isScoRequestedForComm() const;
 
+        bool isHearingAidUsedForComm() const;
+
         bool areAllActiveTracksRerouted(const sp<SwAudioOutputDescriptor>& output);
 
         /**
@@ -1097,6 +1226,28 @@
                 const sp<IOProfile>& profile, const DeviceVector& devices,
                 const audio_config_base_t *mixerConfig = nullptr);
 
+        bool isOffloadPossible(const audio_offload_info_t& offloadInfo,
+                               bool durationIgnored = false);
+
+        // adds the profiles from the outputProfile to the passed audioProfilesVector
+        // without duplicating them if already present
+        void addPortProfilesToVector(sp<IOProfile> outputProfile,
+                                    AudioProfileVector& audioProfilesVector);
+
+        // Searches for a compatible profile with the sample rate, audio format and channel mask
+        // in the list of passed HwModule(s).
+        // Returns a compatible profile if found, nullptr otherwise.
+        sp<IOProfile> searchCompatibleProfileHwModules(
+                                            const HwModuleCollection& hwModules,
+                                            const DeviceVector& devices,
+                                            uint32_t samplingRate,
+                                            audio_format_t format,
+                                            audio_channel_mask_t channelMask,
+                                            audio_output_flags_t flags,
+                                            bool directOnly);
+
+        // Filters only the relevant flags for getProfileForOutput
+        audio_output_flags_t getRelevantFlags (audio_output_flags_t flags, bool directOnly);
 };
 
 };
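For orientation, a minimal sketch (not part of this change) of how a caller inside AudioPolicyManager might use the revised internal patch API, now that createAudioPatchInternal() takes delayMs and sourceDesc explicitly and releaseAudioPatchInternal() accepts an optional sourceDesc; the local variables below are illustrative:

    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
    // Create the route and assign the source client its Output (used for volume control).
    status_t status = createAudioPatchInternal(&patch, &handle, uid,
                                               0 /*delayMs*/, sourceDesc);
    if (status == NO_ERROR) {
        // ... later, tear the route down and un-route the same source client.
        releaseAudioPatchInternal(handle, 0 /*delayMs*/, sourceDesc);
    }
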
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 197f183..cdad9a6 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -49,12 +49,14 @@
         "libshmemcompat",
         "libutils",
         "libstagefright_foundation",
+        "android.media.audio.common.types-V1-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
         "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
         "spatializer-aidl-cpp",
     ],
 
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 79252d4..c766a15 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -56,17 +56,18 @@
     media::OpenOutputResponse response;
 
     request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
-    request.halConfig = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(*halConfig));
-    request.mixerConfig =
-            VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig));
+    request.halConfig = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(*halConfig, false /*isInput*/));
+    request.mixerConfig = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig, false /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
     request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
 
     status_t status = af->openOutput(request, &response);
     if (status == OK) {
         *output = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.output));
-        *halConfig =
-                VALUE_OR_RETURN_STATUS(aidl2legacy_AudioConfig_audio_config_t(response.config));
+        *halConfig = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioConfig_audio_config_t(response.config, false /*isInput*/));
         *latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(response.latencyMs));
     }
     return status;
@@ -135,9 +136,10 @@
     media::OpenInputRequest request;
     request.module = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_module_handle_t_int32_t(module));
     request.input = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(*input));
-    request.config = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_config_t_AudioConfig(*config));
+    request.config = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioDeviceTypeAddress(deviceTypeAddr));
-    request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSourceType(source));
+    request.source = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_source_t_AudioSource(source));
     request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_input_flags_t_int32_t_mask(flags));
 
     media::OpenInputResponse response;
@@ -275,6 +277,11 @@
     mAudioPolicyService->onRoutingUpdated();
 }
 
+void AudioPolicyService::AudioPolicyClient::onVolumeRangeInitRequest()
+{
+    mAudioPolicyService->onVolumeRangeInitRequest();
+}
+
 audio_unique_id_t AudioPolicyService::AudioPolicyClient::newAudioUniqueId(audio_unique_id_use_t use)
 {
     return AudioSystem::newAudioUniqueId(use);
@@ -305,4 +312,15 @@
     return af->updateSecondaryOutputs(trackSecondaryOutputs);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setDeviceConnectedState(
+        const struct audio_port_v7 *port, bool connected) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+    return af->setDeviceConnectedState(port, connected);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 3f01de9..70fdfcb 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -345,7 +345,8 @@
             (source > AUDIO_SOURCE_MAX &&
              source != AUDIO_SOURCE_HOTWORD &&
              source != AUDIO_SOURCE_FM_TUNER &&
-             source != AUDIO_SOURCE_ECHO_REFERENCE)) {
+             source != AUDIO_SOURCE_ECHO_REFERENCE &&
+             source != AUDIO_SOURCE_ULTRASOUND)) {
         ALOGE("addSourceDefaultEffect(): Unsupported source type %d", source);
         return BAD_VALUE;
     }
@@ -386,7 +387,7 @@
         return res;
     }
     EffectDesc *effect = new EffectDesc(
-            descriptor.name, *type, opPackageName, *uuid, priority, *id);
+            descriptor.name, descriptor.type, opPackageName, descriptor.uuid, priority, *id);
     desc->mEffects.add(effect);
     // TODO(b/71813697): Support setting params as well.
 
@@ -451,7 +452,7 @@
         return res;
     }
     EffectDesc *effect = new EffectDesc(
-            descriptor.name, *type, opPackageName, *uuid, priority, *id);
+            descriptor.name, descriptor.type, opPackageName, descriptor.uuid, priority, *id);
     desc->mEffects.add(effect);
     // TODO(b/71813697): Support setting params as well.
 
@@ -544,6 +545,7 @@
     CAMCORDER_SRC_TAG,
     VOICE_REC_SRC_TAG,
     VOICE_COMM_SRC_TAG,
+    REMOTE_SUBMIX_SRC_TAG,
     UNPROCESSED_SRC_TAG,
     VOICE_PERFORMANCE_SRC_TAG
 };
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 4bd1260..df49bba 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -32,6 +32,9 @@
        if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
        std::move(_tmp.value()); })
 
+#define RETURN_BINDER_STATUS_IF_ERROR(x) \
+    if (status_t _tmp = (x); _tmp != OK) return aidl_utils::binderStatusFromStatusT(_tmp);
+
 #define RETURN_IF_BINDER_ERROR(x)      \
     {                                  \
         binder::Status _tmp = (x);     \
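The new RETURN_BINDER_STATUS_IF_ERROR macro complements VALUE_OR_RETURN_BINDER_STATUS for conversion helpers that return a status_t and fill out-parameters instead of returning a ConversionResult. A minimal usage sketch, mirroring the getDeviceConnectionState() change further down:

    audio_devices_t device;
    std::string address;
    // On failure the macro converts the status_t into a binder::Status and returns early.
    RETURN_BINDER_STATUS_IF_ERROR(
            aidl2legacy_AudioDevice_audio_device(deviceAidl, &device, &address));
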
@@ -44,6 +47,19 @@
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
 using content::AttributionSourceState;
+using media::audio::common::AudioConfig;
+using media::audio::common::AudioConfigBase;
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioMode;
+using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioUsage;
+using media::audio::common::AudioUuid;
+using media::audio::common::Int;
 
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
     AUDIO_USAGE_CALL_ASSISTANT,
@@ -63,17 +79,24 @@
         != std::end(mSupportedSystemUsages);
 }
 
-status_t AudioPolicyService::validateUsage(audio_usage_t usage) {
-     return validateUsage(usage, getCallingAttributionSource());
+status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
+     return validateUsage(attr, getCallingAttributionSource());
 }
 
-status_t AudioPolicyService::validateUsage(audio_usage_t usage,
+status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr,
         const AttributionSourceState& attributionSource) {
-    if (isSystemUsage(usage)) {
-        if (isSupportedSystemUsage(usage)) {
-            if (!modifyAudioRoutingAllowed(attributionSource)) {
-                ALOGE(("permission denied: modify audio routing not allowed "
-                       "for attributionSource %s"), attributionSource.toString().c_str());
+    if (isSystemUsage(attr.usage)) {
+        if (isSupportedSystemUsage(attr.usage)) {
+            if (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
+                    && ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)) {
+                if (!callAudioInterceptionAllowed(attributionSource)) {
+                    ALOGE("%s: call audio interception not allowed for attribution source: %s",
+                           __func__, attributionSource.toString().c_str());
+                    return PERMISSION_DENIED;
+                }
+            } else if (!modifyAudioRoutingAllowed(attributionSource)) {
+                ALOGE("%s: modify audio routing not allowed for attribution source: %s",
+                        __func__, attributionSource.toString().c_str());
                 return PERMISSION_DENIED;
             }
         } else {
@@ -96,16 +119,13 @@
 }
 
 Status AudioPolicyService::setDeviceConnectionState(
-        const media::AudioDevice& deviceAidl,
         media::AudioPolicyDeviceState stateAidl,
-        const std::string& deviceNameAidl,
-        media::audio::common::AudioFormat encodedFormatAidl) {
-    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl.type));
+        const android::media::audio::common::AudioPort& port,
+        const AudioFormatDescription& encodedFormatAidl) {
     audio_policy_dev_state_t state = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPolicyDeviceState_audio_policy_dev_state_t(stateAidl));
     audio_format_t encodedFormat = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioFormat_audio_format_t(encodedFormatAidl));
+            aidl2legacy_AudioFormatDescription_audio_format_t(encodedFormatAidl));
 
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
@@ -121,42 +141,45 @@
     ALOGV("setDeviceConnectionState()");
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
-    status_t status = mAudioPolicyManager->setDeviceConnectionState(device, state,
-                                                          deviceAidl.address.c_str(),
-                                                          deviceNameAidl.c_str(),
-                                                          encodedFormat);
+    status_t status = mAudioPolicyManager->setDeviceConnectionState(
+            state, port, encodedFormat);
     if (status == NO_ERROR) {
         onCheckSpatializer_l();
     }
     return binderStatusFromStatusT(status);
 }
 
-Status AudioPolicyService::getDeviceConnectionState(const media::AudioDevice& deviceAidl,
+Status AudioPolicyService::getDeviceConnectionState(const AudioDevice& deviceAidl,
                                                     media::AudioPolicyDeviceState* _aidl_return) {
-    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl.type));
+    audio_devices_t device;
+    std::string address;
+    RETURN_BINDER_STATUS_IF_ERROR(
+            aidl2legacy_AudioDevice_audio_device(deviceAidl, &device, &address));
     if (mAudioPolicyManager == NULL) {
         *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(
                         AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
         return Status::ok();
     }
+    Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(
-                    mAudioPolicyManager->getDeviceConnectionState(device,
-                                                                  deviceAidl.address.c_str())));
+                    mAudioPolicyManager->getDeviceConnectionState(
+                            device, address.c_str())));
     return Status::ok();
 }
 
 Status AudioPolicyService::handleDeviceConfigChange(
-        const media::AudioDevice& deviceAidl,
+        const AudioDevice& deviceAidl,
         const std::string& deviceNameAidl,
-        media::audio::common::AudioFormat encodedFormatAidl) {
-    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl.type));
+        const AudioFormatDescription& encodedFormatAidl) {
+    audio_devices_t device;
+    std::string address;
+    RETURN_BINDER_STATUS_IF_ERROR(
+            aidl2legacy_AudioDevice_audio_device(deviceAidl, &device, &address));
     audio_format_t encodedFormat = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioFormat_audio_format_t(encodedFormatAidl));
+            aidl2legacy_AudioFormatDescription_audio_format_t(encodedFormatAidl));
 
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
@@ -169,7 +192,7 @@
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     status_t status =  mAudioPolicyManager->handleDeviceConfigChange(
-            device, deviceAidl.address.c_str(), deviceNameAidl.c_str(), encodedFormat);
+            device, address.c_str(), deviceNameAidl.c_str(), encodedFormat);
 
     if (status == NO_ERROR) {
        onCheckSpatializer_l();
@@ -177,7 +200,7 @@
     return binderStatusFromStatusT(status);
 }
 
-Status AudioPolicyService::setPhoneState(media::AudioMode stateAidl, int32_t uidAidl)
+Status AudioPolicyService::setPhoneState(AudioMode stateAidl, int32_t uidAidl)
 {
     audio_mode_t state = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioMode_audio_mode_t(stateAidl));
@@ -199,7 +222,15 @@
     // can be interleaved).
     Mutex::Autolock _l(mLock);
     // TODO: check if it is more appropriate to do it in platform specific policy manager
-    AudioSystem::setMode(state);
+
+    // Audio HAL mode conversion for call redirect modes
+    audio_mode_t halMode = state;
+    if (state == AUDIO_MODE_CALL_REDIRECT) {
+        halMode = AUDIO_MODE_CALL_SCREEN;
+    } else if (state == AUDIO_MODE_COMMUNICATION_REDIRECT) {
+        halMode = AUDIO_MODE_NORMAL;
+    }
+    AudioSystem::setMode(halMode);
 
     AutoCallerClear acc;
     mAudioPolicyManager->setPhoneState(state);
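In effect, the policy manager keeps the requested mode while the audio HAL only ever sees modes it already understands; a summary of the mapping applied above:

    // AUDIO_MODE_CALL_REDIRECT          -> HAL sees AUDIO_MODE_CALL_SCREEN
    // AUDIO_MODE_COMMUNICATION_REDIRECT -> HAL sees AUDIO_MODE_NORMAL
    // any other mode                    -> passed through unchanged
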
@@ -209,7 +240,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getPhoneState(media::AudioMode* _aidl_return) {
+Status AudioPolicyService::getPhoneState(AudioMode* _aidl_return) {
     Mutex::Autolock _l(mLock);
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_mode_t_AudioMode(mPhoneState));
     return Status::ok();
@@ -265,7 +296,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getOutput(media::AudioStreamType streamAidl, int32_t* _aidl_return)
+Status AudioPolicyService::getOutput(AudioStreamType streamAidl, int32_t* _aidl_return)
 {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
@@ -289,7 +320,7 @@
 Status AudioPolicyService::getOutputForAttr(const media::AudioAttributesInternal& attrAidl,
                                             int32_t sessionAidl,
                                             const AttributionSourceState& attributionSource,
-                                            const media::AudioConfig& configAidl,
+                                            const AudioConfig& configAidl,
                                             int32_t flagsAidl,
                                             int32_t selectedDeviceIdAidl,
                                             media::GetOutputForAttrResponse* _aidl_return)
@@ -300,7 +331,7 @@
             aidl2legacy_int32_t_audio_session_t(sessionAidl));
     audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
     audio_config_t config = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioConfig_audio_config_t(configAidl));
+            aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
     audio_output_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_output_flags_t_mask(flagsAidl));
     audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
@@ -316,7 +347,7 @@
 
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage, attributionSource)));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr, attributionSource)));
 
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
@@ -341,15 +372,26 @@
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
+
+    if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
+        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+            ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
+                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+            return binderStatusFromStatusT(PERMISSION_DENIED);
+        }
+    }
+
     AutoCallerClear acc;
     AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized = false;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
                                                             adjAttributionSource,
                                                             &config,
                                                             &flags, &selectedDeviceId, &portId,
                                                             &secondaryOutputs,
-                                                            &outputType);
+                                                            &outputType,
+                                                            &isSpatialized);
 
     // FIXME: Introduce a way to check for the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -358,7 +400,12 @@
         case AudioPolicyInterface::API_OUTPUT_LEGACY:
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
-            if (!modifyPhoneStateAllowed(adjAttributionSource)) {
+            if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
+                && !callAudioInterceptionAllowed(adjAttributionSource)) {
+                ALOGE("%s() permission denied: call redirection not allowed for uid %d",
+                    __func__, adjAttributionSource.uid);
+                result = PERMISSION_DENIED;
+            } else if (!modifyPhoneStateAllowed(adjAttributionSource)) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
                     __func__, adjAttributionSource.uid);
                 result = PERMISSION_DENIED;
@@ -381,7 +428,7 @@
     if (result == NO_ERROR) {
         sp<AudioPlaybackClient> client =
                 new AudioPlaybackClient(attr, output, adjAttributionSource, session,
-                    portId, selectedDeviceId, stream);
+                    portId, selectedDeviceId, stream, isSpatialized);
         mAudioPlaybackClients.add(portId, client);
 
         _aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
@@ -395,6 +442,7 @@
         _aidl_return->secondaryOutputs = VALUE_OR_RETURN_BINDER_STATUS(
                 convertContainer<std::vector<int32_t>>(secondaryOutputs,
                                                        legacy2aidl_audio_io_handle_t_int32_t));
+        _aidl_return->isSpatialized = isSpatialized;
     }
     return binderStatusFromStatusT(result);
 }
@@ -440,6 +488,7 @@
     status_t status = mAudioPolicyManager->startOutput(portId);
     if (status == NO_ERROR) {
         client->active = true;
+        onUpdateActiveSpatializerTracks_l();
     }
     return binderStatusFromStatusT(status);
 }
@@ -477,6 +526,7 @@
     status_t status = mAudioPolicyManager->stopOutput(portId);
     if (status == NO_ERROR) {
         client->active = false;
+        onUpdateActiveSpatializerTracks_l();
     }
     return status;
 }
@@ -507,8 +557,10 @@
             client->io, client->stream, client->session);
     }
     Mutex::Autolock _l(mLock);
+    if (client != nullptr && client->active) {
+        onUpdateActiveSpatializerTracks_l();
+    }
     mAudioPlaybackClients.removeItem(portId);
-
     // called from internal thread: no need to clear caller identity
     mAudioPolicyManager->releaseOutput(portId);
 }
@@ -518,7 +570,7 @@
                                            int32_t riidAidl,
                                            int32_t sessionAidl,
                                            const AttributionSourceState& attributionSource,
-                                           const media::AudioConfigBase& configAidl,
+                                           const AudioConfigBase& configAidl,
                                            int32_t flagsAidl,
                                            int32_t selectedDeviceIdAidl,
                                            media::GetInputForAttrResponse* _aidl_return) {
@@ -531,7 +583,7 @@
     audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_session_t(sessionAidl));
     audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl));
+            aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl, true /*isInput*/));
     audio_input_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_input_flags_t_mask(flagsAidl));
     audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
@@ -556,7 +608,8 @@
             || (inputSource >= AUDIO_SOURCE_CNT
                 && inputSource != AUDIO_SOURCE_HOTWORD
                 && inputSource != AUDIO_SOURCE_FM_TUNER
-                && inputSource != AUDIO_SOURCE_ECHO_REFERENCE)) {
+                && inputSource != AUDIO_SOURCE_ECHO_REFERENCE
+                && inputSource != AUDIO_SOURCE_ULTRASOUND)) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
@@ -585,32 +638,43 @@
         adjAttributionSource.pid = callingPid;
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr.usage,
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
             adjAttributionSource)));
 
     // check calling permissions.
-    // Capturing from FM_TUNER source is controlled by captureTunerAudioInputAllowed() and
-    // captureAudioOutputAllowed() (deprecated) as this does not affect users privacy
-    // as does capturing from an actual microphone.
-    if (!(recordingAllowed(adjAttributionSource, attr.source)
-            || attr.source == AUDIO_SOURCE_FM_TUNER)) {
+    // Capturing from the following sources does not require the RECORD_AUDIO permission,
+    // as the captured audio does not come from a microphone:
+    // - FM_TUNER source is controlled by captureTunerAudioInputAllowed() or
+    //   captureAudioOutputAllowed() (deprecated).
+    // - REMOTE_SUBMIX source is controlled by captureAudioOutputAllowed() if the input
+    //   type is API_INPUT_MIX_EXT_POLICY_REROUTE, and by AudioService if a media projection
+    //   is used and the input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK.
+    // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed().
+    if (!(recordingAllowed(adjAttributionSource, inputSource)
+            || inputSource == AUDIO_SOURCE_FM_TUNER
+            || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
+            || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
         ALOGE("%s permission denied: recording not allowed for %s",
                 __func__, adjAttributionSource.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
-    if ((inputSource == AUDIO_SOURCE_VOICE_UPLINK ||
-        inputSource == AUDIO_SOURCE_VOICE_DOWNLINK ||
-        inputSource == AUDIO_SOURCE_VOICE_CALL ||
-        inputSource == AUDIO_SOURCE_ECHO_REFERENCE)
-        && !canCaptureOutput) {
+    bool canInterceptCallAudio = callAudioInterceptionAllowed(adjAttributionSource);
+    bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
+             || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
+             || inputSource == AUDIO_SOURCE_VOICE_CALL;
+
+    if (isCallAudioSource && !canInterceptCallAudio && !canCaptureOutput) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
-
+    if (inputSource == AUDIO_SOURCE_ECHO_REFERENCE
+            && !canCaptureOutput) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
-        && !captureTunerAudioInputAllowed(adjAttributionSource)
-        && !canCaptureOutput) {
+        && !canCaptureOutput
+        && !captureTunerAudioInputAllowed(adjAttributionSource)) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
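Taken together, the checks above and below amount to roughly the following per-source rule for capture permissions (a simplified summary, not code from this change):

    // microphone sources              -> recordingAllowed() (RECORD_AUDIO / app-op)
    // FM_TUNER                        -> captureTunerAudioInputAllowed() or
    //                                    captureAudioOutputAllowed()
    // ECHO_REFERENCE                  -> captureAudioOutputAllowed()
    // VOICE_UPLINK / DOWNLINK / CALL  -> additionally captureAudioOutputAllowed() or
    //                                    callAudioInterceptionAllowed()
    // REMOTE_SUBMIX                   -> checked against the returned input type
    //                                    (mix capture / policy reroute) further down
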
 
@@ -626,6 +690,14 @@
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
+    if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
+        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+            ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
+                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+            return binderStatusFromStatusT(PERMISSION_DENIED);
+        }
+    }
+
     sp<AudioPolicyEffects>audioPolicyEffects;
     {
         status_t status;
@@ -652,23 +724,30 @@
             case AudioPolicyInterface::API_INPUT_LEGACY:
                 break;
             case AudioPolicyInterface::API_INPUT_TELEPHONY_RX:
+                if ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
+                        && canInterceptCallAudio) {
+                    break;
+                }
                 // FIXME: use the same permission as for remote submix for now.
+                FALLTHROUGH_INTENDED;
             case AudioPolicyInterface::API_INPUT_MIX_CAPTURE:
                 if (!canCaptureOutput) {
-                    ALOGE("getInputForAttr() permission denied: capture not allowed");
+                    ALOGE("%s permission denied: capture not allowed", __func__);
                     status = PERMISSION_DENIED;
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
-                    ALOGE("getInputForAttr() permission denied: modify audio routing not allowed");
+                if (!(modifyAudioRoutingAllowed(adjAttributionSource)
+                        || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
+                            && canInterceptCallAudio))) {
+                    ALOGE("%s permission denied for remote submix capture", __func__);
                     status = PERMISSION_DENIED;
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_INVALID:
             default:
-                LOG_ALWAYS_FATAL("getInputForAttr() encountered an invalid input type %d",
-                        (int)inputType);
+                LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
+                        __func__, (int)inputType);
             }
         }
 
@@ -738,8 +817,10 @@
 
     // check calling permissions
     if (!(startRecording(client->attributionSource, String16(msg.str().c_str()),
-        client->attributes.source)
-            || client->attributes.source == AUDIO_SOURCE_FM_TUNER)) {
+                         client->attributes.source)
+            || client->attributes.source == AUDIO_SOURCE_FM_TUNER
+            || client->attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX
+            || client->attributes.source == AUDIO_SOURCE_ECHO_REFERENCE)) {
         ALOGE("%s permission denied: recording not allowed for attribution source %s",
                 __func__, client->attributionSource.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -909,7 +990,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::initStreamVolume(media::AudioStreamType streamAidl,
+Status AudioPolicyService::initStreamVolume(AudioStreamType streamAidl,
                                             int32_t indexMinAidl,
                                             int32_t indexMaxAidl) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
@@ -932,13 +1013,14 @@
     return binderStatusFromStatusT(NO_ERROR);
 }
 
-Status AudioPolicyService::setStreamVolumeIndex(media::AudioStreamType streamAidl,
-                                                int32_t deviceAidl, int32_t indexAidl) {
+Status AudioPolicyService::setStreamVolumeIndex(AudioStreamType streamAidl,
+                                                const AudioDeviceDescription& deviceAidl,
+                                                int32_t indexAidl) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
     int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
 
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
@@ -956,12 +1038,13 @@
                                                                              device));
 }
 
-Status AudioPolicyService::getStreamVolumeIndex(media::AudioStreamType streamAidl,
-                                                int32_t deviceAidl, int32_t* _aidl_return) {
+Status AudioPolicyService::getStreamVolumeIndex(AudioStreamType streamAidl,
+                                                const AudioDeviceDescription& deviceAidl,
+                                                int32_t* _aidl_return) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
     int index;
 
     if (mAudioPolicyManager == NULL) {
@@ -979,12 +1062,13 @@
 }
 
 Status AudioPolicyService::setVolumeIndexForAttributes(
-        const media::AudioAttributesInternal& attrAidl, int32_t deviceAidl, int32_t indexAidl) {
+        const media::AudioAttributesInternal& attrAidl,
+        const AudioDeviceDescription& deviceAidl, int32_t indexAidl) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
     int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioAttributes(attributes, "169572641")));
 
@@ -1001,11 +1085,12 @@
 }
 
 Status AudioPolicyService::getVolumeIndexForAttributes(
-        const media::AudioAttributesInternal& attrAidl, int32_t deviceAidl, int32_t* _aidl_return) {
+        const media::AudioAttributesInternal& attrAidl,
+        const AudioDeviceDescription& deviceAidl, int32_t* _aidl_return) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
     int index;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             AudioValidator::validateAudioAttributes(attributes, "169572641")));
@@ -1059,7 +1144,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getStrategyForStream(media::AudioStreamType streamAidl,
+Status AudioPolicyService::getStrategyForStream(AudioStreamType streamAidl,
                                                 int32_t* _aidl_return) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
@@ -1081,30 +1166,9 @@
     return Status::ok();
 }
 
-//audio policy: use audio_device_t appropriately
-
-Status AudioPolicyService::getDevicesForStream(media::AudioStreamType streamAidl,
-                                               int32_t* _aidl_return) {
-    audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
-
-    if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
-        *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
-                legacy2aidl_audio_devices_t_int32_t(AUDIO_DEVICE_NONE));
-        return Status::ok();
-    }
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
-    Mutex::Autolock _l(mLock);
-    AutoCallerClear acc;
-    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
-            legacy2aidl_audio_devices_t_int32_t(mAudioPolicyManager->getDevicesForStream(stream)));
-    return Status::ok();
-}
-
 Status AudioPolicyService::getDevicesForAttributes(const media::AudioAttributesEx& attrAidl,
-                                                   std::vector<media::AudioDevice>* _aidl_return)
+                                                   bool forVolume,
+                                                   std::vector<AudioDevice>* _aidl_return)
 {
     AudioAttributes aa = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioAttributesEx_AudioAttributes(attrAidl));
@@ -1116,10 +1180,11 @@
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->getDevicesForAttributes(aa.getAttributes(), &devices)));
+            mAudioPolicyManager->getDevicesForAttributes(
+                    aa.getAttributes(), &devices, forVolume)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return Status::ok();
 }
 
@@ -1205,7 +1270,7 @@
     return binderStatusFromStatusT(mAudioPolicyManager->moveEffectsToIo(ids, io));
 }
 
-Status AudioPolicyService::isStreamActive(media::AudioStreamType streamAidl, int32_t inPastMsAidl,
+Status AudioPolicyService::isStreamActive(AudioStreamType streamAidl, int32_t inPastMsAidl,
                                           bool* _aidl_return) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
@@ -1224,7 +1289,7 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::isStreamActiveRemotely(media::AudioStreamType streamAidl,
+Status AudioPolicyService::isStreamActiveRemotely(AudioStreamType streamAidl,
                                                   int32_t inPastMsAidl,
                                                   bool* _aidl_return) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
@@ -1244,9 +1309,9 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::isSourceActive(media::AudioSourceType sourceAidl, bool* _aidl_return) {
+Status AudioPolicyService::isSourceActive(AudioSource sourceAidl, bool* _aidl_return) {
     audio_source_t source = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(sourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(sourceAidl));
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1274,7 +1339,7 @@
 
 Status AudioPolicyService::queryDefaultPreProcessing(
         int32_t audioSessionAidl,
-        media::Int* countAidl,
+        Int* countAidl,
         std::vector<media::EffectDescriptor>* _aidl_return) {
     audio_session_t audioSession = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_session_t(audioSessionAidl));
@@ -1298,11 +1363,11 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::addSourceDefaultEffect(const media::AudioUuid& typeAidl,
+Status AudioPolicyService::addSourceDefaultEffect(const AudioUuid& typeAidl,
                                                   const std::string& opPackageNameAidl,
-                                                  const media::AudioUuid& uuidAidl,
+                                                  const AudioUuid& uuidAidl,
                                                   int32_t priority,
-                                                  media::AudioSourceType sourceAidl,
+                                                  AudioSource sourceAidl,
                                                   int32_t* _aidl_return) {
     effect_uuid_t type = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioUuid_audio_uuid_t(typeAidl));
@@ -1311,7 +1376,7 @@
     effect_uuid_t uuid = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioUuid_audio_uuid_t(uuidAidl));
     audio_source_t source = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(sourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(sourceAidl));
     audio_unique_id_t id;
 
     sp<AudioPolicyEffects>audioPolicyEffects;
@@ -1325,10 +1390,10 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::addStreamDefaultEffect(const media::AudioUuid& typeAidl,
+Status AudioPolicyService::addStreamDefaultEffect(const AudioUuid& typeAidl,
                                                   const std::string& opPackageNameAidl,
-                                                  const media::AudioUuid& uuidAidl,
-                                                  int32_t priority, media::AudioUsage usageAidl,
+                                                  const AudioUuid& uuidAidl,
+                                                  int32_t priority, AudioUsage usageAidl,
                                                   int32_t* _aidl_return) {
     effect_uuid_t type = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioUuid_audio_uuid_t(typeAidl));
@@ -1376,7 +1441,7 @@
 }
 
 Status AudioPolicyService::setSupportedSystemUsages(
-        const std::vector<media::AudioUsage>& systemUsagesAidl) {
+        const std::vector<AudioUsage>& systemUsagesAidl) {
     size_t size = systemUsagesAidl.size();
     if (size > MAX_ITEMS_PER_LIST) {
         size = MAX_ITEMS_PER_LIST;
@@ -1415,7 +1480,7 @@
             mAudioPolicyManager->setAllowedCapturePolicy(uid, capturePolicy));
 }
 
-Status AudioPolicyService::getOffloadSupport(const media::AudioOffloadInfo& infoAidl,
+Status AudioPolicyService::getOffloadSupport(const AudioOffloadInfo& infoAidl,
                                              media::AudioOffloadMode* _aidl_return) {
     audio_offload_info_t info = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioOffloadInfo_audio_offload_info_t(infoAidl));
@@ -1431,11 +1496,11 @@
 }
 
 Status AudioPolicyService::isDirectOutputSupported(
-        const media::AudioConfigBase& configAidl,
+        const AudioConfigBase& configAidl,
         const media::AudioAttributesInternal& attributesAidl,
         bool* _aidl_return) {
     audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl));
+            aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl, false /*isInput*/));
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
@@ -1446,7 +1511,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes.usage)));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
 
     Mutex::Autolock _l(mLock);
     *_aidl_return = mAudioPolicyManager->isDirectOutputSupported(config, attributes);
@@ -1455,7 +1520,7 @@
 
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
-                                          media::AudioPortType typeAidl, media::Int* count,
+                                          media::AudioPortType typeAidl, Int* count,
                                           std::vector<media::AudioPort>* portsAidl,
                                           int32_t* _aidl_return) {
     audio_port_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
@@ -1487,12 +1552,9 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getAudioPort(const media::AudioPort& portAidl,
+Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPort* _aidl_return) {
-    audio_port_v7 port = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioPort_audio_port_v7(portAidl));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPort(port)));
-
+    audio_port_v7 port{ .id = portId };
     Mutex::Autolock _l(mLock);
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
@@ -1543,7 +1605,7 @@
                                                    IPCThreadState::self()->getCallingUid()));
 }
 
-Status AudioPolicyService::listAudioPatches(media::Int* count,
+Status AudioPolicyService::listAudioPatches(Int* count,
                                             std::vector<media::AudioPatch>* patchesAidl,
                                             int32_t* _aidl_return) {
     unsigned int num_patches = VALUE_OR_RETURN_BINDER_STATUS(
@@ -1610,7 +1672,7 @@
     _aidl_return->ioHandle = VALUE_OR_RETURN_BINDER_STATUS(
             legacy2aidl_audio_io_handle_t_int32_t(ioHandle));
     _aidl_return->device = VALUE_OR_RETURN_BINDER_STATUS(
-            legacy2aidl_audio_devices_t_int32_t(device));
+            legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     return Status::ok();
 }
 
@@ -1680,7 +1742,7 @@
 
 Status AudioPolicyService::setUidDeviceAffinities(
         int32_t uidAidl,
-        const std::vector<media::AudioDevice>& devicesAidl) {
+        const std::vector<AudioDevice>& devicesAidl) {
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
     AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
             convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
@@ -1713,7 +1775,7 @@
 
 Status AudioPolicyService::setUserIdDeviceAffinities(
         int32_t userIdAidl,
-        const std::vector<media::AudioDevice>& devicesAidl) {
+        const std::vector<AudioDevice>& devicesAidl) {
     int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
     AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
             convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
@@ -1762,7 +1824,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes.usage)));
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
 
     // startAudioSource should be created as the calling uid
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -1810,13 +1872,14 @@
 }
 
 
-Status AudioPolicyService::getStreamVolumeDB(media::AudioStreamType streamAidl, int32_t indexAidl,
-                                             int32_t deviceAidl, float* _aidl_return) {
+Status AudioPolicyService::getStreamVolumeDB(
+        AudioStreamType streamAidl, int32_t indexAidl,
+        const AudioDeviceDescription& deviceAidl, float* _aidl_return) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
     int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
     audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
 
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
@@ -1827,8 +1890,8 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getSurroundFormats(media::Int* count,
-        std::vector<media::audio::common::AudioFormat>* formats,
+Status AudioPolicyService::getSurroundFormats(Int* count,
+        std::vector<AudioFormatDescription>* formats,
         std::vector<bool>* formatsEnabled) {
     unsigned int numSurroundFormats = VALUE_OR_RETURN_BINDER_STATUS(
             convertIntegral<unsigned int>(count->value));
@@ -1850,7 +1913,8 @@
     numSurroundFormatsReq = std::min(numSurroundFormats, numSurroundFormatsReq);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(surroundFormats.get(), surroundFormats.get() + numSurroundFormatsReq,
-                         std::back_inserter(*formats), legacy2aidl_audio_format_t_AudioFormat)));
+                         std::back_inserter(*formats),
+                         legacy2aidl_audio_format_t_AudioFormatDescription)));
     formatsEnabled->insert(
             formatsEnabled->begin(),
             surroundFormatsEnabled.get(),
@@ -1860,7 +1924,7 @@
 }
 
 Status AudioPolicyService::getReportedSurroundFormats(
-        media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) {
+        Int* count, std::vector<AudioFormatDescription>* formats) {
     unsigned int numSurroundFormats = VALUE_OR_RETURN_BINDER_STATUS(
             convertIntegral<unsigned int>(count->value));
     if (numSurroundFormats > MAX_ITEMS_PER_LIST) {
@@ -1880,13 +1944,15 @@
     numSurroundFormatsReq = std::min(numSurroundFormats, numSurroundFormatsReq);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(surroundFormats.get(), surroundFormats.get() + numSurroundFormatsReq,
-                         std::back_inserter(*formats), legacy2aidl_audio_format_t_AudioFormat)));
+                         std::back_inserter(*formats),
+                         legacy2aidl_audio_format_t_AudioFormatDescription)));
     count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<uint32_t>(numSurroundFormats));
     return Status::ok();
 }
 
-Status AudioPolicyService::getHwOffloadEncodingFormatsSupportedForA2DP(
-        std::vector<media::audio::common::AudioFormat>* _aidl_return) {
+Status AudioPolicyService::getHwOffloadFormatsSupportedForBluetoothMedia(
+        const AudioDeviceDescription& deviceAidl,
+        std::vector<AudioFormatDescription>* _aidl_return) {
     std::vector<audio_format_t> formats;
 
     if (mAudioPolicyManager == NULL) {
@@ -1894,19 +1960,21 @@
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(deviceAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->getHwOffloadEncodingFormatsSupportedForA2DP(&formats)));
+            mAudioPolicyManager->getHwOffloadFormatsSupportedForBluetoothMedia(device, &formats)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
-            convertContainer<std::vector<media::audio::common::AudioFormat>>(
+            convertContainer<std::vector<AudioFormatDescription>>(
                     formats,
-                    legacy2aidl_audio_format_t_AudioFormat));
+                    legacy2aidl_audio_format_t_AudioFormatDescription));
     return Status::ok();
 }
 
 Status AudioPolicyService::setSurroundFormatEnabled(
-        media::audio::common::AudioFormat audioFormatAidl, bool enabled) {
+        const AudioFormatDescription& audioFormatAidl, bool enabled) {
     audio_format_t audioFormat = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioFormat_audio_format_t(audioFormatAidl));
+            aidl2legacy_AudioFormatDescription_audio_format_t(audioFormatAidl));
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
@@ -1916,34 +1984,43 @@
             mAudioPolicyManager->setSurroundFormatEnabled(audioFormat, enabled));
 }
 
-Status AudioPolicyService::setAssistantUid(int32_t uidAidl)
-{
-    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
-    Mutex::Autolock _l(mLock);
-    mUidPolicy->setAssistantUid(uid);
+Status convertInt32VectorToUidVectorWithLimit(
+        const std::vector<int32_t>& uidsAidl, std::vector<uid_t>& uids) {
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+        convertRangeWithLimit(uidsAidl.begin(),
+            uidsAidl.end(),
+            std::back_inserter(uids),
+            aidl2legacy_int32_t_uid_t,
+            MAX_ITEMS_PER_LIST)));
+
     return Status::ok();
 }
 
-Status AudioPolicyService::setHotwordDetectionServiceUid(int32_t uidAidl)
+Status AudioPolicyService::setAssistantServicesUids(const std::vector<int32_t>& uidsAidl)
 {
-    uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+    std::vector<uid_t> uids;
+    RETURN_IF_BINDER_ERROR(convertInt32VectorToUidVectorWithLimit(uidsAidl, uids));
+
     Mutex::Autolock _l(mLock);
-    mUidPolicy->setHotwordDetectionServiceUid(uid);
+    mUidPolicy->setAssistantUids(uids);
+    return Status::ok();
+}
+
+Status AudioPolicyService::setActiveAssistantServicesUids(
+        const std::vector<int32_t>& activeUidsAidl) {
+    std::vector<uid_t> activeUids;
+    RETURN_IF_BINDER_ERROR(convertInt32VectorToUidVectorWithLimit(activeUidsAidl, activeUids));
+
+    Mutex::Autolock _l(mLock);
+    mUidPolicy->setActiveAssistantUids(activeUids);
     return Status::ok();
 }
 
 Status AudioPolicyService::setA11yServicesUids(const std::vector<int32_t>& uidsAidl)
 {
-    size_t size = uidsAidl.size();
-    if (size > MAX_ITEMS_PER_LIST) {
-        size = MAX_ITEMS_PER_LIST;
-    }
     std::vector<uid_t> uids;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            convertRange(uidsAidl.begin(),
-                         uidsAidl.begin() + size,
-                         std::back_inserter(uids),
-                         aidl2legacy_int32_t_uid_t)));
+    RETURN_IF_BINDER_ERROR(convertInt32VectorToUidVectorWithLimit(uidsAidl, uids));
+
     Mutex::Autolock _l(mLock);
     mUidPolicy->setA11yUids(uids);
     return Status::ok();
@@ -1968,6 +2045,17 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::isUltrasoundSupported(bool* _aidl_return)
+{
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    Mutex::Autolock _l(mLock);
+    AutoCallerClear acc;
+    *_aidl_return = mAudioPolicyManager->isUltrasoundSupported();
+    return Status::ok();
+}
+
 Status AudioPolicyService::listAudioProductStrategies(
         std::vector<media::AudioProductStrategy>* _aidl_return) {
     AudioProductStrategyVector strategies;
@@ -2057,7 +2145,7 @@
 Status AudioPolicyService::setDevicesRoleForStrategy(
         int32_t strategyAidl,
         media::DeviceRole roleAidl,
-        const std::vector<media::AudioDevice>& devicesAidl) {
+        const std::vector<AudioDevice>& devicesAidl) {
     product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_product_strategy_t(strategyAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2097,7 +2185,7 @@
 Status AudioPolicyService::getDevicesForRoleAndStrategy(
         int32_t strategyAidl,
         media::DeviceRole roleAidl,
-        std::vector<media::AudioDevice>* _aidl_return) {
+        std::vector<AudioDevice>* _aidl_return) {
     product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_product_strategy_t(strategyAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2111,8 +2199,8 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getDevicesForRoleAndStrategy(strategy, role, devices)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return Status::ok();
 }
 
@@ -2123,11 +2211,11 @@
 }
 
 Status AudioPolicyService::setDevicesRoleForCapturePreset(
-        media::AudioSourceType audioSourceAidl,
+        AudioSource audioSourceAidl,
         media::DeviceRole roleAidl,
-        const std::vector<media::AudioDevice>& devicesAidl) {
+        const std::vector<AudioDevice>& devicesAidl) {
     audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(audioSourceAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_DeviceRole_device_role_t(roleAidl));
     AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2143,11 +2231,11 @@
 }
 
 Status AudioPolicyService::addDevicesRoleForCapturePreset(
-        media::AudioSourceType audioSourceAidl,
+        AudioSource audioSourceAidl,
         media::DeviceRole roleAidl,
-        const std::vector<media::AudioDevice>& devicesAidl) {
+        const std::vector<AudioDevice>& devicesAidl) {
     audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(audioSourceAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_DeviceRole_device_role_t(roleAidl));
     AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2163,11 +2251,11 @@
 }
 
 Status AudioPolicyService::removeDevicesRoleForCapturePreset(
-        media::AudioSourceType audioSourceAidl,
+        AudioSource audioSourceAidl,
         media::DeviceRole roleAidl,
-        const std::vector<media::AudioDevice>& devicesAidl) {
+        const std::vector<AudioDevice>& devicesAidl) {
     audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(audioSourceAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_DeviceRole_device_role_t(roleAidl));
     AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2182,10 +2270,10 @@
             mAudioPolicyManager->removeDevicesRoleForCapturePreset(audioSource, role, devices));
 }
 
-Status AudioPolicyService::clearDevicesRoleForCapturePreset(media::AudioSourceType audioSourceAidl,
+Status AudioPolicyService::clearDevicesRoleForCapturePreset(AudioSource audioSourceAidl,
                                                             media::DeviceRole roleAidl) {
     audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(audioSourceAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_DeviceRole_device_role_t(roleAidl));
 
@@ -2198,11 +2286,11 @@
 }
 
 Status AudioPolicyService::getDevicesForRoleAndCapturePreset(
-        media::AudioSourceType audioSourceAidl,
+        AudioSource audioSourceAidl,
         media::DeviceRole roleAidl,
-        std::vector<media::AudioDevice>* _aidl_return) {
+        std::vector<AudioDevice>* _aidl_return) {
     audio_source_t audioSource = VALUE_OR_RETURN_BINDER_STATUS(
-            aidl2legacy_AudioSourceType_audio_source_t(audioSourceAidl));
+            aidl2legacy_AudioSource_audio_source_t(audioSourceAidl));
     device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_DeviceRole_device_role_t(roleAidl));
     AudioDeviceTypeAddrVector devices;
@@ -2214,8 +2302,8 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             mAudioPolicyManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
-            convertContainer<std::vector<media::AudioDevice>>(devices,
-                                                              legacy2aidl_AudioDeviceTypeAddress));
+            convertContainer<std::vector<AudioDevice>>(devices,
+                                                       legacy2aidl_AudioDeviceTypeAddress));
     return Status::ok();
 }
 
@@ -2236,8 +2324,8 @@
 
 Status AudioPolicyService::canBeSpatialized(
         const std::optional<media::AudioAttributesInternal>& attrAidl,
-        const std::optional<media::AudioConfig>& configAidl,
-        const std::vector<media::AudioDevice>& devicesAidl,
+        const std::optional<AudioConfig>& configAidl,
+        const std::vector<AudioDevice>& devicesAidl,
         bool* _aidl_return) {
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
@@ -2250,7 +2338,8 @@
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
     if (configAidl.has_value()) {
         config = VALUE_OR_RETURN_BINDER_STATUS(
-                                    aidl2legacy_AudioConfig_audio_config_t(configAidl.value()));
+                                    aidl2legacy_AudioConfig_audio_config_t(configAidl.value(),
+                                    false /*isInput*/));
     }
     AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
             convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
@@ -2261,4 +2350,44 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::getDirectPlaybackSupport(const media::AudioAttributesInternal &attrAidl,
+                                                    const AudioConfig &configAidl,
+                                                    media::AudioDirectMode *_aidl_return) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    if (_aidl_return == nullptr) {
+        return binderStatusFromStatusT(BAD_VALUE);
+    }
+    audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    audio_config_t config = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
+    Mutex::Autolock _l(mLock);
+    *_aidl_return = static_cast<media::AudioDirectMode>(
+            VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_direct_mode_t_int32_t_mask(
+                    mAudioPolicyManager->getDirectPlaybackSupport(&attr, &config))));
+    return Status::ok();
+}
+
+Status AudioPolicyService::getDirectProfilesForAttributes(
+                                const media::AudioAttributesInternal& attrAidl,
+                                std::vector<media::audio::common::AudioProfile>* _aidl_return) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioAttributesInternal_audio_attributes_t(attrAidl));
+    AudioProfileVector audioProfiles;
+
+    Mutex::Autolock _l(mLock);
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->getDirectProfilesForAttributes(&attr, audioProfiles)));
+    *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+            convertContainer<std::vector<media::audio::common::AudioProfile>>(
+                audioProfiles, legacy2aidl_AudioProfile_common, false /*isInput*/));
+
+    return Status::ok();
+}
+
 } // namespace android
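
The new getDirectPlaybackSupport and getDirectProfilesForAttributes entry points follow the same shape as the other bindings in this file: AIDL arguments are converted to legacy types, the AudioPolicyManager is called under mLock, and the result is converted back into the _aidl_return out-parameter. Below is a standalone sketch of that shape only, using hypothetical stand-in types (Status, AidlDevice, LegacyDevice) rather than the real conversion helpers.

    #include <string>
    #include <vector>

    // Stand-in for binder::Status.
    struct Status {
        bool ok = true;
        static Status success() { return {}; }
    };

    // Hypothetical AIDL-side and legacy-side device records.
    struct AidlDevice   { int type; std::string address; };
    struct LegacyDevice { int type; std::string address; };

    static LegacyDevice aidl2legacy(const AidlDevice& d)   { return {d.type, d.address}; }
    static AidlDevice   legacy2aidl(const LegacyDevice& d) { return {d.type, d.address}; }

    // Shape of the binder methods above: convert inputs AIDL -> legacy, hand the
    // legacy values to the policy engine, then convert results legacy -> AIDL into
    // the _aidl_return out-parameter.
    static Status getDevicesSketch(const std::vector<AidlDevice>& devicesAidl,
                                   std::vector<AidlDevice>* _aidl_return) {
        std::vector<LegacyDevice> devices;
        for (const auto& d : devicesAidl) devices.push_back(aidl2legacy(d));   // convertContainer<>
        // ... the AudioPolicyManager call would go here, with failures short-circuiting
        //     the way RETURN_IF_BINDER_ERROR / VALUE_OR_RETURN_BINDER_STATUS do ...
        _aidl_return->clear();
        for (const auto& d : devices) _aidl_return->push_back(legacy2aidl(d)); // convertContainer<>
        return Status::success();
    }
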
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 12cd136..e7d945f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -39,6 +39,7 @@
 #include <media/AidlConversion.h>
 #include <media/AudioEffect.h>
 #include <media/AudioParameter.h>
+#include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
 #include <sensorprivacy/SensorPrivacyManager.h>
@@ -60,6 +61,120 @@
 
 static const String16 sManageAudioPolicyPermission("android.permission.MANAGE_AUDIO_POLICY");
 
+// Maps each IAudioPolicyService Binder transaction code to its method name.
+#define IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(onNewAudioModulesAvailable) \
+BINDER_METHOD_ENTRY(setDeviceConnectionState) \
+BINDER_METHOD_ENTRY(getDeviceConnectionState) \
+BINDER_METHOD_ENTRY(handleDeviceConfigChange) \
+BINDER_METHOD_ENTRY(setPhoneState) \
+BINDER_METHOD_ENTRY(setForceUse) \
+BINDER_METHOD_ENTRY(getForceUse) \
+BINDER_METHOD_ENTRY(getOutput) \
+BINDER_METHOD_ENTRY(getOutputForAttr) \
+BINDER_METHOD_ENTRY(startOutput) \
+BINDER_METHOD_ENTRY(stopOutput) \
+BINDER_METHOD_ENTRY(releaseOutput) \
+BINDER_METHOD_ENTRY(getInputForAttr) \
+BINDER_METHOD_ENTRY(startInput) \
+BINDER_METHOD_ENTRY(stopInput) \
+BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(initStreamVolume) \
+BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
+BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
+BINDER_METHOD_ENTRY(setVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getMaxVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getMinVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getStrategyForStream) \
+BINDER_METHOD_ENTRY(getDevicesForAttributes) \
+BINDER_METHOD_ENTRY(getOutputForEffect) \
+BINDER_METHOD_ENTRY(registerEffect) \
+BINDER_METHOD_ENTRY(unregisterEffect) \
+BINDER_METHOD_ENTRY(setEffectEnabled) \
+BINDER_METHOD_ENTRY(moveEffectsToIo) \
+BINDER_METHOD_ENTRY(isStreamActive) \
+BINDER_METHOD_ENTRY(isStreamActiveRemotely) \
+BINDER_METHOD_ENTRY(isSourceActive) \
+BINDER_METHOD_ENTRY(queryDefaultPreProcessing) \
+BINDER_METHOD_ENTRY(addSourceDefaultEffect) \
+BINDER_METHOD_ENTRY(addStreamDefaultEffect) \
+BINDER_METHOD_ENTRY(removeSourceDefaultEffect) \
+BINDER_METHOD_ENTRY(removeStreamDefaultEffect) \
+BINDER_METHOD_ENTRY(setSupportedSystemUsages) \
+BINDER_METHOD_ENTRY(setAllowedCapturePolicy) \
+BINDER_METHOD_ENTRY(getOffloadSupport) \
+BINDER_METHOD_ENTRY(isDirectOutputSupported) \
+BINDER_METHOD_ENTRY(listAudioPorts) \
+BINDER_METHOD_ENTRY(getAudioPort) \
+BINDER_METHOD_ENTRY(createAudioPatch) \
+BINDER_METHOD_ENTRY(releaseAudioPatch) \
+BINDER_METHOD_ENTRY(listAudioPatches) \
+BINDER_METHOD_ENTRY(setAudioPortConfig) \
+BINDER_METHOD_ENTRY(registerClient) \
+BINDER_METHOD_ENTRY(setAudioPortCallbacksEnabled) \
+BINDER_METHOD_ENTRY(setAudioVolumeGroupCallbacksEnabled) \
+BINDER_METHOD_ENTRY(acquireSoundTriggerSession) \
+BINDER_METHOD_ENTRY(releaseSoundTriggerSession) \
+BINDER_METHOD_ENTRY(getPhoneState) \
+BINDER_METHOD_ENTRY(registerPolicyMixes) \
+BINDER_METHOD_ENTRY(setUidDeviceAffinities) \
+BINDER_METHOD_ENTRY(removeUidDeviceAffinities) \
+BINDER_METHOD_ENTRY(setUserIdDeviceAffinities) \
+BINDER_METHOD_ENTRY(removeUserIdDeviceAffinities) \
+BINDER_METHOD_ENTRY(startAudioSource) \
+BINDER_METHOD_ENTRY(stopAudioSource) \
+BINDER_METHOD_ENTRY(setMasterMono) \
+BINDER_METHOD_ENTRY(getMasterMono) \
+BINDER_METHOD_ENTRY(getStreamVolumeDB) \
+BINDER_METHOD_ENTRY(getSurroundFormats) \
+BINDER_METHOD_ENTRY(getReportedSurroundFormats) \
+BINDER_METHOD_ENTRY(getHwOffloadFormatsSupportedForBluetoothMedia) \
+BINDER_METHOD_ENTRY(setSurroundFormatEnabled) \
+BINDER_METHOD_ENTRY(setAssistantServicesUids) \
+BINDER_METHOD_ENTRY(setActiveAssistantServicesUids) \
+BINDER_METHOD_ENTRY(setA11yServicesUids) \
+BINDER_METHOD_ENTRY(setCurrentImeUid) \
+BINDER_METHOD_ENTRY(isHapticPlaybackSupported) \
+BINDER_METHOD_ENTRY(isUltrasoundSupported) \
+BINDER_METHOD_ENTRY(listAudioProductStrategies) \
+BINDER_METHOD_ENTRY(getProductStrategyFromAudioAttributes) \
+BINDER_METHOD_ENTRY(listAudioVolumeGroups) \
+BINDER_METHOD_ENTRY(getVolumeGroupFromAudioAttributes) \
+BINDER_METHOD_ENTRY(setRttEnabled) \
+BINDER_METHOD_ENTRY(isCallScreenModeSupported) \
+BINDER_METHOD_ENTRY(setDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(removeDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(getDevicesForRoleAndStrategy) \
+BINDER_METHOD_ENTRY(setDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(addDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(removeDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(clearDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(getDevicesForRoleAndCapturePreset) \
+BINDER_METHOD_ENTRY(registerSoundTriggerCaptureStateListener) \
+BINDER_METHOD_ENTRY(getSpatializer) \
+BINDER_METHOD_ENTRY(canBeSpatialized) \
+BINDER_METHOD_ENTRY(getDirectPlaybackSupport) \
+BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+
+// singleton for Binder Method Statistics for IAudioPolicyService
+static auto& getIAudioPolicyServiceStatistics() {
+    using Code = int;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+        {(Code)media::BnAudioPolicyService::TRANSACTION_##ENTRY, #ENTRY},
+
+    static mediautils::MethodStatistics<Code> methodStatistics{
+        IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
+        METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+    };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+    return methodStatistics;
+}
+
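
The macro list above is an X-macro: a single list of method names is expanded with different definitions of BINDER_METHOD_ENTRY, here to build the code-to-name table inside MethodStatistics. A minimal standalone sketch of the technique, with hypothetical names (METHOD_LIST, kMethodNames) and hard-coded codes in place of the generated TRANSACTION_ constants:

    #include <iostream>
    #include <map>
    #include <string>

    // 1. One list that enumerates the entries.
    #define METHOD_LIST \
        METHOD_ENTRY(startOutput, 1) \
        METHOD_ENTRY(stopOutput, 2)

    // 2. Expand the list with a definition of METHOD_ENTRY that builds a code -> name map.
    #define METHOD_ENTRY(NAME, CODE) {CODE, #NAME},
    static const std::map<int, std::string> kMethodNames = { METHOD_LIST };
    #undef METHOD_ENTRY

    int main() {
        std::cout << kMethodNames.at(2) << "\n";  // prints "stopOutput"
        return 0;
    }
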
 // ----------------------------------------------------------------------------
 
 static AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface)
@@ -114,6 +229,13 @@
 
 void AudioPolicyService::onFirstRef()
 {
+    // Log an AudioPolicy "constructor" mediametrics event on first ref.
+    // This records the time it takes to load the audio modules and devices.
+    mediametrics::Defer defer([beginNs = systemTime()] {
+        mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_POLICY)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+            .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+            .record(); });
     {
         Mutex::Autolock _l(mLock);
 
@@ -366,6 +488,19 @@
     }
 }
 
+void AudioPolicyService::onVolumeRangeInitRequest()
+{
+    mOutputCommandThread->volRangeInitReqCommand();
+}
+
+void AudioPolicyService::doOnVolumeRangeInitRequest()
+{
+    Mutex::Autolock _l(mNotificationClientsLock);
+    for (size_t i = 0; i < mNotificationClients.size(); i++) {
+        mNotificationClients.valueAt(i)->onVolumeRangeInitRequest();
+    }
+}
+
 void AudioPolicyService::onCheckSpatializer()
 {
     Mutex::Autolock _l(mLock);
@@ -383,6 +518,8 @@
 {
     Mutex::Autolock _l(mLock);
 
+    ALOGI("%s mSpatializer %p level %d", __func__, mSpatializer.get(),
+            mSpatializer == nullptr ? -1 : (int)mSpatializer->getLevel());
+
     if (mSpatializer != nullptr) {
         // Note: mSpatializer != nullptr =>  mAudioPolicyManager != nullptr
         if (mSpatializer->getLevel() != media::SpatializationLevel::NONE) {
@@ -397,6 +534,7 @@
             if (status == NO_ERROR && currentOutput == newOutput) {
                 return;
             }
+            size_t numActiveTracks = countActiveClientsOnOutput_l(newOutput);
             mLock.unlock();
             // It is OK to call detachOutput() if none is already attached.
             mSpatializer->detachOutput();
@@ -404,7 +542,7 @@
                 mLock.lock();
                 return;
             }
-            status = mSpatializer->attachOutput(newOutput);
+            status = mSpatializer->attachOutput(newOutput, numActiveTracks);
             mLock.lock();
             if (status != NO_ERROR) {
                 mAudioPolicyManager->releaseSpatializerOutput(newOutput);
@@ -421,6 +559,43 @@
     }
 }
 
+size_t AudioPolicyService::countActiveClientsOnOutput_l(
+        audio_io_handle_t output, bool spatializedOnly) {
+    size_t count = 0;
+    for (size_t i = 0; i < mAudioPlaybackClients.size(); i++) {
+        auto client = mAudioPlaybackClients.valueAt(i);
+        if (client->io == output && client->active
+                && (!spatializedOnly || client->isSpatialized)) {
+            count++;
+        }
+    }
+    return count;
+}
+
+void AudioPolicyService::onUpdateActiveSpatializerTracks_l() {
+    if (mSpatializer == nullptr) {
+        return;
+    }
+    mOutputCommandThread->updateActiveSpatializerTracksCommand();
+}
+
+void AudioPolicyService::doOnUpdateActiveSpatializerTracks()
+{
+    sp<Spatializer> spatializer;
+    size_t activeClients;
+    {
+        Mutex::Autolock _l(mLock);
+        if (mSpatializer == nullptr) {
+            return;
+        }
+        spatializer = mSpatializer;
+        activeClients = countActiveClientsOnOutput_l(mSpatializer->getOutput());
+    }
+    if (spatializer != nullptr) {
+        spatializer->updateActiveTracks(activeClients);
+    }
+}
+
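
doOnUpdateActiveSpatializerTracks() copies the sp<Spatializer> and the active-client count while holding mLock, then calls updateActiveTracks() only after releasing it, so the Spatializer is never entered with the policy lock held. A standalone sketch of that copy-under-lock idiom, using std types and illustrative names (Service, Effect) rather than the real classes:

    #include <memory>
    #include <mutex>

    struct Effect { void update(size_t n) { (void)n; /* may take its own locks */ } };

    class Service {
    public:
        void refresh() {
            std::shared_ptr<Effect> effect;
            size_t active = 0;
            {
                std::lock_guard<std::mutex> l(mLock);
                if (!mEffect) return;
                effect = mEffect;          // keep the object alive past the lock
                active = mActiveClients;   // snapshot state under the lock
            }
            effect->update(active);        // call with mLock released to avoid lock-order issues
        }
    private:
        std::mutex mLock;
        std::shared_ptr<Effect> mEffect = std::make_shared<Effect>();
        size_t mActiveClients = 0;
    };
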
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
                                                 audio_patch_handle_t *handle,
                                                 int delayMs)
@@ -477,7 +652,7 @@
     }
 }
 
-void AudioPolicyService::NotificationClient::onAudioVolumeGroupChanged(volume_group_t group, 
+void AudioPolicyService::NotificationClient::onAudioVolumeGroupChanged(volume_group_t group,
                                                                       int flags)
 {
     if (mAudioPolicyServiceClient != 0 && mAudioVolumeGroupCallbacksEnabled) {
@@ -510,22 +685,24 @@
             int32_t eventAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(event));
             media::RecordClientInfo clientInfoAidl = VALUE_OR_RETURN_STATUS(
                     legacy2aidl_record_client_info_t_RecordClientInfo(*clientInfo));
-            media::AudioConfigBase clientConfigAidl = VALUE_OR_RETURN_STATUS(
-                    legacy2aidl_audio_config_base_t_AudioConfigBase(*clientConfig));
+            AudioConfigBase clientConfigAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_config_base_t_AudioConfigBase(
+                            *clientConfig, true /*isInput*/));
             std::vector<media::EffectDescriptor> clientEffectsAidl = VALUE_OR_RETURN_STATUS(
                     convertContainer<std::vector<media::EffectDescriptor>>(
                             clientEffects,
                             legacy2aidl_effect_descriptor_t_EffectDescriptor));
-            media::AudioConfigBase deviceConfigAidl = VALUE_OR_RETURN_STATUS(
-                    legacy2aidl_audio_config_base_t_AudioConfigBase(*deviceConfig));
+            AudioConfigBase deviceConfigAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_config_base_t_AudioConfigBase(
+                            *deviceConfig, true /*isInput*/));
             std::vector<media::EffectDescriptor> effectsAidl = VALUE_OR_RETURN_STATUS(
                     convertContainer<std::vector<media::EffectDescriptor>>(
                             effects,
                             legacy2aidl_effect_descriptor_t_EffectDescriptor));
             int32_t patchHandleAidl = VALUE_OR_RETURN_STATUS(
                     legacy2aidl_audio_patch_handle_t_int32_t(patchHandle));
-            media::AudioSourceType sourceAidl = VALUE_OR_RETURN_STATUS(
-                    legacy2aidl_audio_source_t_AudioSourceType(source));
+            media::audio::common::AudioSource sourceAidl = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_audio_source_t_AudioSource(source));
             return aidl_utils::statusTFromBinderStatus(
                     mAudioPolicyServiceClient->onRecordingConfigurationUpdate(eventAidl,
                                                                               clientInfoAidl,
@@ -557,6 +734,13 @@
     }
 }
 
+void AudioPolicyService::NotificationClient::onVolumeRangeInitRequest()
+{
+    if (mAudioPolicyServiceClient != 0 && isServiceUid(mUid)) {
+        mAudioPolicyServiceClient->onVolumeRangeInitRequest();
+    }
+}
+
 void AudioPolicyService::binderDied(const wp<IBinder>& who) {
     ALOGW("binderDied() %p, calling pid %d", who.unsafe_get(),
             IPCThreadState::self()->getCallingPid());
@@ -578,20 +762,27 @@
     char buffer[SIZE];
     String8 result;
 
-    snprintf(buffer, SIZE, "AudioPolicyManager: %p\n", mAudioPolicyManager);
+    snprintf(buffer, SIZE, "Supported System Usages:\n  ");
     result.append(buffer);
-    snprintf(buffer, SIZE, "Command Thread: %p\n", mAudioCommandThread.get());
-    result.append(buffer);
-
-    snprintf(buffer, SIZE, "Supported System Usages:\n");
-    result.append(buffer);
-    for (std::vector<audio_usage_t>::iterator it = mSupportedSystemUsages.begin();
-        it != mSupportedSystemUsages.end(); ++it) {
-        snprintf(buffer, SIZE, "\t%d\n", *it);
-        result.append(buffer);
+    std::stringstream msg;
+    size_t i = 0;
+    for (auto usage : mSupportedSystemUsages) {
+        if (i++ != 0) msg << ", ";
+        if (const char* strUsage = audio_usage_to_string(usage); strUsage) {
+            msg << strUsage;
+        } else {
+            msg << usage << " (unknown)";
+        }
     }
+    if (i == 0) {
+        msg << "None";
+    }
+    msg << std::endl;
+    result.append(msg.str().c_str());
 
     write(fd, result.string(), result.size());
+
+    mUidPolicy->dumpInternals(fd);
     return NO_ERROR;
 }
 
@@ -605,13 +796,20 @@
 {
 //    Go over all active clients and allow capture (does not force silence) in the
 //    following cases:
-//    The client is the assistant
+//    The client is in the active assistant list
+//         AND is TOP
+//               AND an accessibility service is TOP
+//                  AND source is either VOICE_RECOGNITION OR HOTWORD
+//               OR there is no active privacy sensitive capture or call
+//                          OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//                  AND source is VOICE_RECOGNITION OR HOTWORD
+//    The client is an assistant AND active assistant is not being used
 //        AND an accessibility service is on TOP or a RTT call is active
 //                AND the source is VOICE_RECOGNITION or HOTWORD
-//            OR uses VOICE_RECOGNITION AND is on TOP
-//                OR uses HOTWORD
-//            AND there is no active privacy sensitive capture or call
+//        OR there is no active privacy sensitive capture or call
 //                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//            AND is the most recent TOP assistant AND uses VOICE_RECOGNITION or HOTWORD
+//                OR no assistant is on TOP AND the source is HOTWORD
 //    OR The client is an accessibility service
 //        AND Is on TOP
 //                AND the source is VOICE_RECOGNITION or HOTWORD
@@ -639,13 +837,16 @@
     sp<AudioRecordClient> latestActive;
     sp<AudioRecordClient> topSensitiveActive;
     sp<AudioRecordClient> latestSensitiveActiveOrComm;
+    sp<AudioRecordClient> latestActiveAssistant;
 
     nsecs_t topStartNs = 0;
     nsecs_t latestStartNs = 0;
     nsecs_t topSensitiveStartNs = 0;
     nsecs_t latestSensitiveStartNs = 0;
+    nsecs_t latestAssistantStartNs = 0;
     bool isA11yOnTop = mUidPolicy->isA11yOnTop();
     bool isAssistantOnTop = false;
+    bool useActiveAssistantList = false;
     bool isSensitiveActive = false;
     bool isInCall = mPhoneState == AUDIO_MODE_IN_CALL;
     bool isInCommunication = mPhoneState == AUDIO_MODE_IN_COMMUNICATION;
@@ -680,6 +881,7 @@
         // for top or latest active to avoid masking regular clients started before
         if (!isAccessibility && !isVirtualSource(current->attributes.source)) {
             bool isAssistant = mUidPolicy->isAssistantUid(currentUid);
+            bool isActiveAssistant = mUidPolicy->isActiveAssistantUid(currentUid);
             bool isPrivacySensitive =
                     (current->attributes.flags & AUDIO_FLAG_CAPTURE_PRIVATE) != 0;
 
@@ -697,6 +899,14 @@
                 }
                 if (isAssistant) {
                     isAssistantOnTop = true;
+                    if (isActiveAssistant) {
+                        useActiveAssistantList = true;
+                    } else if (!useActiveAssistantList) {
+                        if (current->startTimeNs > latestAssistantStartNs) {
+                            latestActiveAssistant = current;
+                            latestAssistantStartNs = current->startTimeNs;
+                        }
+                    }
                 }
             }
             // Clients capturing for HOTWORD are not considered
@@ -728,7 +938,8 @@
         if (current->attributes.source != AUDIO_SOURCE_HOTWORD) {
             onlyHotwordActive = false;
         }
-        if (currentUid == mPhoneStateOwnerUid) {
+        if (currentUid == mPhoneStateOwnerUid &&
+                !isVirtualSource(current->attributes.source)) {
             isPhoneStateOwnerActive = true;
         }
     }
@@ -775,6 +986,8 @@
             current->attributionSource.uid == topActive->attributionSource.uid;
         bool isTopOrLatestSensitive = topSensitiveActive == nullptr ? false :
             current->attributionSource.uid == topSensitiveActive->attributionSource.uid;
+        bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
+            current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
 
         auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mLock) {
             uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
@@ -804,23 +1017,45 @@
         } else if (isVirtualSource(source)) {
             // Allow capture for virtual (remote submix, call audio TX or RX...) sources
             allowCapture = true;
-        } else if (mUidPolicy->isAssistantUid(currentUid)) {
+        } else if (!useActiveAssistantList && mUidPolicy->isAssistantUid(currentUid)) {
             // For assistant allow capture if:
-            //     An accessibility service is on TOP or a RTT call is active
+            //     Active assistant list is not being used
+            //     AND accessibility service is on TOP or a RTT call is active
             //            AND the source is VOICE_RECOGNITION or HOTWORD
-            //     OR is on TOP AND uses VOICE_RECOGNITION
-            //            OR uses HOTWORD
-            //         AND there is no active privacy sensitive capture or call
-            //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+            //     OR there is no active privacy sensitive capture or call
+            //          OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+            //            AND is latest TOP assistant AND
+            //               uses VOICE_RECOGNITION OR uses HOTWORD
+            //            OR there is no TOP assistant and uses HOTWORD
             if (isA11yOnTop || rttCallActive) {
                 if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
                     allowCapture = true;
                 }
-            } else {
-                if (((isAssistantOnTop && source == AUDIO_SOURCE_VOICE_RECOGNITION) ||
-                        source == AUDIO_SOURCE_HOTWORD)
-                        && !(isSensitiveActive && !current->canCaptureOutput)
+            } else if (!(isSensitiveActive && !current->canCaptureOutput)
+                    && canCaptureIfInCallOrCommunication(current)) {
+                if (isTopOrLatestAssistant
+                    && (source == AUDIO_SOURCE_VOICE_RECOGNITION
+                        || source == AUDIO_SOURCE_HOTWORD)) {
+                    allowCapture = true;
+                } else if (!isAssistantOnTop && (source == AUDIO_SOURCE_HOTWORD)) {
+                    allowCapture = true;
+                }
+            }
+        } else if (useActiveAssistantList && mUidPolicy->isActiveAssistantUid(currentUid)) {
+            // For assistant on active list and on top allow capture if:
+            //     An accessibility service is on TOP
+            //         AND the source is VOICE_RECOGNITION or HOTWORD
+            //     OR there is no active privacy sensitive capture or call
+            //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+            //         AND uses VOICE_RECOGNITION OR uses HOTWORD
+            if (isA11yOnTop) {
+                if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
+                    allowCapture = true;
+                }
+            } else if (!(isSensitiveActive && !current->canCaptureOutput)
                         && canCaptureIfInCallOrCommunication(current)) {
+                if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
+                {
                     allowCapture = true;
                 }
             }
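
Condensing the active-assistant branch above into a single predicate can make the comment easier to check against the code. The sketch below is illustrative only: it folds isSensitiveActive/canCaptureOutput into one "sensitiveBlocked" flag and canCaptureIfInCallOrCommunication into "callBlocked", and omits every other branch of updateUidStates_l().

    // Mirrors the useActiveAssistantList && isActiveAssistantUid(...) case only.
    enum class Source { HOTWORD, VOICE_RECOGNITION, MIC, OTHER };

    static bool isRecognitionSource(Source s) {
        return s == Source::HOTWORD || s == Source::VOICE_RECOGNITION;
    }

    // Allow when an accessibility service is on TOP, or when neither a privacy
    // sensitive capture nor a call blocks this client; either way the source
    // must be VOICE_RECOGNITION or HOTWORD.
    static bool activeAssistantCanCapture(bool a11yOnTop, bool sensitiveBlocked,
                                          bool callBlocked, Source source) {
        if (!isRecognitionSource(source)) return false;
        if (a11yOnTop) return true;
        return !sensitiveBlocked && !callBlocked;
    }
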
@@ -907,6 +1142,7 @@
     switch (source) {
         case AUDIO_SOURCE_FM_TUNER:
         case AUDIO_SOURCE_ECHO_REFERENCE:
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
             return false;
         default:
             break;
@@ -955,17 +1191,35 @@
         }
 
         dumpInternals(fd);
+
+        String8 actPtr = String8::format("AudioCommandThread: %p\n", mAudioCommandThread.get());
+        write(fd, actPtr.string(), actPtr.size());
         if (mAudioCommandThread != 0) {
             mAudioCommandThread->dump(fd);
         }
 
+        String8 octPtr = String8::format("OutputCommandThread: %p\n", mOutputCommandThread.get());
+        write(fd, octPtr.string(), octPtr.size());
+        if (mOutputCommandThread != 0) {
+            mOutputCommandThread->dump(fd);
+        }
+
         if (mAudioPolicyManager) {
             mAudioPolicyManager->dump(fd);
+        } else {
+            String8 apmPtr = String8::format("AudioPolicyManager: %p\n", mAudioPolicyManager);
+            write(fd, apmPtr.string(), apmPtr.size());
         }
 
         mPackageManager.dump(fd);
 
         dumpReleaseLock(mLock, locked);
+
+        {
+            std::string timeCheckStats = getIAudioPolicyServiceStatistics().dump();
+            dprintf(fd, "\nIAudioPolicyService binder call profile\n");
+            write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+        }
     }
     return NO_ERROR;
 }
@@ -1026,19 +1280,19 @@
         case TRANSACTION_isStreamActive:
         case TRANSACTION_isStreamActiveRemotely:
         case TRANSACTION_isSourceActive:
-        case TRANSACTION_getDevicesForStream:
         case TRANSACTION_registerPolicyMixes:
         case TRANSACTION_setMasterMono:
         case TRANSACTION_getSurroundFormats:
         case TRANSACTION_getReportedSurroundFormats:
         case TRANSACTION_setSurroundFormatEnabled:
-        case TRANSACTION_setAssistantUid:
+        case TRANSACTION_setAssistantServicesUids:
+        case TRANSACTION_setActiveAssistantServicesUids:
         case TRANSACTION_setA11yServicesUids:
         case TRANSACTION_setUidDeviceAffinities:
         case TRANSACTION_removeUidDeviceAffinities:
         case TRANSACTION_setUserIdDeviceAffinities:
         case TRANSACTION_removeUserIdDeviceAffinities:
-        case TRANSACTION_getHwOffloadEncodingFormatsSupportedForA2DP:
+        case TRANSACTION_getHwOffloadFormatsSupportedForBluetoothMedia:
         case TRANSACTION_listAudioVolumeGroups:
         case TRANSACTION_getVolumeGroupFromAudioAttributes:
         case TRANSACTION_acquireSoundTriggerSession:
@@ -1071,8 +1325,20 @@
             break;
     }
 
-    std::string tag("IAudioPolicyService command " + std::to_string(code));
-    TimeCheck check(tag.c_str());
+    const std::string methodName = getIAudioPolicyServiceStatistics().getMethodForCode(code);
+    mediautils::TimeCheck check(
+            std::string("IAudioPolicyService::").append(methodName),
+            [code, methodName](bool timeout, float elapsedMs) { // don't move methodName.
+        if (timeout) {
+            mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_POLICY)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT)
+                .set(AMEDIAMETRICS_PROP_METHODCODE, int64_t(code))
+                .set(AMEDIAMETRICS_PROP_METHODNAME, methodName.c_str())
+                .record();
+        } else {
+            getIAudioPolicyServiceStatistics().event(code, elapsedMs);
+        }
+    });
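
The TimeCheck above tags each incoming call with its method name and routes the result either to a mediametrics timeout event or to the per-method statistics. As a rough standalone model only (the real mediautils::TimeCheck also has a watchdog that can fire while the call is still running, which this sketch does not attempt), a scope guard with the same callback shape might look like:

    #include <chrono>
    #include <functional>
    #include <string>

    class ScopeTimer {
    public:
        // The callback receives whether the limit was exceeded and the elapsed time in ms.
        ScopeTimer(std::string tag, std::function<void(bool, float)> onEnd, float limitMs = 3000.f)
            : mTag(std::move(tag)), mOnEnd(std::move(onEnd)), mLimitMs(limitMs),
              mStart(std::chrono::steady_clock::now()) {}
        ~ScopeTimer() {
            const float elapsedMs = std::chrono::duration<float, std::milli>(
                    std::chrono::steady_clock::now() - mStart).count();
            mOnEnd(elapsedMs > mLimitMs, elapsedMs);  // report at end of the guarded scope
        }
    private:
        std::string mTag;
        std::function<void(bool, float)> mOnEnd;
        float mLimitMs;
        std::chrono::steady_clock::time_point mStart;
    };
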
 
     switch (code) {
         case SHELL_COMMAND_TRANSACTION: {
@@ -1394,6 +1660,9 @@
     }
 }
 
+void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused) {
+}
+
 void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
     updateUid(&mOverrideUids, uid, active, ActivityManager::PROCESS_STATE_UNKNOWN, insert);
 }
@@ -1470,6 +1739,66 @@
     return it != mA11yUids.end();
 }
 
+void AudioPolicyService::UidPolicy::setAssistantUids(const std::vector<uid_t>& uids) {
+    mAssistantUids.clear();
+    mAssistantUids = uids;
+}
+
+bool AudioPolicyService::UidPolicy::isAssistantUid(uid_t uid)
+{
+    std::vector<uid_t>::iterator it = find(mAssistantUids.begin(), mAssistantUids.end(), uid);
+    return it != mAssistantUids.end();
+}
+
+void AudioPolicyService::UidPolicy::setActiveAssistantUids(const std::vector<uid_t>& activeUids) {
+    mActiveAssistantUids = activeUids;
+}
+
+bool AudioPolicyService::UidPolicy::isActiveAssistantUid(uid_t uid)
+{
+    std::vector<uid_t>::iterator it = find(mActiveAssistantUids.begin(),
+            mActiveAssistantUids.end(), uid);
+    return it != mActiveAssistantUids.end();
+}
+
+void AudioPolicyService::UidPolicy::dumpInternals(int fd) {
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    auto appendUidsToResult = [&](const char* title, const std::vector<uid_t> &uids) {
+        snprintf(buffer, SIZE, "\t%s: \n", title);
+        result.append(buffer);
+        int counter = 0;
+        if (uids.empty()) {
+            snprintf(buffer, SIZE, "\t\tNo UIDs present.\n");
+            result.append(buffer);
+            return;
+        }
+        for (const auto &uid : uids) {
+            snprintf(buffer, SIZE, "\t\tUID[%d]=%d\n", counter++, uid);
+            result.append(buffer);
+        }
+    };
+
+    snprintf(buffer, SIZE, "UID Policy:\n");
+    result.append(buffer);
+    snprintf(buffer, SIZE, "\tmObserverRegistered=%s\n",(mObserverRegistered ? "True":"False"));
+    result.append(buffer);
+
+    appendUidsToResult("Assistants UIDs", mAssistantUids);
+    appendUidsToResult("Active Assistants UIDs", mActiveAssistantUids);
+
+    appendUidsToResult("Accessibility UIDs", mA11yUids);
+
+    snprintf(buffer, SIZE, "\tInput Method Service UID=%d\n", mCurrentImeUid);
+    result.append(buffer);
+
+    snprintf(buffer, SIZE, "\tIs RTT Enabled: %s\n", (mRttEnabled ? "True":"False"));
+    result.append(buffer);
+
+    write(fd, result.string(), result.size());
+}
+
 // -----------  AudioPolicyService::SensorPrivacyService implementation ----------
 void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
     SensorPrivacyManager spm;
@@ -1477,14 +1806,6 @@
     spm.addSensorPrivacyListener(this);
 }
 
-void AudioPolicyService::SensorPrivacyPolicy::registerSelfForMicrophoneOnly(int userId) {
-    SensorPrivacyManager spm;
-    mSensorPrivacyEnabled = spm.isIndividualSensorPrivacyEnabled(userId,
-            SensorPrivacyManager::INDIVIDUAL_SENSOR_MICROPHONE);
-    spm.addIndividualSensorPrivacyListener(userId,
-            SensorPrivacyManager::INDIVIDUAL_SENSOR_MICROPHONE, this);
-}
-
 void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
     SensorPrivacyManager spm;
     spm.removeSensorPrivacyListener(this);
@@ -1494,7 +1815,8 @@
     return mSensorPrivacyEnabled;
 }
 
-binder::Status AudioPolicyService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
+binder::Status AudioPolicyService::SensorPrivacyPolicy::onSensorPrivacyChanged(
+    int toggleType __unused, int sensor __unused, bool enabled) {
     mSensorPrivacyEnabled = enabled;
     sp<AudioPolicyService> service = mService.promote();
     if (service != nullptr) {
@@ -1636,12 +1958,16 @@
     while (!exitPending())
     {
         sp<AudioPolicyService> svc;
+        int numTimesBecameEmpty = 0;
         while (!mAudioCommands.isEmpty() && !exitPending()) {
             nsecs_t curTime = systemTime();
             // commands are sorted by increasing time stamp: execute them from index 0 and up
             if (mAudioCommands[0]->mTime <= curTime) {
                 sp<AudioCommand> command = mAudioCommands[0];
                 mAudioCommands.removeAt(0);
+                if (mAudioCommands.isEmpty()) {
+                    ++numTimesBecameEmpty;
+                }
                 mLastCommand = command;
 
                 switch (command->mCommand) {
@@ -1833,8 +2159,8 @@
                     mLock.lock();
                     } break;
 
-                case CHECK_SPATIALIZER: {
-                    ALOGV("AudioCommandThread() processing updateUID states");
+                case CHECK_SPATIALIZER_OUTPUT: {
+                    ALOGV("AudioCommandThread() processing check spatializer");
                     svc = mService.promote();
                     if (svc == 0) {
                         break;
@@ -1844,6 +2170,28 @@
                     mLock.lock();
                     } break;
 
+                case UPDATE_ACTIVE_SPATIALIZER_TRACKS: {
+                    ALOGV("AudioCommandThread() processing update spatializer tracks");
+                    svc = mService.promote();
+                    if (svc == 0) {
+                        break;
+                    }
+                    mLock.unlock();
+                    svc->doOnUpdateActiveSpatializerTracks();
+                    mLock.lock();
+                    } break;
+
+                case VOL_RANGE_INIT_REQUEST: {
+                    ALOGV("AudioCommandThread() processing volume range init request");
+                    svc = mService.promote();
+                    if (svc == 0) {
+                        break;
+                    }
+                    mLock.unlock();
+                    svc->doOnVolumeRangeInitRequest();
+                    mLock.lock();
+                    } break;
+
                 default:
                     ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
                 }
@@ -1867,8 +2215,9 @@
             }
         }
 
-        // release delayed commands wake lock if the queue is empty
-        if (mAudioCommands.isEmpty()) {
+        // Release the delayed commands wake lock once for each time the queue
+        // became empty while popping commands.
+        while (numTimesBecameEmpty--) {
             release_wake_lock(mName.string());
         }
 
@@ -1897,10 +2246,6 @@
     char buffer[SIZE];
     String8 result;
 
-    snprintf(buffer, SIZE, "AudioCommandThread %p Dump\n", this);
-    result.append(buffer);
-    write(fd, result.string(), result.size());
-
     const bool locked = dumpTryLock(mLock);
     if (!locked) {
         String8 result2(kCmdDeadlockedString);
@@ -2158,11 +2503,27 @@
 void AudioPolicyService::AudioCommandThread::checkSpatializerCommand()
 {
     sp<AudioCommand>command = new AudioCommand();
-    command->mCommand = CHECK_SPATIALIZER;
+    command->mCommand = CHECK_SPATIALIZER_OUTPUT;
     ALOGV("AudioCommandThread() adding check spatializer");
     sendCommand(command);
 }
 
+void AudioPolicyService::AudioCommandThread::updateActiveSpatializerTracksCommand()
+{
+    sp<AudioCommand>command = new AudioCommand();
+    command->mCommand = UPDATE_ACTIVE_SPATIALIZER_TRACKS;
+    ALOGV("AudioCommandThread() adding update active spatializer tracks");
+    sendCommand(command);
+}
+
+void AudioPolicyService::AudioCommandThread::volRangeInitReqCommand()
+{
+    sp<AudioCommand>command = new AudioCommand();
+    command->mCommand = VOL_RANGE_INIT_REQUEST;
+    ALOGV("AudioCommandThread() adding volume range init request");
+    sendCommand(command);
+}
+
 status_t AudioPolicyService::AudioCommandThread::sendCommand(sp<AudioCommand>& command, int delayMs)
 {
     {
@@ -2330,6 +2691,10 @@
 
         } break;
 
+        case VOL_RANGE_INIT_REQUEST: {
+            // command may come from different requests, do not filter
+        } break;
+
         default:
             break;
         }
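
volRangeInitReqCommand() and the new VOL_RANGE_INIT_REQUEST case use the existing AudioCommandThread pattern: callers enqueue a small command object, and the thread loop pops it and dispatches on the command type with the queue lock released around the actual work. A minimal standalone sketch of that pattern with illustrative names (CommandThread, Cmd); the real thread additionally honors delayed command times, command filtering, wake locks and exitPending().

    #include <condition_variable>
    #include <deque>
    #include <mutex>

    enum class Cmd { CHECK_SPATIALIZER_OUTPUT, UPDATE_ACTIVE_SPATIALIZER_TRACKS, VOL_RANGE_INIT_REQUEST };

    class CommandThread {
    public:
        void send(Cmd c) {                       // e.g. volRangeInitReqCommand()
            std::lock_guard<std::mutex> l(mLock);
            mQueue.push_back(c);
            mCond.notify_one();
        }
        void threadLoop() {                      // runs on the command thread
            std::unique_lock<std::mutex> l(mLock);
            for (;;) {
                mCond.wait(l, [this] { return !mQueue.empty(); });
                Cmd c = mQueue.front();
                mQueue.pop_front();
                l.unlock();                      // do the work without holding the queue lock
                dispatch(c);
                l.lock();
            }
        }
    private:
        void dispatch(Cmd c) {
            switch (c) {
                case Cmd::VOL_RANGE_INIT_REQUEST: /* svc->doOnVolumeRangeInitRequest() */ break;
                default: break;
            }
        }
        std::mutex mLock;
        std::condition_variable mCond;
        std::deque<Cmd> mQueue;
    };
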
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 27c4e1c..3a08cf8 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -49,6 +49,17 @@
 namespace android {
 
 using content::AttributionSourceState;
+using media::audio::common::AudioConfig;
+using media::audio::common::AudioConfigBase;
+using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioMode;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioUsage;
+using media::audio::common::AudioUuid;
+using media::audio::common::Int;
 
 // ----------------------------------------------------------------------------
 
@@ -71,25 +82,24 @@
     //
     binder::Status onNewAudioModulesAvailable() override;
     binder::Status setDeviceConnectionState(
-            const media::AudioDevice& device,
             media::AudioPolicyDeviceState state,
-            const std::string& deviceName,
-            media::audio::common::AudioFormat encodedFormat) override;
-    binder::Status getDeviceConnectionState(const media::AudioDevice& device,
+            const android::media::audio::common::AudioPort& port,
+            const AudioFormatDescription& encodedFormat) override;
+    binder::Status getDeviceConnectionState(const AudioDevice& device,
                                             media::AudioPolicyDeviceState* _aidl_return) override;
     binder::Status handleDeviceConfigChange(
-            const media::AudioDevice& device,
+            const AudioDevice& device,
             const std::string& deviceName,
-            media::audio::common::AudioFormat encodedFormat) override;
-    binder::Status setPhoneState(media::AudioMode state, int32_t uid) override;
+            const AudioFormatDescription& encodedFormat) override;
+    binder::Status setPhoneState(AudioMode state, int32_t uid) override;
     binder::Status setForceUse(media::AudioPolicyForceUse usage,
                                media::AudioPolicyForcedConfig config) override;
     binder::Status getForceUse(media::AudioPolicyForceUse usage,
                                media::AudioPolicyForcedConfig* _aidl_return) override;
-    binder::Status getOutput(media::AudioStreamType stream, int32_t* _aidl_return) override;
+    binder::Status getOutput(AudioStreamType stream, int32_t* _aidl_return) override;
     binder::Status getOutputForAttr(const media::AudioAttributesInternal& attr, int32_t session,
                                     const AttributionSourceState &attributionSource,
-                                    const media::AudioConfig& config,
+                                    const AudioConfig& config,
                                     int32_t flags, int32_t selectedDeviceId,
                                     media::GetOutputForAttrResponse* _aidl_return) override;
     binder::Status startOutput(int32_t portId) override;
@@ -98,32 +108,35 @@
     binder::Status getInputForAttr(const media::AudioAttributesInternal& attr, int32_t input,
                                    int32_t riid, int32_t session,
                                    const AttributionSourceState &attributionSource,
-                                   const media::AudioConfigBase& config, int32_t flags,
+                                   const AudioConfigBase& config, int32_t flags,
                                    int32_t selectedDeviceId,
                                    media::GetInputForAttrResponse* _aidl_return) override;
     binder::Status startInput(int32_t portId) override;
     binder::Status stopInput(int32_t portId) override;
     binder::Status releaseInput(int32_t portId) override;
-    binder::Status initStreamVolume(media::AudioStreamType stream, int32_t indexMin,
+    binder::Status initStreamVolume(AudioStreamType stream, int32_t indexMin,
                                     int32_t indexMax) override;
-    binder::Status setStreamVolumeIndex(media::AudioStreamType stream, int32_t device,
+    binder::Status setStreamVolumeIndex(AudioStreamType stream,
+                                        const AudioDeviceDescription& device,
                                         int32_t index) override;
-    binder::Status getStreamVolumeIndex(media::AudioStreamType stream, int32_t device,
+    binder::Status getStreamVolumeIndex(AudioStreamType stream,
+                                        const AudioDeviceDescription& device,
                                         int32_t* _aidl_return) override;
     binder::Status setVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
-                                               int32_t device, int32_t index) override;
+                                               const AudioDeviceDescription& device,
+                                               int32_t index) override;
     binder::Status getVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
-                                               int32_t device, int32_t* _aidl_return) override;
+                                               const AudioDeviceDescription& device,
+                                               int32_t* _aidl_return) override;
     binder::Status getMaxVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
                                                   int32_t* _aidl_return) override;
     binder::Status getMinVolumeIndexForAttributes(const media::AudioAttributesInternal& attr,
                                                   int32_t* _aidl_return) override;
-    binder::Status getStrategyForStream(media::AudioStreamType stream,
+    binder::Status getStrategyForStream(AudioStreamType stream,
                                         int32_t* _aidl_return) override;
-    binder::Status getDevicesForStream(media::AudioStreamType stream,
-                                       int32_t* _aidl_return) override;
     binder::Status getDevicesForAttributes(const media::AudioAttributesEx& attr,
-                                           std::vector<media::AudioDevice>* _aidl_return) override;
+                                           bool forVolume,
+                                           std::vector<AudioDevice>* _aidl_return) override;
     binder::Status getOutputForEffect(const media::EffectDescriptor& desc,
                                       int32_t* _aidl_return) override;
     binder::Status registerEffect(const media::EffectDescriptor& desc, int32_t io, int32_t strategy,
@@ -131,42 +144,42 @@
     binder::Status unregisterEffect(int32_t id) override;
     binder::Status setEffectEnabled(int32_t id, bool enabled) override;
     binder::Status moveEffectsToIo(const std::vector<int32_t>& ids, int32_t io) override;
-    binder::Status isStreamActive(media::AudioStreamType stream, int32_t inPastMs,
+    binder::Status isStreamActive(AudioStreamType stream, int32_t inPastMs,
                                   bool* _aidl_return) override;
-    binder::Status isStreamActiveRemotely(media::AudioStreamType stream, int32_t inPastMs,
+    binder::Status isStreamActiveRemotely(AudioStreamType stream, int32_t inPastMs,
                                           bool* _aidl_return) override;
-    binder::Status isSourceActive(media::AudioSourceType source, bool* _aidl_return) override;
+    binder::Status isSourceActive(AudioSource source, bool* _aidl_return) override;
     binder::Status queryDefaultPreProcessing(
-            int32_t audioSession, media::Int* count,
+            int32_t audioSession, Int* count,
             std::vector<media::EffectDescriptor>* _aidl_return) override;
-    binder::Status addSourceDefaultEffect(const media::AudioUuid& type,
+    binder::Status addSourceDefaultEffect(const AudioUuid& type,
                                           const std::string& opPackageName,
-                                          const media::AudioUuid& uuid, int32_t priority,
-                                          media::AudioSourceType source,
+                                          const AudioUuid& uuid, int32_t priority,
+                                          AudioSource source,
                                           int32_t* _aidl_return) override;
-    binder::Status addStreamDefaultEffect(const media::AudioUuid& type,
+    binder::Status addStreamDefaultEffect(const AudioUuid& type,
                                           const std::string& opPackageName,
-                                          const media::AudioUuid& uuid, int32_t priority,
-                                          media::AudioUsage usage, int32_t* _aidl_return) override;
+                                          const AudioUuid& uuid, int32_t priority,
+                                          AudioUsage usage, int32_t* _aidl_return) override;
     binder::Status removeSourceDefaultEffect(int32_t id) override;
     binder::Status removeStreamDefaultEffect(int32_t id) override;
     binder::Status setSupportedSystemUsages(
-            const std::vector<media::AudioUsage>& systemUsages) override;
+            const std::vector<AudioUsage>& systemUsages) override;
     binder::Status setAllowedCapturePolicy(int32_t uid, int32_t capturePolicy) override;
-    binder::Status getOffloadSupport(const media::AudioOffloadInfo& info,
+    binder::Status getOffloadSupport(const media::audio::common::AudioOffloadInfo& info,
                                      media::AudioOffloadMode* _aidl_return) override;
-    binder::Status isDirectOutputSupported(const media::AudioConfigBase& config,
+    binder::Status isDirectOutputSupported(const AudioConfigBase& config,
                                            const media::AudioAttributesInternal& attributes,
                                            bool* _aidl_return) override;
     binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
-                                  media::Int* count, std::vector<media::AudioPort>* ports,
+                                  Int* count, std::vector<media::AudioPort>* ports,
                                   int32_t* _aidl_return) override;
-    binder::Status getAudioPort(const media::AudioPort& port,
+    binder::Status getAudioPort(int portId,
                                 media::AudioPort* _aidl_return) override;
     binder::Status createAudioPatch(const media::AudioPatch& patch, int32_t handle,
                                     int32_t* _aidl_return) override;
     binder::Status releaseAudioPatch(int32_t handle) override;
-    binder::Status listAudioPatches(media::Int* count, std::vector<media::AudioPatch>* patches,
+    binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatch>* patches,
                                     int32_t* _aidl_return) override;
     binder::Status setAudioPortConfig(const media::AudioPortConfig& config) override;
     binder::Status registerClient(const sp<media::IAudioPolicyServiceClient>& client) override;
@@ -174,15 +187,15 @@
     binder::Status setAudioVolumeGroupCallbacksEnabled(bool enabled) override;
     binder::Status acquireSoundTriggerSession(media::SoundTriggerSession* _aidl_return) override;
     binder::Status releaseSoundTriggerSession(int32_t session) override;
-    binder::Status getPhoneState(media::AudioMode* _aidl_return) override;
+    binder::Status getPhoneState(AudioMode* _aidl_return) override;
     binder::Status registerPolicyMixes(const std::vector<media::AudioMix>& mixes,
                                        bool registration) override;
     binder::Status setUidDeviceAffinities(int32_t uid,
-                                          const std::vector<media::AudioDevice>& devices) override;
+                                          const std::vector<AudioDevice>& devices) override;
     binder::Status removeUidDeviceAffinities(int32_t uid) override;
     binder::Status setUserIdDeviceAffinities(
             int32_t userId,
-            const std::vector<media::AudioDevice>& devices) override;
+            const std::vector<AudioDevice>& devices) override;
     binder::Status removeUserIdDeviceAffinities(int32_t userId) override;
     binder::Status startAudioSource(const media::AudioPortConfig& source,
                                     const media::AudioAttributesInternal& attributes,
@@ -190,22 +203,25 @@
     binder::Status stopAudioSource(int32_t portId) override;
     binder::Status setMasterMono(bool mono) override;
     binder::Status getMasterMono(bool* _aidl_return) override;
-    binder::Status getStreamVolumeDB(media::AudioStreamType stream, int32_t index, int32_t device,
+    binder::Status getStreamVolumeDB(AudioStreamType stream, int32_t index,
+                                     const AudioDeviceDescription& device,
                                      float* _aidl_return) override;
-    binder::Status getSurroundFormats(media::Int* count,
-                                      std::vector<media::audio::common::AudioFormat>* formats,
+    binder::Status getSurroundFormats(Int* count,
+                                      std::vector<AudioFormatDescription>* formats,
                                       std::vector<bool>* formatsEnabled) override;
     binder::Status getReportedSurroundFormats(
-            media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) override;
-    binder::Status getHwOffloadEncodingFormatsSupportedForA2DP(
-            std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
-    binder::Status setSurroundFormatEnabled(media::audio::common::AudioFormat audioFormat,
+            Int* count, std::vector<AudioFormatDescription>* formats) override;
+    binder::Status getHwOffloadFormatsSupportedForBluetoothMedia(
+            const AudioDeviceDescription& device,
+            std::vector<AudioFormatDescription>* _aidl_return) override;
+    binder::Status setSurroundFormatEnabled(const AudioFormatDescription& audioFormat,
                                             bool enabled) override;
-    binder::Status setAssistantUid(int32_t uid) override;
-    binder::Status setHotwordDetectionServiceUid(int32_t uid) override;
+    binder::Status setAssistantServicesUids(const std::vector<int32_t>& uids) override;
+    binder::Status setActiveAssistantServicesUids(const std::vector<int32_t>& activeUids) override;
     binder::Status setA11yServicesUids(const std::vector<int32_t>& uids) override;
     binder::Status setCurrentImeUid(int32_t uid) override;
     binder::Status isHapticPlaybackSupported(bool* _aidl_return) override;
+    binder::Status isUltrasoundSupported(bool* _aidl_return) override;
     binder::Status listAudioProductStrategies(
             std::vector<media::AudioProductStrategy>* _aidl_return) override;
     binder::Status getProductStrategyFromAudioAttributes(const media::AudioAttributesEx& aa,
@@ -220,29 +236,29 @@
     binder::Status isCallScreenModeSupported(bool* _aidl_return) override;
     binder::Status setDevicesRoleForStrategy(
             int32_t strategy, media::DeviceRole role,
-            const std::vector<media::AudioDevice>& devices) override;
+            const std::vector<AudioDevice>& devices) override;
     binder::Status removeDevicesRoleForStrategy(int32_t strategy, media::DeviceRole role) override;
     binder::Status getDevicesForRoleAndStrategy(
             int32_t strategy, media::DeviceRole role,
-            std::vector<media::AudioDevice>* _aidl_return) override;
+            std::vector<AudioDevice>* _aidl_return) override;
     binder::Status setDevicesRoleForCapturePreset(
-            media::AudioSourceType audioSource,
+            AudioSource audioSource,
             media::DeviceRole role,
-            const std::vector<media::AudioDevice>& devices) override;
+            const std::vector<AudioDevice>& devices) override;
     binder::Status addDevicesRoleForCapturePreset(
-            media::AudioSourceType audioSource,
+            AudioSource audioSource,
             media::DeviceRole role,
-            const std::vector<media::AudioDevice>& devices) override;
+            const std::vector<AudioDevice>& devices) override;
     binder::Status removeDevicesRoleForCapturePreset(
-            media::AudioSourceType audioSource,
+            AudioSource audioSource,
             media::DeviceRole role,
-            const std::vector<media::AudioDevice>& devices) override;
-    binder::Status clearDevicesRoleForCapturePreset(media::AudioSourceType audioSource,
+            const std::vector<AudioDevice>& devices) override;
+    binder::Status clearDevicesRoleForCapturePreset(AudioSource audioSource,
                                                     media::DeviceRole role) override;
     binder::Status getDevicesForRoleAndCapturePreset(
-            media::AudioSourceType audioSource,
+            AudioSource audioSource,
             media::DeviceRole role,
-            std::vector<media::AudioDevice>* _aidl_return) override;
+            std::vector<AudioDevice>* _aidl_return) override;
     binder::Status registerSoundTriggerCaptureStateListener(
             const sp<media::ICaptureStateListener>& listener, bool* _aidl_return) override;
 
@@ -250,10 +266,17 @@
             media::GetSpatializerResponse* _aidl_return) override;
     binder::Status canBeSpatialized(
             const std::optional<media::AudioAttributesInternal>& attr,
-            const std::optional<media::AudioConfig>& config,
-            const std::vector<media::AudioDevice>& devices,
+            const std::optional<AudioConfig>& config,
+            const std::vector<AudioDevice>& devices,
             bool* _aidl_return) override;
 
+    binder::Status getDirectPlaybackSupport(const media::AudioAttributesInternal& attr,
+                                            const AudioConfig& config,
+                                            media::AudioDirectMode* _aidl_return) override;
+
+    binder::Status getDirectProfilesForAttributes(const media::AudioAttributesInternal& attr,
+                        std::vector<media::audio::common::AudioProfile>* _aidl_return) override;
+
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
     // IBinder::DeathRecipient
@@ -320,6 +343,9 @@
     void onRoutingUpdated();
     void doOnRoutingUpdated();
 
+    void onVolumeRangeInitRequest();
+    void doOnVolumeRangeInitRequest();
+
     /**
      * Spatializer SpatializerPolicyCallback implementation.
      * onCheckSpatializer() sends an event on mOutputCommandThread which executes
@@ -327,9 +353,13 @@
      * by audio policy manager and attach/detach the spatializer effect accordingly.
      */
     void onCheckSpatializer() override;
-    void onCheckSpatializer_l();
+    void onCheckSpatializer_l() REQUIRES(mLock);
     void doOnCheckSpatializer();
 
+    void onUpdateActiveSpatializerTracks_l() REQUIRES(mLock);
+    void doOnUpdateActiveSpatializerTracks();
+
     void setEffectSuspended(int effectId,
                             audio_session_t sessionId,
                             bool suspended);
@@ -367,8 +397,9 @@
     app_state_t apmStatFromAmState(int amState);
 
     bool isSupportedSystemUsage(audio_usage_t usage);
-    status_t validateUsage(audio_usage_t usage);
-    status_t validateUsage(audio_usage_t usage, const AttributionSourceState& attributionSource);
+    status_t validateUsage(const audio_attributes_t& attr);
+    status_t validateUsage(const audio_attributes_t& attr,
+                           const AttributionSourceState& attributionSource);
 
     void updateUidStates();
     void updateUidStates_l() REQUIRES(mLock);
@@ -394,7 +425,7 @@
     public:
         explicit UidPolicy(wp<AudioPolicyService> service)
                 : mService(service), mObserverRegistered(false),
-                  mAssistantUid(0), mHotwordDetectionServiceUid(0), mCurrentImeUid(0),
+                  mCurrentImeUid(0),
                   mRttEnabled(false) {}
 
         void registerSelf();
@@ -405,13 +436,10 @@
 
         bool isUidActive(uid_t uid);
         int getUidState(uid_t uid);
-        void setAssistantUid(uid_t uid) { mAssistantUid = uid; };
-        void setHotwordDetectionServiceUid(uid_t uid) { mHotwordDetectionServiceUid = uid; }
-        bool isAssistantUid(uid_t uid) const {
-            // The HotwordDetectionService is part of the Assistant package but runs with a separate
-            // (isolated) uid, so we check for either uid here.
-            return uid == mAssistantUid || uid == mHotwordDetectionServiceUid;
-        }
+        void setAssistantUids(const std::vector<uid_t>& uids);
+        bool isAssistantUid(uid_t uid);
+        void setActiveAssistantUids(const std::vector<uid_t>& activeUids);
+        bool isActiveAssistantUid(uid_t uid);
         void setA11yUids(const std::vector<uid_t>& uids) { mA11yUids.clear(); mA11yUids = uids; }
         bool isA11yUid(uid_t uid);
         bool isA11yOnTop();
@@ -425,7 +453,8 @@
         void onUidGone(uid_t uid, bool disabled) override;
         void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
-                int32_t capability);
+                int32_t capability) override;
+        void onUidProcAdjChanged(uid_t uid) override;
 
         void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
         void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
@@ -433,6 +462,8 @@
         void updateUid(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
                        uid_t uid, bool active, int state, bool insert);
 
+        void dumpInternals(int fd);
+
      private:
         void notifyService();
         void updateOverrideUid(uid_t uid, bool active, bool insert);
@@ -446,8 +477,8 @@
         bool mObserverRegistered = false;
         std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids;
         std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids;
-        uid_t mAssistantUid = -1;
-        uid_t mHotwordDetectionServiceUid = -1;
+        std::vector<uid_t> mAssistantUids;
+        std::vector<uid_t> mActiveAssistantUids;
         std::vector<uid_t> mA11yUids;
         uid_t mCurrentImeUid = -1;
         bool mRttEnabled = false;
@@ -463,12 +494,12 @@
                     : mService(service) {}
 
             void registerSelf();
-            void registerSelfForMicrophoneOnly(int userId);
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
 
-            binder::Status onSensorPrivacyChanged(bool enabled);
+            binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
+                                                  bool enabled);
 
         private:
             wp<AudioPolicyService> mService;
@@ -501,7 +532,9 @@
             AUDIO_MODULES_UPDATE,
             ROUTING_UPDATED,
             UPDATE_UID_STATES,
-            CHECK_SPATIALIZER
+            CHECK_SPATIALIZER_OUTPUT, // verify if spatializer effect should be created or moved
+            UPDATE_ACTIVE_SPATIALIZER_TRACKS, // update active track counts on spatializer output
+            VOL_RANGE_INIT_REQUEST, // request to reset the volume range indices
         };
 
         AudioCommandThread (String8 name, const wp<AudioPolicyService>& service);
@@ -551,6 +584,9 @@
                     void        routingChangedCommand();
                     void        updateUidStatesCommand();
                     void        checkSpatializerCommand();
+                    void        updateActiveSpatializerTracksCommand();
+                    void        volRangeInitReqCommand();
+
                     void        insertCommand_l(AudioCommand *command, int delayMs = 0);
     private:
         class AudioCommandData;
@@ -772,6 +808,8 @@
 
         virtual void onRoutingUpdated();
 
+        virtual void onVolumeRangeInitRequest();
+
         virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
 
         void setSoundTriggerCaptureState(bool active) override;
@@ -781,6 +819,9 @@
         status_t updateSecondaryOutputs(
                 const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
 
+        status_t setDeviceConnectedState(
+                const struct audio_port_v7 *port, bool connected) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
@@ -808,6 +849,7 @@
                                                     audio_patch_handle_t patchHandle,
                                                     audio_source_t source);
                             void      onRoutingUpdated();
+                            void      onVolumeRangeInitRequest();
                             void      setAudioPortCallbacksEnabled(bool enabled);
                             void setAudioVolumeGroupCallbacksEnabled(bool enabled);
 
@@ -937,12 +979,14 @@
                 AudioPlaybackClient(const audio_attributes_t attributes,
                       const audio_io_handle_t io, AttributionSourceState attributionSource,
                             const audio_session_t session, audio_port_handle_t portId,
-                            audio_port_handle_t deviceId, audio_stream_type_t stream) :
+                            audio_port_handle_t deviceId, audio_stream_type_t stream,
+                            bool isSpatialized) :
                     AudioClient(attributes, io, attributionSource, session, portId,
-                        deviceId), stream(stream) {}
+                        deviceId), stream(stream), isSpatialized(isSpatialized)  {}
                 ~AudioPlaybackClient() override = default;
 
         const audio_stream_type_t stream;
+        const bool isSpatialized;
     };
 
     void getPlaybackClientAndEffects(audio_port_handle_t portId,
@@ -972,6 +1016,17 @@
     void loadAudioPolicyManager();
     void unloadAudioPolicyManager();
 
+    /**
+     * Returns the number of active audio tracks on the specified output mixer.
+     * The query can be restricted to spatialized audio tracks only, or it can consider
+     * all tracks.
+     * @param output the I/O handle of the output mixer to consider
+     * @param spatializedOnly true if only spatialized tracks should be considered
+     * @return the number of active tracks.
+     */
+    size_t countActiveClientsOnOutput_l(
+        audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mLock);
+
     mutable Mutex mLock;    // prevents concurrent access to AudioPolicy manager functions changing
                             // device connection state  or routing
     // Note: lock acquisition order is always mLock > mEffectsLock:
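
The countActiveClientsOnOutput_l() helper added above is how the policy service derives the numActiveTracks value it later feeds to the Spatializer. A minimal sketch of that counting logic, using hypothetical stand-in types (Client, clientsByPortId) rather than the real AudioPolicyService members:

    // Hedged sketch only: simplified stand-ins for AudioPolicyService state, not the real code.
    #include <cstddef>
    #include <map>

    struct Client {
        int   output;         // stand-in for audio_io_handle_t
        bool  active;
        bool  isSpatialized;  // mirrors the new AudioPlaybackClient::isSpatialized flag
    };

    // Counterpart of countActiveClientsOnOutput_l(); the real method requires mLock to be held.
    size_t countActiveClientsOnOutput(const std::map<int, Client>& clientsByPortId,
                                      int output, bool spatializedOnly = true) {
        size_t count = 0;
        for (const auto& [portId, c] : clientsByPortId) {
            if (c.active && c.output == output && (!spatializedOnly || c.isSpatialized)) {
                ++count;
            }
        }
        return count;
    }
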
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 6745005..c199a76 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/MediaMetricsItem.h>
 #include <media/ShmemCompat.h>
 #include <mediautils/ServiceUtilities.h>
 #include <utils/Thread.h>
@@ -57,6 +58,19 @@
        if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
        std::move(_tmp.value()); })
 
+audio_channel_mask_t getMaxChannelMask(std::vector<audio_channel_mask_t> masks) {
+    uint32_t maxCount = 0;
+    audio_channel_mask_t maxMask = AUDIO_CHANNEL_NONE;
+    for (auto mask : masks) {
+        const size_t count = audio_channel_count_from_out_mask(mask);
+        if (count > maxCount) {
+            maxMask = mask;
+            maxCount = count;
+        }
+    }
+    return maxMask;
+}
+
 // ---------------------------------------------------------------------------
 
 class Spatializer::EngineCallbackHandler : public AHandler {
@@ -214,23 +228,94 @@
     status_t status = getHalParameter<false>(effect, SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED,
                                          &supportsHeadTracking);
     if (status != NO_ERROR) {
+        ALOGW("%s: cannot get SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED", __func__);
         return status;
     }
-// Disable head tracking until head sensor activity is properly controlled.
-//    mSupportsHeadTracking = supportsHeadTracking[0];
-    mSupportsHeadTracking = false;
+    mSupportsHeadTracking = supportsHeadTracking[0];
 
-    status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS, &mLevels);
+    std::vector<media::SpatializationLevel> spatializationLevels;
+    status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS,
+            &spatializationLevels);
     if (status != NO_ERROR) {
+        ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_LEVELS", __func__);
         return status;
     }
+    bool noneLevelFound = false;
+    bool activeLevelFound = false;
+    for (const auto spatializationLevel : spatializationLevels) {
+        if (!aidl_utils::isValidEnum(spatializationLevel)) {
+            ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+            continue;
+        }
+        if (spatializationLevel == media::SpatializationLevel::NONE) {
+            noneLevelFound = true;
+        } else {
+            activeLevelFound = true;
+        }
+        // we don't detect duplicates.
+        mLevels.emplace_back(spatializationLevel);
+    }
+    if (!noneLevelFound || !activeLevelFound) {
+        ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_LEVELS must include NONE"
+                " and another valid level",  __func__);
+        return BAD_VALUE;
+    }
+
+    std::vector<media::SpatializationMode> spatializationModes;
     status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
-                                &mSpatializationModes);
+            &spatializationModes);
     if (status != NO_ERROR) {
+        ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES", __func__);
         return status;
     }
-    return getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
-                                 &mChannelMasks);
+    for (const auto spatializationMode : spatializationModes) {
+        if (!aidl_utils::isValidEnum(spatializationMode)) {
+            ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+            continue;
+        }
+        // we don't detect duplicates.
+        mSpatializationModes.emplace_back(spatializationMode);
+    }
+    if (mSpatializationModes.empty()) {
+        ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES reports empty", __func__);
+        return BAD_VALUE;
+    }
+
+    std::vector<audio_channel_mask_t> channelMasks;
+    status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
+                                 &channelMasks);
+    if (status != NO_ERROR) {
+        ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS", __func__);
+        return status;
+    }
+    for (const auto channelMask : channelMasks) {
+        if (!audio_is_channel_mask_spatialized(channelMask)) {
+            ALOGW("%s: ignoring channelMask:%#x", __func__, channelMask);
+            continue;
+        }
+        // we don't detect duplicates.
+        mChannelMasks.emplace_back(channelMask);
+    }
+    if (mChannelMasks.empty()) {
+        ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS reports empty", __func__);
+        return BAD_VALUE;
+    }
+
+    // Currently we expose only RELATIVE_WORLD.
+    // This is a limitation of the head tracking library based on a UX choice.
+    mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::DISABLED);
+    if (mSupportsHeadTracking) {
+        mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
+    }
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
+        .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)getMaxChannelMask(mChannelMasks))
+        .set(AMEDIAMETRICS_PROP_LEVELS, aidl_utils::enumsToString(mLevels))
+        .set(AMEDIAMETRICS_PROP_MODES, aidl_utils::enumsToString(mSpatializationModes))
+        .set(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, aidl_utils::enumsToString(mHeadTrackingModes))
+        .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+        .record();
+    return NO_ERROR;
 }
 
 /** Gets the channel mask, sampling rate and format set for the spatializer input. */
@@ -238,12 +323,7 @@
     std::lock_guard lock(mLock);
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     // For now use highest supported channel count
-    uint32_t maxCount = 0;
-    for ( auto mask : mChannelMasks) {
-        if (audio_channel_count_from_out_mask(mask) > maxCount) {
-            config.channel_mask = mask;
-        }
-    }
+    config.channel_mask = getMaxChannelMask(mChannelMasks);
     return config;
 }
 
@@ -280,7 +360,7 @@
     if (levels == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-    levels->push_back(SpatializationLevel::NONE);
+    // SpatializationLevel::NONE is already required from the effect or we don't load it.
     levels->insert(levels->end(), mLevels.begin(), mLevels.end());
     return Status::ok();
 }
@@ -300,8 +380,9 @@
         callback = mSpatializerCallback;
 
         if (levelChanged && mEngine != nullptr) {
-            setEffectParameter_l(SPATIALIZER_PARAM_LEVEL, std::vector<SpatializationLevel>{level});
+            checkEngineState_l();
         }
+        checkSensorsState_l();
     }
 
     if (levelChanged) {
@@ -340,16 +421,7 @@
     if (modes == nullptr) {
         return binderStatusFromStatusT(BAD_VALUE);
     }
-
-    modes->push_back(SpatializerHeadTrackingMode::DISABLED);
-    if (mSupportsHeadTracking) {
-        if (mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
-            modes->push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
-            if (mScreenSensor != SpatializerPoseController::INVALID_SENSOR) {
-                modes->push_back(SpatializerHeadTrackingMode::RELATIVE_SCREEN);
-            }
-        }
-    }
+    modes->insert(modes->end(), mHeadTrackingModes.begin(), mHeadTrackingModes.end());
     return Status::ok();
 }
 
@@ -374,9 +446,8 @@
             break;
     }
 
-    if (mPoseController != nullptr) {
-        mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
-    }
+    checkPoseController_l();
+    checkSensorsState_l();
 
     return Status::ok();
 }
@@ -448,9 +519,10 @@
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
     std::lock_guard lock(mLock);
-    mHeadSensor = sensorHandle;
-    if (mPoseController != nullptr) {
-        mPoseController->setHeadSensor(mHeadSensor);
+    if (mHeadSensor != sensorHandle) {
+        mHeadSensor = sensorHandle;
+        checkPoseController_l();
+        checkSensorsState_l();
     }
     return Status::ok();
 }
@@ -461,9 +533,12 @@
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
     std::lock_guard lock(mLock);
-    mScreenSensor = sensorHandle;
-    if (mPoseController != nullptr) {
-        mPoseController->setScreenSensor(mScreenSensor);
+    if (mScreenSensor != sensorHandle) {
+        mScreenSensor = sensorHandle;
+        // TODO: consider a new method setHeadAndScreenSensor()
+        // because we generally set both at the same time.
+        // This will avoid duplicated work and recentering.
+        checkSensorsState_l();
     }
     return Status::ok();
 }
@@ -559,7 +634,6 @@
     auto vec = headToStage.toVector();
     LOG_ALWAYS_FATAL_IF(vec.size() != sHeadPoseKeys.size(),
             "%s invalid head to stage vector size %zu", __func__, vec.size());
-
     sp<AMessage> msg =
             new AMessage(EngineCallbackHandler::kWhatOnHeadToStagePose, mHandler);
     for (size_t i = 0 ; i < sHeadPoseKeys.size(); i++) {
@@ -616,6 +690,10 @@
             }
         }
         mActualHeadTrackingMode = spatializerMode;
+        if (mEngine != nullptr) {
+            setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                                 std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+        }
         callback = mHeadTrackingCallback;
     }
     if (callback != nullptr) {
@@ -623,8 +701,7 @@
     }
 }
 
-status_t Spatializer::attachOutput(audio_io_handle_t output) {
-    std::shared_ptr<SpatializerPoseController> poseController;
+status_t Spatializer::attachOutput(audio_io_handle_t output, size_t numActiveTracks) {
     bool outputChanged = false;
     sp<media::INativeSpatializerCallback> callback;
 
@@ -636,6 +713,7 @@
             // remove FX instance
             mEngine->setEnabled(false);
             mEngine.clear();
+            mPoseController.reset();
         }
         // create FX instance on output
         AttributionSourceState attributionSource = AttributionSourceState();
@@ -649,32 +727,17 @@
             return status;
         }
 
-        setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
-                             std::vector<SpatializationLevel>{mLevel});
-        setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                             std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
-
-        mEngine->setEnabled(true);
         outputChanged = mOutput != output;
         mOutput = output;
+        mNumActiveTracks = numActiveTracks;
 
+        checkEngineState_l();
         if (mSupportsHeadTracking) {
-            mPoseController = std::make_shared<SpatializerPoseController>(
-                    static_cast<SpatializerPoseController::Listener*>(this), 10ms, 50ms);
-            LOG_ALWAYS_FATAL_IF(mPoseController == nullptr,
-                                "%s could not allocate pose controller", __func__);
-
-            mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
-            mPoseController->setHeadSensor(mHeadSensor);
-            mPoseController->setScreenSensor(mScreenSensor);
-            mPoseController->setDisplayOrientation(mDisplayOrientation);
-            poseController = mPoseController;
+            checkPoseController_l();
+            checkSensorsState_l();
         }
         callback = mSpatializerCallback;
     }
-    if (poseController != nullptr) {
-        poseController->waitUntilCalculated();
-    }
 
     if (outputChanged && callback != nullptr) {
         callback->onOutputChanged(output);
@@ -699,7 +762,6 @@
         output = mOutput;
         mOutput = AUDIO_IO_HANDLE_NONE;
         mPoseController.reset();
-
         callback = mSpatializerCallback;
     }
 
@@ -709,6 +771,64 @@
     return output;
 }
 
+void Spatializer::updateActiveTracks(size_t numActiveTracks) {
+    std::lock_guard lock(mLock);
+    if (mNumActiveTracks != numActiveTracks) {
+        mNumActiveTracks = numActiveTracks;
+        checkEngineState_l();
+        checkSensorsState_l();
+    }
+}
+
+void Spatializer::checkSensorsState_l() {
+    if (mSupportsHeadTracking && mPoseController != nullptr) {
+        if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+            && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+            && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+            mPoseController->setHeadSensor(mHeadSensor);
+            mPoseController->setScreenSensor(mScreenSensor);
+        } else {
+            mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+            mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+        }
+    }
+}
+
+void Spatializer::checkEngineState_l() {
+    if (mEngine != nullptr) {
+        if (mLevel != SpatializationLevel::NONE && mNumActiveTracks > 0) {
+            mEngine->setEnabled(true);
+            setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
+                    std::vector<SpatializationLevel>{mLevel});
+            setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                    std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+        } else {
+            setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
+                    std::vector<SpatializationLevel>{SpatializationLevel::NONE});
+            mEngine->setEnabled(false);
+        }
+    }
+}
+
+void Spatializer::checkPoseController_l() {
+    bool isControllerNeeded = mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+            && mHeadSensor != SpatializerPoseController::INVALID_SENSOR;
+
+    if (isControllerNeeded && mPoseController == nullptr) {
+        mPoseController = std::make_shared<SpatializerPoseController>(
+                static_cast<SpatializerPoseController::Listener*>(this),
+                10ms, std::nullopt);
+        LOG_ALWAYS_FATAL_IF(mPoseController == nullptr,
+                            "%s could not allocate pose controller", __func__);
+        mPoseController->setDisplayOrientation(mDisplayOrientation);
+    } else if (!isControllerNeeded && mPoseController != nullptr) {
+        mPoseController.reset();
+    }
+    if (mPoseController != nullptr) {
+        mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
+    }
+}
+
 void Spatializer::calculateHeadPose() {
     ALOGV("%s", __func__);
     std::lock_guard lock(mLock);
@@ -725,11 +845,11 @@
     switch (event) {
         case AudioEffect::EVENT_FRAMES_PROCESSED: {
             int frames = info == nullptr ? 0 : *(int*)info;
-            // ALOGD("%s frames processed %d for me %p", __func__, frames, me);
+            ALOGV("%s frames processed %d for me %p", __func__, frames, me);
             me->postFramesProcessedMsg(frames);
         } break;
         default:
-            ALOGD("%s event %d", __func__, event);
+            ALOGV("%s event %d", __func__, event);
             break;
     }
 }
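
The new checkEngineState_l() and checkSensorsState_l() methods above centralize when the effect engine runs and when the head/screen sensors are actively polled. A hedged restatement of those gating conditions as standalone predicates (simplified enums and sensor handle, not the real Spatializer types):

    // Hedged sketch: mirrors the conditions in checkEngineState_l() and
    // checkSensorsState_l() above without reproducing the actual class.
    #include <cstddef>

    enum class Level { NONE, MULTICHANNEL };
    enum class Mode  { STATIC, RELATIVE_WORLD };
    constexpr int kInvalidSensor = -1;

    // The engine is enabled only when spatialization is requested and tracks are playing.
    bool engineShouldBeEnabled(Level level, size_t numActiveTracks) {
        return level != Level::NONE && numActiveTracks > 0;
    }

    // Sensors are monitored only when the engine would run, head tracking is desired,
    // and the client has supplied a valid head sensor handle.
    bool sensorsShouldBeActive(Level level, size_t numActiveTracks,
                               Mode desiredMode, int headSensor) {
        return engineShouldBeEnabled(level, numActiveTracks)
                && desiredMode != Mode::STATIC
                && headSensor != kInvalidSensor;
    }
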
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 4d77b78..29f4b08 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -135,7 +135,7 @@
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
-    status_t attachOutput(audio_io_handle_t output);
+    status_t attachOutput(audio_io_handle_t output, size_t numActiveTracks);
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is closed and the spatializer engine must be released.
      */
@@ -143,6 +143,8 @@
     /** Returns the output stream the spatializer is attached to. */
     audio_io_handle_t getOutput() const { std::lock_guard lock(mLock); return mOutput; }
 
+    void updateActiveTracks(size_t numActiveTracks);
+
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
     audio_config_base_t getAudioInConfig() const;
 
@@ -274,10 +276,32 @@
 
     void postFramesProcessedMsg(int frames);
 
+    /**
+     * Checks if head and screen sensors must be actively monitored based on
+     * spatializer state and playback activity and configures the pose controller
+     * accordingly.
+     */
+    void checkSensorsState_l() REQUIRES(mLock);
+
+    /**
+     * Checks if the head pose controller should be created or destroyed according
+     * to desired head tracking mode.
+     */
+    void checkPoseController_l() REQUIRES(mLock);
+
+    /**
+     * Checks if the spatializer effect should be enabled based on
+     * playback activity and requested level.
+     */
+    void checkEngineState_l() REQUIRES(mLock);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
-    SpatializerPolicyCallback* mPolicyCallback;
+    SpatializerPolicyCallback* const mPolicyCallback;
+
+    /** Currently there is only one version of the spatializer running */
+    const std::string mMetricsId = AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
 
     /** Mutex protecting internal state */
     mutable std::mutex mLock;
@@ -318,6 +342,7 @@
     float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
 
     std::vector<media::SpatializationLevel> mLevels;
+    std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
     std::vector<media::SpatializationMode> mSpatializationModes;
     std::vector<audio_channel_mask_t> mChannelMasks;
     bool mSupportsHeadTracking;
@@ -328,6 +353,8 @@
     sp<ALooper> mLooper;
     sp<EngineCallbackHandler> mHandler;
 
+    size_t mNumActiveTracks GUARDED_BY(mLock) = 0;
+
     static const std::vector<const char *> sHeadPoseKeys;
 };
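
Taken together, attachOutput(), updateActiveTracks() and detachOutput() define the lifecycle the audio policy service drives. A hedged usage sketch with a hypothetical SpatializerLike stub so the example stays self-contained:

    // Hedged sketch: SpatializerLike is a stand-in, not the real Spatializer class.
    #include <cstddef>
    #include <cstdint>

    struct SpatializerLike {
        int32_t attachOutput(int32_t output, size_t n) { (void)n; return output; }
        int32_t detachOutput() { return 0; }
        void    updateActiveTracks(size_t n) { (void)n; }
    };

    // Expected ordering: attach when the dedicated mixer opens, update as spatialized
    // tracks start and stop, detach when the mixer closes.
    void driveSpatializerLifecycle(SpatializerLike& s, int32_t mixerHandle) {
        s.attachOutput(mixerHandle, /*numActiveTracks=*/0);
        s.updateActiveTracks(2);   // two spatialized tracks became active
        s.updateActiveTracks(0);   // playback stopped; engine and sensors may idle
        s.detachOutput();
    }
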
 
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index ffedf63..304d44a 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "SpatializerPoseController"
 //#define LOG_NDEBUG 0
 #include <sensor/Sensor.h>
+#include <media/MediaMetricsItem.h>
 #include <utils/Log.h>
 #include <utils/SystemClock.h>
 
@@ -35,34 +36,38 @@
 namespace {
 
 // This is how fast, in m/s, we allow position to shift during rate-limiting.
-constexpr auto kMaxTranslationalVelocity = 2;
+constexpr float kMaxTranslationalVelocity = 2;
 
 // This is how fast, in rad/s, we allow rotation angle to shift during rate-limiting.
-constexpr auto kMaxRotationalVelocity = 4 * M_PI;
-
-// This should be set to the typical time scale that the translation sensors used drift in. This
-// means, loosely, for how long we can trust the reading to be "accurate enough". This would
-// determine the time constants used for high-pass filtering those readings. If the value is set
-// too high, we may experience drift. If it is set too low, we may experience poses tending toward
-// identity too fast.
-constexpr auto kTranslationalDriftTimeConstant = 20s;
-
-// This should be set to the typical time scale that the rotation sensors used drift in. This
-// means, loosely, for how long we can trust the reading to be "accurate enough". This would
-// determine the time constants used for high-pass filtering those readings. If the value is set
-// too high, we may experience drift. If it is set too low, we may experience poses tending toward
-// identity too fast.
-constexpr auto kRotationalDriftTimeConstant = 20s;
+constexpr float kMaxRotationalVelocity = 8;
 
 // This is how far into the future we predict the head pose, using linear extrapolation based on
 // twist (velocity). It should be set to a value that matches the characteristic durations of moving
 // one's head. The higher we set this, the more latency we are able to reduce, but setting this too
 // high will result in high prediction errors whenever the head accelerates (changes velocity).
-constexpr auto kPredictionDuration = 10ms;
+constexpr auto kPredictionDuration = 50ms;
 
-// After losing this many consecutive samples from either sensor, we would treat the measurement as
-// stale;
-constexpr auto kMaxLostSamples = 4;
+// After not getting a pose sample for this long, we would treat the measurement as stale.
+constexpr auto kFreshnessTimeout = 50ms;
+
+// Auto-recenter kicks in after the head has been still for this long.
+constexpr auto kAutoRecenterWindowDuration = 6s;
+
+// Auto-recenter considers head not still if translated by this much (in meters, approx).
+constexpr float kAutoRecenterTranslationThreshold = 0.1f;
+
+// Auto-recenter considers head not still if rotated by this much (in radians, approx).
+constexpr float kAutoRecenterRotationThreshold = 10.5f / 180 * M_PI;
+
+// Screen is considered to be unstable (not still) if it has moved significantly within the last
+// time window of this duration.
+constexpr auto kScreenStillnessWindowDuration = 3s;
+
+// Screen is considered to have moved significantly if translated by this much (in meters, approx).
+constexpr float kScreenStillnessTranslationThreshold = 0.1f;
+
+// Screen is considered to have moved significantly if rotated by this much (in radians, approx).
+constexpr float kScreenStillnessRotationThreshold = 7.0f / 180 * M_PI;
 
 // Time units for system clock ticks. This is what the Sensor Framework timestamps represent and
 // what we use for pose filtering.
@@ -71,20 +76,28 @@
 // How many ticks in a second.
 constexpr auto kTicksPerSecond = Ticks::period::den;
 
+std::string getSensorMetricsId(int32_t sensorId) {
+    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR).append(std::to_string(sensorId));
+}
+
 }  // namespace
 
 SpatializerPoseController::SpatializerPoseController(Listener* listener,
-                                                     std::chrono::microseconds sensorPeriod,
-                                                     std::chrono::microseconds maxUpdatePeriod)
+                                        std::chrono::microseconds sensorPeriod,
+                                        std::optional<std::chrono::microseconds> maxUpdatePeriod)
     : mListener(listener),
       mSensorPeriod(sensorPeriod),
       mProcessor(createHeadTrackingProcessor(HeadTrackingProcessor::Options{
               .maxTranslationalVelocity = kMaxTranslationalVelocity / kTicksPerSecond,
               .maxRotationalVelocity = kMaxRotationalVelocity / kTicksPerSecond,
-              .translationalDriftTimeConstant = Ticks(kTranslationalDriftTimeConstant).count(),
-              .rotationalDriftTimeConstant = Ticks(kRotationalDriftTimeConstant).count(),
-              .freshnessTimeout = Ticks(sensorPeriod * kMaxLostSamples).count(),
+              .freshnessTimeout = Ticks(kFreshnessTimeout).count(),
               .predictionDuration = Ticks(kPredictionDuration).count(),
+              .autoRecenterWindowDuration = Ticks(kAutoRecenterWindowDuration).count(),
+              .autoRecenterTranslationalThreshold = kAutoRecenterTranslationThreshold,
+              .autoRecenterRotationalThreshold = kAutoRecenterRotationThreshold,
+              .screenStillnessWindowDuration = Ticks(kScreenStillnessWindowDuration).count(),
+              .screenStillnessTranslationalThreshold = kScreenStillnessTranslationThreshold,
+              .screenStillnessRotationalThreshold = kScreenStillnessRotationThreshold,
       })),
       mPoseProvider(SensorPoseProvider::create("headtracker", this)),
       mThread([this, maxUpdatePeriod] {
@@ -93,8 +106,12 @@
               std::optional<HeadTrackingMode> modeIfChanged;
               {
                   std::unique_lock lock(mMutex);
-                  mCondVar.wait_for(lock, maxUpdatePeriod,
-                                    [this] { return mShouldExit || mShouldCalculate; });
+                  if (maxUpdatePeriod.has_value()) {
+                      mCondVar.wait_for(lock, maxUpdatePeriod.value(),
+                                        [this] { return mShouldExit || mShouldCalculate; });
+                  } else {
+                      mCondVar.wait(lock, [this] { return mShouldExit || mShouldCalculate; });
+                  }
                   if (mShouldExit) {
                       ALOGV("Exiting thread");
                       return;
@@ -132,9 +149,16 @@
 
 void SpatializerPoseController::setHeadSensor(int32_t sensor) {
     std::lock_guard lock(mMutex);
+    if (sensor == mHeadSensor) return;
+    ALOGV("%s: new sensor:%d  mHeadSensor:%d  mScreenSensor:%d",
+            __func__, sensor, mHeadSensor, mScreenSensor);
+
     // Stop current sensor, if valid and different from the other sensor.
     if (mHeadSensor != INVALID_SENSOR && mHeadSensor != mScreenSensor) {
         mPoseProvider->stopSensor(mHeadSensor);
+        mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
+            .record();
     }
 
     if (sensor != INVALID_SENSOR) {
@@ -142,6 +166,15 @@
             // Start new sensor.
             mHeadSensor =
                     mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
+            if (mHeadSensor != INVALID_SENSOR) {
+                auto sensor = mPoseProvider->getSensorByHandle(mHeadSensor);
+                std::string stringType = sensor ? sensor->getStringType().c_str() : "";
+                mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
+                    .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
+                    .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_HEAD)
+                    .set(AMEDIAMETRICS_PROP_TYPE, stringType)
+                    .record();
+            }
         } else {
             // Sensor is already enabled.
             mHeadSensor = mScreenSensor;
@@ -150,14 +183,21 @@
         mHeadSensor = INVALID_SENSOR;
     }
 
-    mProcessor->recenter(true, false);
+    mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */);
 }
 
 void SpatializerPoseController::setScreenSensor(int32_t sensor) {
     std::lock_guard lock(mMutex);
+    if (sensor == mScreenSensor) return;
+    ALOGV("%s: new sensor:%d  mHeadSensor:%d  mScreenSensor:%d",
+            __func__, sensor, mHeadSensor, mScreenSensor);
+
     // Stop current sensor, if valid and different from the other sensor.
     if (mScreenSensor != INVALID_SENSOR && mScreenSensor != mHeadSensor) {
         mPoseProvider->stopSensor(mScreenSensor);
+        mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
+            .record();
     }
 
     if (sensor != INVALID_SENSOR) {
@@ -165,6 +205,13 @@
             // Start new sensor.
             mScreenSensor =
                     mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
+            auto sensor = mPoseProvider->getSensorByHandle(mScreenSensor);
+            std::string stringType = sensor ? sensor->getStringType().c_str() : "";
+            mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
+                .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN)
+                .set(AMEDIAMETRICS_PROP_TYPE, stringType)
+                .record();
         } else {
             // Sensor is already enabled.
             mScreenSensor = mHeadSensor;
@@ -173,7 +220,7 @@
         mScreenSensor = INVALID_SENSOR;
     }
 
-    mProcessor->recenter(false, true);
+    mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */);
 }
 
 void SpatializerPoseController::setDesiredMode(HeadTrackingMode mode) {
@@ -227,7 +274,8 @@
                                        const std::optional<Twist3f>& twist, bool isNewReference) {
     std::lock_guard lock(mMutex);
     if (sensor == mHeadSensor) {
-        mProcessor->setWorldToHeadPose(timestamp, pose, twist.value_or(Twist3f()));
+        mProcessor->setWorldToHeadPose(timestamp, pose,
+                                       twist.value_or(Twist3f()) / kTicksPerSecond);
         if (isNewReference) {
             mProcessor->recenter(true, false);
         }
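
The processor options above are expressed per system-clock tick, which is why velocities are divided by kTicksPerSecond and durations are wrapped in Ticks(...).count(). A hedged, self-contained illustration of those conversions (assuming Ticks is std::chrono::nanoseconds, as the surrounding code implies):

    // Hedged example of the unit conversions; constants mirror the ones defined above.
    #include <chrono>
    #include <cstdio>

    int main() {
        using namespace std::chrono_literals;
        using Ticks = std::chrono::nanoseconds;               // assumption: system-clock ticks
        constexpr auto kTicksPerSecond = Ticks::period::den;  // 1'000'000'000

        constexpr float kMaxRotationalVelocity = 8;           // rad/s
        constexpr auto  kPredictionDuration = 50ms;

        // Velocities become per-tick quantities; durations become tick counts.
        const float maxRotationPerTick = kMaxRotationalVelocity / kTicksPerSecond;
        const long long predictionTicks = Ticks(kPredictionDuration).count();

        std::printf("rad/tick: %g, prediction horizon: %lld ticks\n",
                    maxRotationPerTick, predictionTicks);
        return 0;
    }
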
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 2b5c189..2c6d79a 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -60,10 +60,10 @@
      * Ctor.
      * sensorPeriod determines how often to receive updates from the sensors (input rate).
      * maxUpdatePeriod determines how often to produce an output when calculateAsync() isn't
-     * invoked.
+     * invoked; passing nullopt means an output is never produced.
      */
     SpatializerPoseController(Listener* listener, std::chrono::microseconds sensorPeriod,
-                               std::chrono::microseconds maxUpdatePeriod);
+                               std::optional<std::chrono::microseconds> maxUpdatePeriod);
 
     /** Dtor. */
     ~SpatializerPoseController();
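
The constructor's maxUpdatePeriod parameter is now optional: a value keeps the periodic output behaviour, while nullopt (as used by the Spatializer above) makes the worker thread wake only when calculateAsync() signals it. A hedged, simplified sketch of that wait strategy:

    // Hedged sketch of the two wake-up strategies; simplified, not the actual worker thread.
    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <optional>

    void waitForWork(std::condition_variable& cv, std::mutex& m, const bool& hasWork,
                     std::optional<std::chrono::microseconds> maxUpdatePeriod) {
        std::unique_lock lock(m);
        if (maxUpdatePeriod.has_value()) {
            // Periodic mode: produce an output at least once per maxUpdatePeriod.
            cv.wait_for(lock, maxUpdatePeriod.value(), [&] { return hasWork; });
        } else {
            // On-demand mode (nullopt): wake only when calculateAsync() sets hasWork.
            cv.wait(lock, [&] { return hasWork; });
        }
    }
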
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index b296fb0..2e220bc 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -25,13 +25,13 @@
         "libmedia_helper",
         "libutils",
         "libxml2",
-        "libpermission",
+        "framework-permission-aidl-cpp",
         "libbinder",
     ],
 
     static_libs: [
         "libaudiopolicycomponents",
-        "libgmock"
+        "libgmock",
     ],
 
     header_libs: [
@@ -65,6 +65,12 @@
         "liblog",
         "libmedia_helper",
         "libutils",
+        "android.media.audio.common.types-V1-cpp",
+        "libaudioclient_aidl_conversion",
+        "libstagefright_foundation",
+        "libshmemcompat",
+        "libshmemutil",
+        "audioclient-types-aidl-cpp",
     ],
 
     static_libs: ["libaudiopolicycomponents"],
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 84b40d2..057fa58 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -103,6 +103,11 @@
         ++mAudioPortListUpdateCount;
     }
 
+    status_t setDeviceConnectedState(
+            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+        return NO_ERROR;
+    }
+
     // Helper methods for tests
     size_t getActivePatchesCount() const { return mActivePatches.size(); }
 
@@ -133,7 +138,12 @@
     }
 
     size_t getRoutingUpdatedCounter() const {
-        return mRoutingUpdatedUpdateCount; }
+        return mRoutingUpdatedUpdateCount;
+    }
+
+    void onVolumeRangeInitRequest() override {
+
+    }
 
     status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
index a5ad9b1..7343b9b 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
@@ -28,19 +28,26 @@
 class AudioPolicyManagerTestClientForHdmi : public AudioPolicyManagerTestClient {
 public:
     String8 getParameters(audio_io_handle_t /* ioHandle */, const String8&  /* keys*/ ) override {
+        AudioParameter mAudioParameters;
+        std::string formats;
+        for (const auto& f : mSupportedFormats) {
+            if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+            formats += audio_format_to_string(f);
+        }
+        mAudioParameters.add(
+                String8(AudioParameter::keyStreamSupportedFormats),
+                String8(formats.c_str()));
+        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
+        mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
         return mAudioParameters.toString();
     }
 
     void addSupportedFormat(audio_format_t format) override {
-        mAudioParameters.add(
-                String8(AudioParameter::keyStreamSupportedFormats),
-                String8(audio_format_to_string(format)));
-        mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
-        mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
+        mSupportedFormats.insert(format);
     }
 
 private:
-    AudioParameter mAudioParameters;
+    std::set<audio_format_t> mSupportedFormats;
 };
 
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 4e0735b..8a85fee 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -85,6 +85,7 @@
                                         audio_patch_handle_t patchHandle __unused,
                                         audio_source_t source __unused) override { }
     void onRoutingUpdated() override { }
+    void onVolumeRangeInitRequest() override { }
     void setEffectSuspended(int effectId __unused,
                             audio_session_t sessionId __unused,
                             bool suspended __unused) {}
@@ -96,6 +97,10 @@
             const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
         return NO_INIT;
     }
+    status_t setDeviceConnectedState(
+            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+        return NO_INIT;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 7f67940..7441f20 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -33,6 +33,10 @@
     using AudioPolicyManager::releaseMsdOutputPatches;
     using AudioPolicyManager::setMsdOutputPatches;
     using AudioPolicyManager::getAudioPatches;
+    using AudioPolicyManager::getDirectPlaybackSupport;
+    using AudioPolicyManager::getDirectProfilesForAttributes;
+    using AudioPolicyManager::setDeviceConnectionState;
+    using AudioPolicyManager::deviceToAudioPort;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
 };
 
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index df4389b..10f8dc0 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -111,16 +111,17 @@
             continue;
         }
         std::string address = "11:22:33:44:55:66";
+        media::AudioPort aidlPort;
+        ASSERT_EQ(OK, manager.deviceToAudioPort(device->type(), address.c_str(), "" /*name*/,
+                                                 &aidlPort));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
         ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
-                device->type(), AUDIO_POLICY_DEVICE_STATE_AVAILABLE, address.c_str(),
-                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+                AUDIO_POLICY_DEVICE_STATE_AVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
         ASSERT_EQ(NO_ERROR, AudioSystem::setDeviceConnectionState(
-                device->type(), AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, address.c_str(),
-                "" /*device_name*/, AUDIO_FORMAT_DEFAULT));
+                AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, aidlPort.hal, AUDIO_FORMAT_DEFAULT));
         ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                 AudioSystem::getDeviceConnectionState(device->type(), address.c_str()));
     }
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a16ab7d..5429176 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -137,6 +137,8 @@
             audio_port_handle_t *portId = nullptr);
     PatchCountCheck snapshotPatchCount() { return PatchCountCheck(mClient.get()); }
 
+    void getAudioPorts(audio_port_type_t type, audio_port_role_t role,
+            std::vector<audio_port_v7>* ports);
     // Tries to find a device port. If 'foundPort' isn't nullptr,
     // will generate a failure if the port hasn't been found.
     bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
@@ -151,7 +153,7 @@
 void AudioPolicyManagerTest::SetUp() {
     mClient.reset(getClient());
     mManager.reset(new AudioPolicyTestManager(mClient.get()));
-    SetUpManagerConfig();  // Subclasses may want to customize the config.
+    ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig());  // Subclasses may want to customize the config.
     ASSERT_EQ(NO_ERROR, mManager->initialize());
     ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
@@ -216,13 +218,14 @@
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource = AttributionSourceState();
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     ASSERT_EQ(OK, mManager->getOutputForAttr(
                     &attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
-                    selectedDeviceId, portId, {}, &outputType));
+                    selectedDeviceId, portId, {}, &outputType, &isSpatialized));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
 }
@@ -255,21 +258,26 @@
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
 
-bool AudioPolicyManagerTest::findDevicePort(audio_port_role_t role,
-        audio_devices_t deviceType, const std::string &address, audio_port_v7 *foundPort) {
+void AudioPolicyManagerTest::getAudioPorts(audio_port_type_t type, audio_port_role_t role,
+        std::vector<audio_port_v7>* ports) {
     uint32_t numPorts = 0;
     uint32_t generation1;
     status_t ret;
 
-    ret = mManager->listAudioPorts(role, AUDIO_PORT_TYPE_DEVICE, &numPorts, nullptr, &generation1);
-    EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
-    if (HasFailure()) return false;
+    ret = mManager->listAudioPorts(role, type, &numPorts, nullptr, &generation1);
+    ASSERT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
 
     uint32_t generation2;
-    struct audio_port_v7 ports[numPorts];
-    ret = mManager->listAudioPorts(role, AUDIO_PORT_TYPE_DEVICE, &numPorts, ports, &generation2);
-    EXPECT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
-    EXPECT_EQ(generation1, generation2) << "Generations changed during ports retrieval";
+    ports->resize(numPorts);
+    ret = mManager->listAudioPorts(role, type, &numPorts, ports->data(), &generation2);
+    ASSERT_EQ(NO_ERROR, ret) << "mManager->listAudioPorts returned error";
+    ASSERT_EQ(generation1, generation2) << "Generations changed during ports retrieval";
+}
+
+bool AudioPolicyManagerTest::findDevicePort(audio_port_role_t role,
+        audio_devices_t deviceType, const std::string &address, audio_port_v7 *foundPort) {
+    std::vector<audio_port_v7> ports;
+    getAudioPorts(AUDIO_PORT_TYPE_DEVICE, role, &ports);
     if (HasFailure()) return false;
 
     for (const auto &port : ports) {
@@ -373,6 +381,7 @@
   protected:
     void SetUpManagerConfig() override;
     void TearDown() override;
+    AudioProfileVector getDirectProfilesForAttributes(const audio_attributes_t& attr);
 
     sp<DeviceDescriptor> mMsdOutputDevice;
     sp<DeviceDescriptor> mMsdInputDevice;
@@ -401,7 +410,7 @@
 
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
-    AudioPolicyManagerTest::SetUpManagerConfig();
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUpManagerConfig());
     AudioPolicyConfig& config = mManager->getConfig();
     mMsdOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_BUS);
     sp<AudioProfile> pcmOutputProfile = new AudioProfile(
@@ -502,6 +511,13 @@
     AudioPolicyManagerTest::TearDown();
 }
 
+AudioProfileVector AudioPolicyManagerTestMsd::getDirectProfilesForAttributes(
+                                                    const audio_attributes_t& attr) {
+    AudioProfileVector audioProfilesVector;
+    mManager->getDirectProfilesForAttributes(&attr, audioProfilesVector);
+    return audioProfilesVector;
+}
+
 TEST_P(AudioPolicyManagerTestMsd, InitSuccess) {
     ASSERT_TRUE(mMsdOutputDevice);
     ASSERT_TRUE(mMsdInputDevice);
@@ -642,6 +658,200 @@
     ASSERT_EQ(1, patchCount.deltaFromSnapshot());
 }
 
+TEST_P(AudioPolicyManagerTestMsd, GetDirectProfilesForAttributesWithMsd) {
+    const audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    // count expected direct profiles for the default device
+    int countDirectProfilesPrimary = 0;
+    const auto& primary = mManager->getConfig().getHwModules()
+            .getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY);
+    for (const auto outputProfile : primary->getOutputProfiles()) {
+        if (outputProfile->asAudioPort()->isDirectOutput()) {
+            countDirectProfilesPrimary += outputProfile->asAudioPort()->getAudioProfiles().size();
+        }
+    }
+
+    // count expected direct profiles for the msd device
+    int countDirectProfilesMsd = 0;
+    const auto& msd = mManager->getConfig().getHwModules()
+            .getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+    for (const auto outputProfile : msd->getOutputProfiles()) {
+        if (outputProfile->asAudioPort()->isDirectOutput()) {
+            countDirectProfilesMsd += outputProfile->asAudioPort()->getAudioProfiles().size();
+        }
+    }
+
+    // before setting up MSD audio patches we only have the primary hal direct profiles
+    ASSERT_EQ(countDirectProfilesPrimary, getDirectProfilesForAttributes(attr).size());
+
+    DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    outputDevices.remove(mMsdOutputDevice);
+    mManager->setMsdOutputPatches(&outputDevices);
+
+    // after setting up MSD audio patches the MSD direct profiles are added
+    ASSERT_EQ(countDirectProfilesPrimary + countDirectProfilesMsd,
+                getDirectProfilesForAttributes(attr).size());
+
+    mManager->releaseMsdOutputPatches(outputDevices);
+    // releasing the MSD audio patches gets us back to the primary hal direct profiles only
+    ASSERT_EQ(countDirectProfilesPrimary, getDirectProfilesForAttributes(attr).size());
+}
+
+TEST_P(AudioPolicyManagerTestMsd, IsDirectPlaybackSupportedWithMsd) {
+    const audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    audio_config_base_t directConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    directConfig.format = AUDIO_FORMAT_DTS;
+    directConfig.sample_rate = 48000;
+    directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_base_t nonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    nonDirectConfig.sample_rate = 48000;
+    nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_base_t nonExistentConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
+    nonExistentConfig.sample_rate = 48000;
+    nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_base_t msdDirectConfig1 = AUDIO_CONFIG_BASE_INITIALIZER;
+    msdDirectConfig1.format = AUDIO_FORMAT_AC3;
+    msdDirectConfig1.sample_rate = 48000;
+    msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_base_t msdDirectConfig2 = AUDIO_CONFIG_BASE_INITIALIZER;
+    msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
+    msdDirectConfig2.sample_rate = 48000;
+    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_base_t msdNonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+    msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    msdNonDirectConfig.sample_rate = 96000;
+    msdNonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+    // before setting MSD patches the direct MSD configs return false
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+
+    DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    outputDevices.remove(mMsdOutputDevice);
+    mManager->setMsdOutputPatches(&outputDevices);
+
+    ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+    // after setting MSD patches the direct MSD configs return true
+    ASSERT_TRUE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+    ASSERT_TRUE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+
+    mManager->releaseMsdOutputPatches(outputDevices);
+
+    ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+    // AFTER releasing MSD patches the direct MSD configs return false
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+    ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+}
+
+TEST_P(AudioPolicyManagerTestMsd, GetDirectPlaybackSupportWithMsd) {
+    const audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+        AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+    audio_config_t directConfig = AUDIO_CONFIG_INITIALIZER;
+    directConfig.format = AUDIO_FORMAT_DTS;
+    directConfig.sample_rate = 48000;
+    directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_t nonDirectConfig = AUDIO_CONFIG_INITIALIZER;
+    nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    nonDirectConfig.sample_rate = 48000;
+    nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_t nonExistentConfig = AUDIO_CONFIG_INITIALIZER;
+    nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
+    nonExistentConfig.sample_rate = 48000;
+    nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_t msdDirectConfig1 = AUDIO_CONFIG_INITIALIZER;
+    msdDirectConfig1.format = AUDIO_FORMAT_AC3;
+    msdDirectConfig1.sample_rate = 48000;
+    msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+    audio_config_t msdDirectConfig2 = AUDIO_CONFIG_INITIALIZER;
+    msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
+    msdDirectConfig2.sample_rate = 48000;
+    msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    audio_config_t msdNonDirectConfig = AUDIO_CONFIG_INITIALIZER;
+    msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    msdNonDirectConfig.sample_rate = 96000;
+    msdNonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &directConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+    // before setting MSD patches the direct MSD configs return AUDIO_DIRECT_NOT_SUPPORTED
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+
+    DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+    // Remove MSD output device to avoid patching to itself
+    outputDevices.remove(mMsdOutputDevice);
+    mManager->setMsdOutputPatches(&outputDevices);
+
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &directConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+    // after setting MSD patches the direct MSD configs return values according to their flags
+    ASSERT_EQ(AUDIO_DIRECT_OFFLOAD_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+
+    mManager->releaseMsdOutputPatches(outputDevices);
+
+    ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &directConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+    // after releasing MSD patches the direct MSD configs return AUDIO_DIRECT_NOT_SUPPORTED
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+    ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+                mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+}
+
 class AudioPolicyManagerTestWithConfigurationFile : public AudioPolicyManagerTest {
 protected:
     void SetUpManagerConfig() override;
@@ -660,6 +870,7 @@
 void AudioPolicyManagerTestWithConfigurationFile::SetUpManagerConfig() {
     status_t status = deserializeAudioPolicyFile(getConfigFile().c_str(), &mManager->getConfig());
     ASSERT_EQ(NO_ERROR, status);
+    mManager->getConfig().setSource(getConfigFile());
 }
 
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, InitSuccess) {
@@ -670,6 +881,44 @@
     dumpToLog();
 }
 
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, ListAudioPortsHasFlags) {
+    // Create an input for VOIP TX because it's not opened automatically like outputs are.
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mixPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+    audio_attributes_t attr = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, 1, &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
+                    AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+
+    std::vector<audio_port_v7> ports;
+    ASSERT_NO_FATAL_FAILURE(
+            getAudioPorts(AUDIO_PORT_TYPE_MIX, AUDIO_PORT_ROLE_NONE, &ports));
+    EXPECT_NE(0, ports.size());
+    bool hasFlags = false, foundPrimary = false, foundVoipRx = false, foundVoipTx = false;
+    for (const auto& port : ports) {
+        if ((port.active_config.config_mask & AUDIO_PORT_CONFIG_FLAGS) != 0) {
+            hasFlags = true;
+            if (port.role == AUDIO_PORT_ROLE_SOURCE) {
+                if ((port.active_config.flags.output & AUDIO_OUTPUT_FLAG_PRIMARY) != 0) {
+                    foundPrimary = true;
+                }
+                if ((port.active_config.flags.output & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
+                    foundVoipRx = true;
+                }
+            } else if (port.role == AUDIO_PORT_ROLE_SINK) {
+                if ((port.active_config.flags.input & AUDIO_INPUT_FLAG_VOIP_TX) != 0) {
+                    foundVoipTx = true;
+                }
+            }
+        }
+    }
+    EXPECT_TRUE(hasFlags);
+    EXPECT_TRUE(foundPrimary);
+    EXPECT_TRUE(foundVoipRx);
+    EXPECT_TRUE(foundVoipTx);
+}
+
 using PolicyMixTuple = std::tuple<audio_usage_t, audio_source_t, uint32_t>;
 
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
@@ -803,7 +1052,8 @@
 }
 
 class AudioPolicyManagerTestForHdmi
-        : public AudioPolicyManagerTestWithConfigurationFile {
+        : public AudioPolicyManagerTestWithConfigurationFile,
+          public testing::WithParamInterface<audio_format_t> {
 protected:
     void SetUp() override;
     std::string getConfigFile() override { return sTvConfig; }
@@ -824,7 +1074,8 @@
         "test_settop_box_surround_configuration.xml";
 
 void AudioPolicyManagerTestForHdmi::SetUp() {
-    AudioPolicyManagerTest::SetUp();
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUp());
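+    // AC3 is registered in addition to E_AC3 so the parameterized surround-format cases
+    // below can cover both formats.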
+    mClient->addSupportedFormat(AUDIO_FORMAT_AC3);
     mClient->addSupportedFormat(AUDIO_FORMAT_E_AC3);
     mManager->setDeviceConnectionState(
             AUDIO_DEVICE_OUT_HDMI, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
@@ -914,76 +1165,90 @@
     return formats;
 }
 
-TEST_F(AudioPolicyManagerTestForHdmi, GetSurroundFormatsReturnsSupportedFormats) {
+TEST_P(AudioPolicyManagerTestForHdmi, GetSurroundFormatsReturnsSupportedFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
     auto surroundFormats = getSurroundFormatsHelper();
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
+    ASSERT_EQ(1, surroundFormats.count(GetParam()));
 }
 
-TEST_F(AudioPolicyManagerTestForHdmi,
+TEST_P(AudioPolicyManagerTestForHdmi,
         GetSurroundFormatsReturnsManipulatedFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
 
     status_t ret =
-            mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+            mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
     auto surroundFormats = getSurroundFormatsHelper();
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
-    ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+    ASSERT_EQ(1, surroundFormats.count(GetParam()));
+    ASSERT_FALSE(surroundFormats[GetParam()]);
 
-    ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
+    ret = mManager->setSurroundFormatEnabled(GetParam(), true /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
     surroundFormats = getSurroundFormatsHelper();
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
-    ASSERT_TRUE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+    ASSERT_EQ(1, surroundFormats.count(GetParam()));
+    ASSERT_TRUE(surroundFormats[GetParam()]);
 
-    ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+    ret = mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
     surroundFormats = getSurroundFormatsHelper();
-    ASSERT_EQ(1, surroundFormats.count(AUDIO_FORMAT_E_AC3));
-    ASSERT_FALSE(surroundFormats[AUDIO_FORMAT_E_AC3]);
+    ASSERT_EQ(1, surroundFormats.count(GetParam()));
+    ASSERT_FALSE(surroundFormats[GetParam()]);
 }
 
-TEST_F(AudioPolicyManagerTestForHdmi,
+TEST_P(AudioPolicyManagerTestForHdmi,
         ListAudioPortsReturnManipulatedHdmiFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
 
-    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/));
+    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/));
     auto formats = getFormatsFromPorts();
-    ASSERT_EQ(0, formats.count(AUDIO_FORMAT_E_AC3));
+    ASSERT_EQ(0, formats.count(GetParam()));
 
-    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/));
+    ASSERT_EQ(NO_ERROR, mManager->setSurroundFormatEnabled(GetParam(), true /*enabled*/));
     formats = getFormatsFromPorts();
-    ASSERT_EQ(1, formats.count(AUDIO_FORMAT_E_AC3));
+    ASSERT_EQ(1, formats.count(GetParam()));
 }
 
-TEST_F(AudioPolicyManagerTestForHdmi,
+TEST_P(AudioPolicyManagerTestForHdmi,
         GetReportedSurroundFormatsReturnsHdmiReportedFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
     auto surroundFormats = getReportedSurroundFormatsHelper();
-    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), GetParam()));
 }
 
-TEST_F(AudioPolicyManagerTestForHdmi,
+TEST_P(AudioPolicyManagerTestForHdmi,
         GetReportedSurroundFormatsReturnsNonManipulatedHdmiReportedFormats) {
     mManager->setForceUse(
             AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL);
 
-    status_t ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, false /*enabled*/);
+    status_t ret = mManager->setSurroundFormatEnabled(GetParam(), false /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
     auto surroundFormats = getReportedSurroundFormatsHelper();
-    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), GetParam()));
 
-    ret = mManager->setSurroundFormatEnabled(AUDIO_FORMAT_E_AC3, true /*enabled*/);
+    ret = mManager->setSurroundFormatEnabled(GetParam(), true /*enabled*/);
     ASSERT_EQ(NO_ERROR, ret);
     surroundFormats = getReportedSurroundFormatsHelper();
-    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), AUDIO_FORMAT_E_AC3));
+    ASSERT_EQ(1, std::count(surroundFormats.begin(), surroundFormats.end(), GetParam()));
 }
 
+TEST_P(AudioPolicyManagerTestForHdmi, GetSurroundFormatsIgnoresSupportedFormats) {
+    mManager->setForceUse(
+            AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND, AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER);
+    auto surroundFormats = getSurroundFormatsHelper();
+    ASSERT_EQ(1, surroundFormats.count(GetParam()));
+    ASSERT_FALSE(surroundFormats[GetParam()]);
+}
+
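+// Run each HDMI surround-format test once per format; the lambda below derives a readable
+// test name suffix from the format via audio_format_to_string().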
+INSTANTIATE_TEST_SUITE_P(SurroundFormatSupport, AudioPolicyManagerTestForHdmi,
+        testing::Values(AUDIO_FORMAT_AC3, AUDIO_FORMAT_E_AC3),
+        [](const ::testing::TestParamInfo<AudioPolicyManagerTestForHdmi::ParamType>& info) {
+            return audio_format_to_string(info.param);
+        });
+
 class AudioPolicyManagerTestDPNoRemoteSubmixModule : public AudioPolicyManagerTestDynamicPolicy {
 protected:
     std::string getConfigFile() override { return sPrimaryOnlyConfig; }
@@ -1035,7 +1300,7 @@
 };
 
 void AudioPolicyManagerTestDPPlaybackReRouting::SetUp() {
-    AudioPolicyManagerTestDynamicPolicy::SetUp();
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestDynamicPolicy::SetUp());
 
     mTracker.reset(new RecordingActivityTracker());
 
@@ -1221,7 +1486,7 @@
 };
 
 void AudioPolicyManagerTestDPMixRecordInjection::SetUp() {
-    AudioPolicyManagerTestDynamicPolicy::SetUp();
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestDynamicPolicy::SetUp());
 
     mTracker.reset(new RecordingActivityTracker());
 
@@ -1375,7 +1640,8 @@
     if (type == AUDIO_DEVICE_OUT_HDMI) {
         // Set device connection state failed due to no device descriptor found
         // For HDMI case, it is easier to simulate device descriptor not found error
-        // by using a undeclared encoded format.
+        // by using an encoded format which isn't listed in the 'encodedFormats'
+        // attribute for this devicePort.
         ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
                 type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                 address.c_str(), name.c_str(), AUDIO_FORMAT_MAT_2_1));
@@ -1519,7 +1785,7 @@
 };
 
 void AudioPolicyManagerDynamicHwModulesTest::SetUpManagerConfig() {
-    AudioPolicyManagerTestWithConfigurationFile::SetUpManagerConfig();
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUpManagerConfig());
     // Only allow successful opening of "primary" hw module during APM initialization.
     mClient->swapAllowedModuleNames({"primary"});
 }
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 87f0ab9..5e1822a 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -14,7 +14,7 @@
      limitations under the License.
 -->
 
-<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
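+<!-- The 7.0 schema uses space-separated samplingRates / channelMasks lists, as reflected
+     in the profiles below. -->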
     <globalConfiguration speaker_drc_enabled="true"/>
 
     <modules>
@@ -41,8 +41,18 @@
                 </mixPort>
                 <mixPort name="mixport_bt_hfp_input" role="sink">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
-                             samplingRates="8000,11025,16000,44100,48000"
-                             channelMasks="AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_MONO"/>
+                             samplingRates="8000 11025 16000 44100 48000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
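+                <!-- The voip_tx / voip_rx ports carry the VOIP input/output flags that the
+                     ListAudioPortsHasFlags test looks for. -->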
+                <mixPort name="voip_tx" role="sink"
+                         flags="AUDIO_INPUT_FLAG_VOIP_TX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voip_rx" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_VOIP_RX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                           samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
             </mixPorts>
             <devicePorts>
@@ -50,7 +60,8 @@
                 </devicePort>
                 <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
                 </devicePort>
-                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+                            encodedFormats="AUDIO_FORMAT_AC3">
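+                    <!-- Only AC3 is declared; a format not listed here (e.g. AUDIO_FORMAT_MAT_2_1)
+                         is used to simulate a "device descriptor not found" error on HDMI connect. -->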
                 </devicePort>
                 <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
                 </devicePort>
@@ -63,13 +74,15 @@
             </devicePorts>
             <routes>
                 <route type="mix" sink="Speaker"
-                       sources="primary output"/>
+                       sources="primary output,voip_rx"/>
                 <route type="mix" sink="primary input"
                        sources="Built-In Mic,Hdmi-In Mic"/>
+                <route type="mix" sink="voip_tx"
+                       sources="Built-In Mic"/>
                 <route type="mix" sink="Hdmi"
                        sources="primary output"/>
                 <route type="mix" sink="BT SCO"
-                       sources="mixport_bt_hfp_output"/>
+                       sources="mixport_bt_hfp_output,voip_rx"/>
                 <route type="mix" sink="mixport_bt_hfp_input"
                        sources="BT SCO Headset Mic"/>
             </routes>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 26562e0..981c569 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -42,13 +42,15 @@
 
     srcs: [
         "CameraService.cpp",
+        "CameraServiceWatchdog.cpp",
         "CameraFlashlight.cpp",
         "common/Camera2ClientBase.cpp",
         "common/CameraDeviceBase.cpp",
         "common/CameraOfflineSessionBase.cpp",
         "common/CameraProviderManager.cpp",
-        "common/DepthPhotoProcessor.cpp",
         "common/FrameProcessorBase.cpp",
+        "common/hidl/HidlProviderInfo.cpp",
+        "common/aidl/AidlProviderInfo.cpp",
         "api1/Camera2Client.cpp",
         "api1/client2/Parameters.cpp",
         "api1/client2/FrameProcessor.cpp",
@@ -76,32 +78,37 @@
         "device3/StatusTracker.cpp",
         "device3/Camera3BufferManager.cpp",
         "device3/Camera3StreamSplitter.cpp",
-        "device3/CoordinateMapper.cpp",
-        "device3/DistortionMapper.cpp",
-        "device3/ZoomRatioMapper.cpp",
-        "device3/RotateAndCropMapper.cpp",
         "device3/Camera3OutputStreamInterface.cpp",
         "device3/Camera3OutputUtils.cpp",
         "device3/Camera3DeviceInjectionMethods.cpp",
         "device3/UHRCropAndMeteringRegionMapper.cpp",
+        "device3/PreviewFrameSpacer.cpp",
+        "device3/hidl/HidlCamera3Device.cpp",
+        "device3/hidl/HidlCamera3OfflineSession.cpp",
+        "device3/hidl/HidlCamera3OutputUtils.cpp",
+        "device3/aidl/AidlCamera3Device.cpp",
+        "device3/aidl/AidlCamera3OutputUtils.cpp",
+        "device3/aidl/AidlCamera3OfflineSession.cpp",
         "gui/RingBufferConsumer.cpp",
         "hidl/AidlCameraDeviceCallbacks.cpp",
         "hidl/AidlCameraServiceListener.cpp",
-        "hidl/Convert.cpp",
         "hidl/HidlCameraDeviceUser.cpp",
         "hidl/HidlCameraService.cpp",
+        "hidl/Utils.cpp",
         "utils/CameraServiceProxyWrapper.cpp",
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
-        "utils/ExifUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
+        "utils/SessionConfigurationUtilsHidl.cpp",
         "utils/SessionStatsBuilder.cpp",
         "utils/TagMonitor.cpp",
         "utils/LatencyHistogram.cpp",
     ],
 
     header_libs: [
+        "libdynamic_depth-internal_headers",
+        "libdynamic_depth-public_headers",
         "libmediadrm_headers",
         "libmediametrics_headers",
     ],
@@ -115,6 +122,7 @@
         "libutilscallstack",
         "libutils",
         "libbinder",
+        "libbinder_ndk",
         "libactivitymanager_aidl",
         "libpermission",
         "libcutils",
@@ -148,19 +156,23 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V1-ndk",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
         "android.hardware.camera.device@3.4",
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
+        "android.hardware.camera.device-V1-ndk",
         "media_permission-aidl-cpp",
     ],
 
     static_libs: [
+        "libaidlcommonsupport",
         "libprocessinfoservice_aidl",
         "libbinderthreadstateutils",
         "media_permission-aidl-cpp",
+        "libcameraservice_device_independent",
     ],
 
     export_shared_lib_headers: [
@@ -175,9 +187,55 @@
     include_dirs: [
         "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
-        "frameworks/av/media/ndk",
-        "external/dynamic_depth/includes",
-        "external/dynamic_depth/internal",
+    ],
+
+    export_include_dirs: ["."],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+        "-Wno-ignored-qualifiers",
+    ],
+
+}
+
+cc_library_static {
+    name: "libcameraservice_device_independent",
+    host_supported: true,
+
+    // Camera service source
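+    // Device- and transport-independent pieces (coordinate mappers, EXIF utils, etc.)
+    // split out so they can also be built for the host (host_supported above).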
+
+    srcs: [
+        "common/DepthPhotoProcessor.cpp",
+        "device3/CoordinateMapper.cpp",
+        "device3/DistortionMapper.cpp",
+        "device3/RotateAndCropMapper.cpp",
+        "device3/ZoomRatioMapper.cpp",
+        "utils/ExifUtils.cpp",
+        "utils/SessionConfigurationUtilsHost.cpp",
+    ],
+
+    header_libs: [
+        "libdynamic_depth-internal_headers",
+        "libdynamic_depth-public_headers",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libdynamic_depth",
+        "libexif",
+        "libjpeg",
+        "liblog",
+        "libutils",
+        "libxml2",
+    ],
+
+    include_dirs: [
+        "frameworks/av/camera/include",
+        "frameworks/av/camera/include/camera",
     ],
 
     export_include_dirs: ["."],
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index ccdd9e5..ffd38be 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -32,8 +32,6 @@
 
 namespace android {
 
-using hardware::camera::common::V1_0::TorchModeStatus;
-
 /////////////////////////////////////////////////////////////////////
 // CameraFlashlight implementation begins
 // used by camera service to control flashlight.
@@ -119,6 +117,59 @@
     return res;
 }
 
+status_t CameraFlashlight::turnOnTorchWithStrengthLevel(const String8& cameraId,
+            int32_t torchStrength) {
+    if (!mFlashlightMapInitialized) {
+        ALOGE("%s: findFlashUnits() must be called before this method.",
+               __FUNCTION__);
+        return NO_INIT;
+    }
+
+    ALOGV("%s: set torch strength of camera %s to %d", __FUNCTION__,
+            cameraId.string(), torchStrength);
+    status_t res = OK;
+    Mutex::Autolock l(mLock);
+
+    if (mOpenedCameraIds.indexOf(cameraId) != NAME_NOT_FOUND) {
+        ALOGE("%s: Camera device %s is in use, cannot be turned ON.",
+                __FUNCTION__, cameraId.string());
+        return -EBUSY;
+    }
+
+    if (mFlashControl == NULL) {
+        res = createFlashlightControl(cameraId);
+        if (res) {
+            return res;
+        }
+    }
+
+    res = mFlashControl->turnOnTorchWithStrengthLevel(cameraId, torchStrength);
+    return res;
+}
+
+
+status_t CameraFlashlight::getTorchStrengthLevel(const String8& cameraId,
+            int32_t* torchStrength) {
+    status_t res = OK;
+    if (!mFlashlightMapInitialized) {
+        ALOGE("%s: findFlashUnits() must be called before this method.",
+            __FUNCTION__);
+        return NO_INIT;
+    }
+
+    Mutex::Autolock l(mLock);
+
+    if (mFlashControl == NULL) {
+        res = createFlashlightControl(cameraId);
+        if (res) {
+            return res;
+        }
+    }
+
+    res = mFlashControl->getTorchStrengthLevel(cameraId, torchStrength);
+    return res;
+}
+
 status_t CameraFlashlight::findFlashUnits() {
     Mutex::Autolock l(mLock);
     status_t res;
@@ -306,6 +357,22 @@
 
     return mProviderManager->setTorchMode(cameraId.string(), enabled);
 }
+
+status_t ProviderFlashControl::turnOnTorchWithStrengthLevel(const String8& cameraId,
+            int32_t torchStrength) {
+    ALOGV("%s: change torch strength level of camera %s to %d", __FUNCTION__,
+            cameraId.string(), torchStrength);
+
+    return mProviderManager->turnOnTorchWithStrengthLevel(cameraId.string(), torchStrength);
+}
+
+status_t ProviderFlashControl::getTorchStrengthLevel(const String8& cameraId,
+            int32_t* torchStrength) {
+    ALOGV("%s: get torch strength level of camera %s", __FUNCTION__,
+            cameraId.string());
+
+    return mProviderManager->getTorchStrengthLevel(cameraId.string(), torchStrength);
+}
 // ProviderFlashControl implementation ends
 
 }
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index b97fa5f..1703ddc 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -44,6 +44,14 @@
         // set the torch mode to on or off.
         virtual status_t setTorchMode(const String8& cameraId,
                     bool enabled) = 0;
+
+        // Change the brightness level of the torch. If the torch is OFF and
+        // torchStrength >= 1, then the torch will also be turned ON.
+        virtual status_t turnOnTorchWithStrengthLevel(const String8& cameraId,
+                    int32_t torchStrength) = 0;
+
+        // Returns the torch strength level.
+        virtual status_t getTorchStrengthLevel(const String8& cameraId, int32_t* torchStrength) = 0;
 };
 
 /**
@@ -67,6 +75,12 @@
         // set the torch mode to on or off.
         status_t setTorchMode(const String8& cameraId, bool enabled);
 
+        // Change the torch strength level of the flash unit in torch mode.
+        status_t turnOnTorchWithStrengthLevel(const String8& cameraId, int32_t torchStrength);
+
+        // Get the torch strength level
+        status_t getTorchStrengthLevel(const String8& cameraId, int32_t* torchStrength);
+
         // Notify CameraFlashlight that camera service is going to open a camera
         // device. CameraFlashlight will free the resources that may cause the
         // camera open to fail. Camera service must call this function before
@@ -115,6 +129,8 @@
         // FlashControlBase
         status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
         status_t setTorchMode(const String8& cameraId, bool enabled);
+        status_t turnOnTorchWithStrengthLevel(const String8& cameraId, int32_t torchStrength);
+        status_t getTorchStrengthLevel(const String8& cameraId, int32_t* torchStrength);
 
     private:
         sp<CameraProviderManager> mProviderManager;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 4dfbb6f..80410ab 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -28,6 +28,7 @@
 #include <sys/types.h>
 #include <inttypes.h>
 #include <pthread.h>
+#include <poll.h>
 
 #include <android/hardware/ICamera.h>
 #include <android/hardware/ICameraClient.h>
@@ -85,13 +86,11 @@
 
 using base::StringPrintf;
 using binder::Status;
-using camera3::SessionConfigurationUtils;
+using namespace camera3;
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::ICamera;
 using hardware::ICameraClient;
 using hardware::ICameraServiceListener;
-using hardware::camera::common::V1_0::CameraDeviceStatus;
-using hardware::camera::common::V1_0::TorchModeStatus;
 using hardware::camera2::ICameraInjectionCallback;
 using hardware::camera2::ICameraInjectionSession;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
@@ -133,10 +132,12 @@
 static const String16
         sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
 const char *sFileName = "lastOpenSessionDumpFile";
-static constexpr int32_t kVendorClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
-static constexpr int32_t kVendorClientState = ActivityManager::PROCESS_STATE_PERSISTENT_UI;
+static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
+static constexpr int32_t kSystemNativeClientState =
+        ActivityManager::PROCESS_STATE_PERSISTENT_UI;
 
 const String8 CameraService::kOfflineDevice("offline-");
+const String16 CameraService::kWatchAllClientsFlag("all");
 
 // Set to keep track of logged service error events.
 static std::set<String8> sServiceErrorEventSet;
@@ -155,6 +156,12 @@
     }
 }
 
+// The word 'System' here does not refer only to clients running on the system
+// partition; they just need to have an Android system uid.
+static bool doesClientHaveSystemUid() {
+    return (CameraThreadState::getCallingUid() < AID_APP_START);
+}
+
 void CameraService::onFirstRef()
 {
 
@@ -361,18 +368,20 @@
 
 void CameraService::addStates(const String8 id) {
     std::string cameraId(id.c_str());
-    hardware::camera::common::V1_0::CameraResourceCost cost;
+    CameraResourceCost cost;
     status_t res = mCameraProviderManager->getResourceCost(cameraId, &cost);
-    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (res != OK) {
         ALOGE("Failed to query device resource cost: %s (%d)", strerror(-res), res);
         return;
     }
+    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     res = mCameraProviderManager->getSystemCameraKind(cameraId, &deviceKind);
     if (res != OK) {
         ALOGE("Failed to query device kind: %s (%d)", strerror(-res), res);
         return;
     }
+    std::vector<std::string> physicalCameraIds;
+    mCameraProviderManager->isLogicalCamera(cameraId, &physicalCameraIds);
     std::set<String8> conflicting;
     for (size_t i = 0; i < cost.conflictingDevices.size(); i++) {
         conflicting.emplace(String8(cost.conflictingDevices[i].c_str()));
@@ -381,7 +390,7 @@
     {
         Mutex::Autolock lock(mCameraStatesLock);
         mCameraStates.emplace(id, std::make_shared<CameraState>(id, cost.resourceCost,
-                                                                conflicting, deviceKind));
+                conflicting, deviceKind, physicalCameraIds));
     }
 
     if (mFlashlight->hasFlashUnit(id)) {
@@ -560,6 +569,22 @@
     onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
+
+void CameraService::onTorchStatusChanged(const String8& cameraId,
+        TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
+    Mutex::Autolock al(mTorchStatusMutex);
+    onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
+}
+
+void CameraService::broadcastTorchStrengthLevel(const String8& cameraId,
+        int32_t newStrengthLevel) {
+    Mutex::Autolock lock(mStatusListenerLock);
+    for (auto& i : mListenerList) {
+        i->getListener()->onTorchStrengthLevelChanged(String16{cameraId},
+                newStrengthLevel);
+    }
+}
+
 void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
@@ -613,8 +638,10 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
-    return checkPermission(sSystemCameraPermission, callingPid, callingUid) &&
+static bool hasPermissionsForSystemCamera(int callingPid, int callingUid,
+        bool logPermissionFailure = false) {
+    return checkPermission(sSystemCameraPermission, callingPid, callingUid,
+            logPermissionFailure) &&
             checkPermission(sCameraPermission, callingPid, callingUid);
 }
 
@@ -693,8 +720,8 @@
     const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
     auto callingPid = CameraThreadState::getCallingPid();
     auto callingUid = CameraThreadState::getCallingUid();
-    if (checkPermission(sSystemCameraPermission, callingPid, callingUid) ||
-            getpid() == callingPid) {
+    if (checkPermission(sSystemCameraPermission, callingPid, callingUid,
+            /*logPermissionFailure*/false) || getpid() == callingPid) {
         deviceIds = &mNormalDeviceIds;
     }
     if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
@@ -793,6 +820,31 @@
     return ret;
 }
 
+Status CameraService::getTorchStrengthLevel(const String16& cameraId,
+        int32_t* torchStrength) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mServiceLock);
+    if (!mInitialized) {
+        ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
+        return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
+    }
+
+    if (torchStrength == NULL) {
+        ALOGE("%s: strength level must not be null.", __FUNCTION__);
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Strength level should not be null.");
+    }
+
+    status_t res = mCameraProviderManager->getTorchStrengthLevel(String8(cameraId).string(),
+        torchStrength);
+    if (res != OK) {
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve torch "
+            "strength level for device %s: %s (%d)", String8(cameraId).string(),
+            strerror(-res), res);
+    }
+    ALOGI("%s: Torch strength level is: %d", __FUNCTION__, *torchStrength);
+    return Status::ok();
+}
+
 String8 CameraService::getFormattedCurrentTime() {
     time_t now = time(nullptr);
     char formattedTime[64];
@@ -835,29 +887,37 @@
     BasicClient::BasicClient::sCameraService = nullptr;
 }
 
-int CameraService::getDeviceVersion(const String8& cameraId, int* facing, int* orientation) {
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId, int* facing,
+        int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
 
     status_t res;
     hardware::hidl_version maxVersion{0,0};
+    IPCTransport transport = IPCTransport::INVALID;
     res = mCameraProviderManager->getHighestSupportedVersion(cameraId.string(),
-            &maxVersion);
-    if (res != OK) return -1;
+            &maxVersion, &transport);
+    if (res != OK || transport == IPCTransport::INVALID) {
+        ALOGE("%s: Unable to get highest supported version for camera id %s", __FUNCTION__,
+                cameraId.string());
+        return std::make_pair(-1, IPCTransport::INVALID);
+    }
     deviceVersion = HARDWARE_DEVICE_API_VERSION(maxVersion.get_major(), maxVersion.get_minor());
 
     hardware::CameraInfo info;
     if (facing) {
         res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
-        if (res != OK) return -1;
+        if (res != OK) {
+            return std::make_pair(-1, IPCTransport::INVALID);
+        }
         *facing = info.facing;
         if (orientation) {
             *orientation = info.orientation;
         }
     }
 
-    return deviceVersion;
+    return std::make_pair(deviceVersion, transport);
 }
 
 Status CameraService::filterGetInfoErrorCode(status_t err) {
@@ -878,48 +938,50 @@
 }
 
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
-        const sp<IInterface>& cameraCb, const String16& packageName,
+        const sp<IInterface>& cameraCb, const String16& packageName, bool systemNativeClient,
         const std::optional<String16>& featureId,  const String8& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid, int deviceVersion, apiLevel effectiveApiLevel, bool overrideForPerfClass,
-        /*out*/sp<BasicClient>* client) {
-
-    // Create CameraClient based on device version reported by the HAL.
-    switch(deviceVersion) {
-        case CAMERA_DEVICE_API_VERSION_1_0:
-            ALOGE("Camera using old HAL version: %d", deviceVersion);
-            return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
-                    "Camera device \"%s\" HAL version %d no longer supported",
-                    cameraId.string(), deviceVersion);
-            break;
-        case CAMERA_DEVICE_API_VERSION_3_0:
-        case CAMERA_DEVICE_API_VERSION_3_1:
-        case CAMERA_DEVICE_API_VERSION_3_2:
-        case CAMERA_DEVICE_API_VERSION_3_3:
-        case CAMERA_DEVICE_API_VERSION_3_4:
-        case CAMERA_DEVICE_API_VERSION_3_5:
-        case CAMERA_DEVICE_API_VERSION_3_6:
-        case CAMERA_DEVICE_API_VERSION_3_7:
-            if (effectiveApiLevel == API_1) { // Camera1 API route
-                sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
-                *client = new Camera2Client(cameraService, tmp, packageName, featureId,
-                        cameraId, api1CameraId,
-                        facing, sensorOrientation, clientPid, clientUid,
-                        servicePid, overrideForPerfClass);
-            } else { // Camera2 API route
-                sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
-                        static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
-                *client = new CameraDeviceClient(cameraService, tmp, packageName, featureId,
-                        cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                        overrideForPerfClass);
-            }
-            break;
-        default:
-            // Should not be reachable
-            ALOGE("Unknown camera device HAL version: %d", deviceVersion);
-            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
-                    "Camera device \"%s\" has unknown HAL version %d",
-                    cameraId.string(), deviceVersion);
+        int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client) {
+    // For HIDL devices
+    if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
+        // Create CameraClient based on device version reported by the HAL.
+        int deviceVersion = deviceVersionAndTransport.first;
+        switch(deviceVersion) {
+            case CAMERA_DEVICE_API_VERSION_1_0:
+                ALOGE("Camera using old HAL version: %d", deviceVersion);
+                return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
+                        "Camera device \"%s\" HAL version %d no longer supported",
+                        cameraId.string(), deviceVersion);
+                break;
+            case CAMERA_DEVICE_API_VERSION_3_0:
+            case CAMERA_DEVICE_API_VERSION_3_1:
+            case CAMERA_DEVICE_API_VERSION_3_2:
+            case CAMERA_DEVICE_API_VERSION_3_3:
+            case CAMERA_DEVICE_API_VERSION_3_4:
+            case CAMERA_DEVICE_API_VERSION_3_5:
+            case CAMERA_DEVICE_API_VERSION_3_6:
+            case CAMERA_DEVICE_API_VERSION_3_7:
+                break;
+            default:
+                // Should not be reachable
+                ALOGE("Unknown camera device HAL version: %d", deviceVersion);
+                return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                        "Camera device \"%s\" has unknown HAL version %d",
+                        cameraId.string(), deviceVersion);
+        }
+    }
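+    // Non-HIDL transports skip the legacy HAL-version check above; both transports share
+    // the Camera1 / Camera2 client construction below.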
+    if (effectiveApiLevel == API_1) { // Camera1 API route
+        sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
+        *client = new Camera2Client(cameraService, tmp, packageName, featureId,
+                cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
+                servicePid, overrideForPerfClass);
+    } else { // Camera2 API route
+        sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
+                static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
+        *client = new CameraDeviceClient(cameraService, tmp, packageName,
+                systemNativeClient, featureId, cameraId, facing, sensorOrientation,
+                clientPid, clientUid, servicePid, overrideForPerfClass);
     }
     return Status::ok();
 }
@@ -1007,7 +1069,7 @@
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, id, cameraId,
-            internalPackageName, {}, uid, USE_CALLING_PID,
+            internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*out*/ tmp)
             ).isOk()) {
@@ -1242,9 +1304,9 @@
 
     userid_t clientUserId = multiuser_get_user_id(clientUid);
 
-    // Only allow clients who are being used by the current foreground device user, unless calling
-    // from our own process OR the caller is using the cameraserver's HIDL interface.
-    if (getCurrentServingCall() != BinderCallType::HWBINDER && callingPid != getpid() &&
+    // For non-system clients: Only allow clients who are being used by the current foreground
+    // device user, unless calling from our own process.
+    if (!doesClientHaveSystemUid() && callingPid != getpid() &&
             (mAllowedUsers.find(clientUserId) == mAllowedUsers.end())) {
         ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from "
                 "device user %d, currently allowed device users: %s)", callingPid, clientUserId,
@@ -1281,12 +1343,12 @@
 }
 
 void CameraService::finishConnectLocked(const sp<BasicClient>& client,
-        const CameraService::DescriptorPtr& desc, int oomScoreOffset) {
+        const CameraService::DescriptorPtr& desc, int oomScoreOffset, bool systemNativeClient) {
 
     // Make a descriptor for the incoming client
     auto clientDescriptor =
             CameraService::CameraClientManager::makeClientDescriptor(client, desc,
-                    oomScoreOffset);
+                    oomScoreOffset, systemNativeClient);
     auto evicted = mActiveClientManager.addAndEvict(clientDescriptor);
 
     logConnected(desc->getKey(), static_cast<int>(desc->getOwnerId()),
@@ -1316,7 +1378,7 @@
 
 status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clientPid,
         apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName,
-        int oomScoreOffset,
+        int oomScoreOffset, bool systemNativeClient,
         /*out*/
         sp<BasicClient>* client,
         std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial) {
@@ -1334,7 +1396,7 @@
                 auto clientSp = current->getValue();
                 if (clientSp.get() != nullptr) { // should never be needed
                     if (!clientSp->canCastToApiClient(effectiveApiLevel)) {
-                        ALOGW("CameraService connect called from same client, but with a different"
+                        ALOGW("CameraService connect called with a different"
                                 " API level, evicting prior client...");
                     } else if (clientSp->getRemote() == remoteCallback) {
                         ALOGI("CameraService::connect X (PID %d) (second call from same"
@@ -1391,7 +1453,7 @@
         clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
                 sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
                 state->getConflicting(), actualScore, clientPid, actualState,
-                oomScoreOffset);
+                oomScoreOffset, systemNativeClient);
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
 
@@ -1534,7 +1596,7 @@
     String8 id = cameraIdIntToStr(api1CameraId);
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
-            clientPackageName, {}, clientUid, clientPid, API_1,
+            clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, /*out*/client);
 
     if(!ret.isOk()) {
@@ -1566,14 +1628,14 @@
     // 1) If cameraserver tries to access this camera device, accept the
     //    connection.
     // 2) The camera device is a publicly hidden secure camera device AND some
-    //    component is trying to access it on a non-hwbinder thread (generally a non HAL client),
-    //    reject it.
+    //    non-system component is trying to access it.
     // 3) if the camera device is advertised by the camera HAL as SYSTEM_ONLY
     //    and the serving thread is a non hwbinder thread, the client must have
     //    android.permission.SYSTEM_CAMERA permissions to connect.
 
     int cPid = CameraThreadState::getCallingPid();
     int cUid = CameraThreadState::getCallingUid();
+    bool systemClient = doesClientHaveSystemUid();
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
         // This isn't a known camera ID, so it's not a system camera
@@ -1586,8 +1648,7 @@
         return false;
     }
     // (2)
-    if (getCurrentServingCall() != BinderCallType::HWBINDER &&
-            systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA) {
+    if (!systemClient && systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA) {
         ALOGW("Rejecting access to secure hidden camera %s", cameraId.c_str());
         return true;
     }
@@ -1595,9 +1656,8 @@
     //     getCameraCharacteristics() allows for calls to succeed (albeit after hiding some
     //     characteristics) even if clients don't have android.permission.CAMERA. We do not want the
     //     same behavior for system camera devices.
-    if (getCurrentServingCall() != BinderCallType::HWBINDER &&
-            systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(cPid, cUid)) {
+    if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
+            !hasPermissionsForSystemCamera(cPid, cUid, /*logPermissionFailure*/true)) {
         ALOGW("Rejecting access to system only camera %s, inadequete permissions",
                 cameraId.c_str());
         return true;
@@ -1621,11 +1681,12 @@
     sp<CameraDeviceClient> client = nullptr;
     String16 clientPackageNameAdj = clientPackageName;
     int callingPid = CameraThreadState::getCallingPid();
-
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        std::string vendorClient =
-                StringPrintf("vendor.client.pid<%d>", CameraThreadState::getCallingPid());
-        clientPackageNameAdj = String16(vendorClient.c_str());
+    bool systemNativeClient = false;
+    if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
+        std::string systemClient =
+                StringPrintf("client.pid<%d>", CameraThreadState::getCallingPid());
+        clientPackageNameAdj = String16(systemClient.c_str());
+        systemNativeClient = true;
     }
 
     if (oomScoreOffset < 0) {
@@ -1637,6 +1698,13 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
+    if (CameraServiceProxyWrapper::isCameraDisabled()) {
+        String8 msg =
+                String8::format("Camera disabled by device policy");
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(ERROR_DISABLED, msg.string());
+    }
+
     // enforce system camera permissions
     if (oomScoreOffset > 0 &&
             !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
@@ -1649,7 +1717,7 @@
     }
 
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
-            /*api1CameraId*/-1, clientPackageNameAdj, clientFeatureId,
+            /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
             targetSdkVersion, /*out*/client);
 
@@ -1680,7 +1748,7 @@
 
 template<class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int api1CameraId, const String16& clientPackageName,
+        int api1CameraId, const String16& clientPackageName, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
         /*out*/sp<CLIENT>& device) {
@@ -1699,7 +1767,7 @@
     sp<CLIENT> client = nullptr;
     int facing = -1;
     int orientation = 0;
-    bool isNdk = (clientPackageName.size() == 0);
+    bool isNonSystemNdk = (clientPackageName.size() == 0);
     {
         // Acquire mServiceLock and prevent other clients from connecting
         std::unique_ptr<AutoConditionLock> lock =
@@ -1733,8 +1801,8 @@
         sp<BasicClient> clientTmp = nullptr;
         std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>> partial;
         if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel,
-                IInterface::asBinder(cameraCb), clientName8, oomScoreOffset, /*out*/&clientTmp,
-                /*out*/&partial)) != NO_ERROR) {
+                IInterface::asBinder(cameraCb), clientName8, oomScoreOffset, systemNativeClient,
+                /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
             switch (err) {
                 case -ENODEV:
                     return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
@@ -1764,7 +1832,8 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
-        int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
+        auto deviceVersionAndTransport =
+                getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1774,10 +1843,10 @@
         sp<BasicClient> tmp = nullptr;
         bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                 mPerfClassPrimaryCameraIds, cameraId.string(), targetSdkVersion);
-        if(!(ret = makeClient(this, cameraCb, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, facing, orientation,
+        if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
+                clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
-                deviceVersion, effectiveApiLevel, overrideForPerfClass,
+                deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -1786,7 +1855,8 @@
         LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
                 __FUNCTION__);
 
-        err = client->initialize(mCameraProviderManager, mMonitorTags);
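+        // Only hand the monitored tag list to clients that are on the watch list.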
+        String8 monitorTags = isClientWatched(client.get()) ? mMonitorTags : String8("");
+        err = client->initialize(mCameraProviderManager, monitorTags);
         if (err != OK) {
             ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
             // Errors could be from the HAL module open call or from AppOpsManager
@@ -1834,8 +1904,7 @@
         // Set rotate-and-crop override behavior
         if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
-        } else if (effectiveApiLevel == API_2) {
-
+        } else {
           client->setRotateAndCropOverride(
               CameraServiceProxyWrapper::getRotateAndCropOverride(
                   clientPackageName, facing, multiuser_get_user_id(clientUid)));
@@ -1843,7 +1912,7 @@
 
         // Set camera muting behavior
         bool isCameraPrivacyEnabled =
-                mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
+                mSensorPrivacyPolicy->isCameraPrivacyEnabled();
         if (client->supportsCameraMute()) {
             client->setCameraMute(
                     mOverrideCameraMuteMode || isCameraPrivacyEnabled);
@@ -1876,7 +1945,7 @@
             mServiceLock.lock();
         } else {
             // Otherwise, add client to active clients list
-            finishConnectLocked(client, partial, oomScoreOffset);
+            finishConnectLocked(client, partial, oomScoreOffset, systemNativeClient);
         }
 
         client->setImageDumpMask(mImageDumpMask);
@@ -1888,7 +1957,34 @@
 
     int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
     CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
-            effectiveApiLevel, isNdk, openLatencyMs);
+            effectiveApiLevel, isNonSystemNdk, openLatencyMs);
+
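+    // If camera injection was requested before this camera was opened, complete the deferred
+    // injectCamera() call now that a client exists.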
+    {
+        Mutex::Autolock lock(mInjectionParametersLock);
+        if (cameraId == mInjectionInternalCamId && mInjectionInitPending) {
+            mInjectionInitPending = false;
+            status_t res = NO_ERROR;
+            auto clientDescriptor = mActiveClientManager.get(mInjectionInternalCamId);
+            if (clientDescriptor != nullptr) {
+                sp<BasicClient> clientSp = clientDescriptor->getValue();
+                res = checkIfInjectionCameraIsPresent(mInjectionExternalCamId, clientSp);
+                if (res != OK) {
+                    return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
+                            "No camera device with ID \"%s\" currently available",
+                            mInjectionExternalCamId.string());
+                }
+                res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
+                if (res != OK) {
+                    mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
+                }
+            } else {
+                ALOGE("%s: Internal camera ID = %s 's client does not exist!",
+                        __FUNCTION__, mInjectionInternalCamId.string());
+                res = NO_INIT;
+                mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
+            }
+        }
+    }
 
     return ret;
 }
@@ -1924,7 +2020,8 @@
                 kOfflineDevice + onlineClientDesc->getKey(), offlineClient, /*cost*/ 0,
                 /*conflictingKeys*/ std::set<String8>(), onlinePriority.getScore(),
                 onlineClientDesc->getOwnerId(), onlinePriority.getState(),
-                /*ommScoreOffset*/ 0);
+                // native clients don't have offline processing support.
+                /*oomScoreOffset*/ 0, /*systemNativeClient*/ false);
 
         // Allow only one offline device per camera
         auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -1933,7 +2030,8 @@
             return BAD_VALUE;
         }
 
-        auto err = offlineClient->initialize(mCameraProviderManager, mMonitorTags);
+        String8 monitorTags = isClientWatched(offlineClient.get()) ? mMonitorTags : String8("");
+        auto err = offlineClient->initialize(mCameraProviderManager, monitorTags);
         if (err != OK) {
             ALOGE("%s: Could not initialize offline client.", __FUNCTION__);
             return err;
@@ -1965,6 +2063,136 @@
     return OK;
 }
 
+Status CameraService::turnOnTorchWithStrengthLevel(const String16& cameraId, int32_t torchStrength,
+        const sp<IBinder>& clientBinder) {
+    Mutex::Autolock lock(mServiceLock);
+
+    ATRACE_CALL();
+    if (clientBinder == nullptr) {
+        ALOGE("%s: torch client binder is NULL", __FUNCTION__);
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                "Torch client binder in null.");
+    }
+
+    String8 id = String8(cameraId.string());
+    int uid = CameraThreadState::getCallingUid();
+
+    if (shouldRejectSystemCameraConnection(id)) {
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to change the strength level"
+                "for system only device %s: ", id.string());
+    }
+
+    // verify id is valid
+    auto state = getCameraState(id);
+    if (state == nullptr) {
+        ALOGE("%s: camera id is invalid %s", __FUNCTION__, id.string());
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+            "Camera ID \"%s\" is a not valid camera ID", id.string());
+    }
+
+    StatusInternal cameraStatus = state->getStatus();
+    if (cameraStatus != StatusInternal::NOT_AVAILABLE &&
+            cameraStatus != StatusInternal::PRESENT) {
+        ALOGE("%s: camera id is invalid %s, status %d", __FUNCTION__, id.string(),
+            (int)cameraStatus);
+        return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                "Camera ID \"%s\" is a not valid camera ID", id.string());
+    }
+
+    {
+        Mutex::Autolock al(mTorchStatusMutex);
+        TorchModeStatus status;
+        status_t err = getTorchStatusLocked(id, &status);
+        if (err != OK) {
+            if (err == NAME_NOT_FOUND) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                    "Camera \"%s\" does not have a flash unit", id.string());
+            }
+            ALOGE("%s: getting current torch status failed for camera %s",
+                    __FUNCTION__, id.string());
+            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                    "Error changing torch strength level for camera \"%s\": %s (%d)",
+                    id.string(), strerror(-err), err);
+        }
+
+        if (status == TorchModeStatus::NOT_AVAILABLE) {
+            if (cameraStatus == StatusInternal::NOT_AVAILABLE) {
+                ALOGE("%s: torch mode of camera %s is not available because "
+                        "camera is in use.", __FUNCTION__, id.string());
+                return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
+                        "Torch for camera \"%s\" is not available due to an existing camera user",
+                        id.string());
+            } else {
+                ALOGE("%s: torch mode of camera %s is not available due to "
+                       "insufficient resources", __FUNCTION__, id.string());
+                return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
+                        "Torch for camera \"%s\" is not available due to insufficient resources",
+                        id.string());
+            }
+        }
+    }
+
+    {
+        Mutex::Autolock al(mTorchUidMapMutex);
+        updateTorchUidMapLocked(cameraId, uid);
+    }
+    // Check if the current torch strength level is the same as the new one.
+    bool shouldSkipTorchStrengthUpdates = mCameraProviderManager->shouldSkipTorchStrengthUpdate(
+            id.string(), torchStrength);
+
+    status_t err = mFlashlight->turnOnTorchWithStrengthLevel(id, torchStrength);
+
+    if (err != OK) {
+        int32_t errorCode;
+        String8 msg;
+        switch (err) {
+            case -ENOSYS:
+                msg = String8::format("Camera \"%s\" has no flashlight.",
+                    id.string());
+                errorCode = ERROR_ILLEGAL_ARGUMENT;
+                break;
+            case -EBUSY:
+                msg = String8::format("Camera \"%s\" is in use",
+                    id.string());
+                errorCode = ERROR_CAMERA_IN_USE;
+                break;
+            case -EINVAL:
+                msg = String8::format("Torch strength level %d is not within the "
+                        "valid range.", torchStrength);
+                errorCode = ERROR_ILLEGAL_ARGUMENT;
+                break;
+            default:
+                msg = String8::format("Changing torch strength level failed.");
+                errorCode = ERROR_INVALID_OPERATION;
+        }
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(errorCode, msg.string());
+    }
+
+    {
+        // update the link to client's death
+        // Store the last client that turns on each camera's torch mode.
+        Mutex::Autolock al(mTorchClientMapMutex);
+        ssize_t index = mTorchClientMap.indexOfKey(id);
+        if (index == NAME_NOT_FOUND) {
+            mTorchClientMap.add(id, clientBinder);
+        } else {
+            mTorchClientMap.valueAt(index)->unlinkToDeath(this);
+            mTorchClientMap.replaceValueAt(index, clientBinder);
+        }
+        clientBinder->linkToDeath(this);
+    }
+
+    int clientPid = CameraThreadState::getCallingPid();
+    const char *id_cstr = id.c_str();
+    ALOGI("%s: Torch strength for camera id %s changed to %d for client PID %d",
+            __FUNCTION__, id_cstr, torchStrength, clientPid);
+    if (!shouldSkipTorchStrengthUpdates) {
+        broadcastTorchStrengthLevel(id, torchStrength);
+    }
+    return Status::ok();
+}
+
 Status CameraService::setTorchMode(const String16& cameraId, bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
@@ -2036,13 +2264,7 @@
         // Update UID map - this is used in the torch status changed callbacks, so must be done
         // before setTorchMode
         Mutex::Autolock al(mTorchUidMapMutex);
-        if (mTorchUidMap.find(id) == mTorchUidMap.end()) {
-            mTorchUidMap[id].first = uid;
-            mTorchUidMap[id].second = uid;
-        } else {
-            // Set the pending UID
-            mTorchUidMap[id].first = uid;
-        }
+        updateTorchUidMapLocked(cameraId, uid);
     }
 
     status_t err = mFlashlight->setTorchMode(id, enabled);
@@ -2097,6 +2319,17 @@
     return Status::ok();
 }
 
+void CameraService::updateTorchUidMapLocked(const String16& cameraId, int uid) {
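+    // Caller must hold mTorchUidMapMutex. Records the requesting UID as the pending entry,
+    // initializing both entries when the camera id is seen for the first time.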
+    String8 id = String8(cameraId.string());
+    if (mTorchUidMap.find(id) == mTorchUidMap.end()) {
+        mTorchUidMap[id].first = uid;
+        mTorchUidMap[id].second = uid;
+    } else {
+        // Set the pending UID
+        mTorchUidMap[id].first = uid;
+    }
+}
+
 Status CameraService::notifySystemEvent(int32_t eventId,
         const std::vector<int32_t>& args) {
     const int pid = CameraThreadState::getCallingPid();
@@ -2127,6 +2360,13 @@
             doUserSwitch(/*newUserIds*/ args);
             break;
         }
+        case ICameraService::EVENT_USB_DEVICE_ATTACHED:
+        case ICameraService::EVENT_USB_DEVICE_DETACHED: {
+            // Notify CameraProviderManager for lazy HALs
+            mCameraProviderManager->notifyUsbDeviceEvent(eventId,
+                                                        std::to_string(args[0]));
+            break;
+        }
         case ICameraService::EVENT_NONE:
         default: {
             ALOGW("%s: Received invalid system event from system_server: %d", __FUNCTION__,
@@ -2169,23 +2409,7 @@
 
     ATRACE_CALL();
 
-    using hardware::camera::provider::V2_5::DeviceState;
-    hardware::hidl_bitfield<DeviceState> newDeviceState{};
-    if (newState & ICameraService::DEVICE_STATE_BACK_COVERED) {
-        newDeviceState |= DeviceState::BACK_COVERED;
-    }
-    if (newState & ICameraService::DEVICE_STATE_FRONT_COVERED) {
-        newDeviceState |= DeviceState::FRONT_COVERED;
-    }
-    if (newState & ICameraService::DEVICE_STATE_FOLDED) {
-        newDeviceState |= DeviceState::FOLDED;
-    }
-    // Only map vendor bits directly
-    uint64_t vendorBits = static_cast<uint64_t>(newState) & 0xFFFFFFFF00000000l;
-    newDeviceState |= vendorBits;
-
-    ALOGV("%s: New device state 0x%" PRIx64, __FUNCTION__, newDeviceState);
-    mCameraProviderManager->notifyDeviceStateChange(newDeviceState);
+    mCameraProviderManager->notifyDeviceStateChange(newState);
 
     return Status::ok();
 }
@@ -2218,7 +2442,7 @@
     for (auto& current : clients) {
         if (current != nullptr) {
             const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr && basicClient->canCastToApiClient(API_2)) {
+            if (basicClient.get() != nullptr) {
               basicClient->setRotateAndCropOverride(
                   CameraServiceProxyWrapper::getRotateAndCropOverride(
                       basicClient->getPackageName(),
@@ -2340,7 +2564,7 @@
     auto clientUid = CameraThreadState::getCallingUid();
     auto clientPid = CameraThreadState::getCallingPid();
     bool openCloseCallbackAllowed = checkPermission(sCameraOpenCloseListenerPermission,
-            clientPid, clientUid);
+            clientPid, clientUid, /*logPermissionFailure*/false);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -2377,7 +2601,8 @@
         Mutex::Autolock lock(mCameraStatesLock);
         for (auto& i : mCameraStates) {
             cameraStatuses->emplace_back(i.first,
-                    mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds());
+                    mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds(),
+                    openCloseCallbackAllowed ? i.second->getClientPackage() : String8::empty());
         }
     }
     // Remove the camera statuses that should be hidden from the client, we do
@@ -2494,44 +2719,48 @@
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    int deviceVersion = getDeviceVersion(id);
-    switch (deviceVersion) {
-        case CAMERA_DEVICE_API_VERSION_1_0:
-        case CAMERA_DEVICE_API_VERSION_3_0:
-        case CAMERA_DEVICE_API_VERSION_3_1:
-            if (apiVersion == API_VERSION_2) {
-                ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without shim",
-                        __FUNCTION__, id.string(), deviceVersion);
-                *isSupported = false;
-            } else { // if (apiVersion == API_VERSION_1) {
-                ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always supported",
+    auto deviceVersionAndTransport = getDeviceVersion(id);
+    if (deviceVersionAndTransport.first == -1) {
+        String8 msg = String8::format("Unknown camera ID %s", id.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
+        int deviceVersion = deviceVersionAndTransport.first;
+        switch (deviceVersion) {
+            case CAMERA_DEVICE_API_VERSION_1_0:
+            case CAMERA_DEVICE_API_VERSION_3_0:
+            case CAMERA_DEVICE_API_VERSION_3_1:
+                if (apiVersion == API_VERSION_2) {
+                    ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without "
+                            "shim", __FUNCTION__, id.string(), deviceVersion);
+                    *isSupported = false;
+                } else { // if (apiVersion == API_VERSION_1) {
+                    ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always "
+                            "supported", __FUNCTION__, id.string());
+                    *isSupported = true;
+                }
+                break;
+            case CAMERA_DEVICE_API_VERSION_3_2:
+            case CAMERA_DEVICE_API_VERSION_3_3:
+            case CAMERA_DEVICE_API_VERSION_3_4:
+            case CAMERA_DEVICE_API_VERSION_3_5:
+            case CAMERA_DEVICE_API_VERSION_3_6:
+            case CAMERA_DEVICE_API_VERSION_3_7:
+                ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
                         __FUNCTION__, id.string());
                 *isSupported = true;
+                break;
+            default: {
+                String8 msg = String8::format("Unknown device version %x for device %s",
+                        deviceVersion, id.string());
+                ALOGE("%s: %s", __FUNCTION__, msg.string());
+                return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.string());
             }
-            break;
-        case CAMERA_DEVICE_API_VERSION_3_2:
-        case CAMERA_DEVICE_API_VERSION_3_3:
-        case CAMERA_DEVICE_API_VERSION_3_4:
-        case CAMERA_DEVICE_API_VERSION_3_5:
-        case CAMERA_DEVICE_API_VERSION_3_6:
-        case CAMERA_DEVICE_API_VERSION_3_7:
-            ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
-                    __FUNCTION__, id.string());
-            *isSupported = true;
-            break;
-        case -1: {
-            String8 msg = String8::format("Unknown camera ID %s", id.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
-        default: {
-            String8 msg = String8::format("Unknown device version %x for device %s",
-                    deviceVersion, id.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.string());
-        }
+    } else {
+        *isSupported = true;
     }
-
     return Status::ok();
 }
 
@@ -2552,7 +2781,7 @@
         const String16& externalCamId,
         const sp<ICameraInjectionCallback>& callback,
         /*out*/
-        sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession) {
+        sp<ICameraInjectionSession>* cameraInjectionSession) {
     ATRACE_CALL();
 
     if (!checkCallingPermission(sCameraInjectExternalCameraPermission)) {
@@ -2569,18 +2798,36 @@
         __FUNCTION__, String8(packageName).string(),
         String8(internalCamId).string(), String8(externalCamId).string());
 
-    binder::Status ret = binder::Status::ok();
-    // TODO: Implement the injection camera function.
-    // ret = internalInjectCamera(...);
-    // if(!ret.isOk()) {
-    //     mInjectionStatusListener->notifyInjectionError(...);
-    //     return ret;
-    // }
+    {
+        Mutex::Autolock lock(mInjectionParametersLock);
+        mInjectionInternalCamId = String8(internalCamId);
+        mInjectionExternalCamId = String8(externalCamId);
+        mInjectionStatusListener->addListener(callback);
+        *cameraInjectionSession = new CameraInjectionSession(this);
+        status_t res = NO_ERROR;
+        auto clientDescriptor = mActiveClientManager.get(mInjectionInternalCamId);
+        // If the client already exists, we can directly connect to the camera device through the
+        // client's injectCamera(), otherwise we need to wait until the client is established
+        // (execute connectHelper()) before injecting the camera to the camera device.
+        if (clientDescriptor != nullptr) {
+            mInjectionInitPending = false;
+            sp<BasicClient> clientSp = clientDescriptor->getValue();
+            res = checkIfInjectionCameraIsPresent(mInjectionExternalCamId, clientSp);
+            if (res != OK) {
+                return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
+                        "No camera device with ID \"%s\" currently available",
+                        mInjectionExternalCamId.string());
+            }
+            res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
+            if (res != OK) {
+                mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
+            }
+        } else {
+            mInjectionInitPending = true;
+        }
+    }
 
-    mInjectionStatusListener->addListener(callback);
-    *cameraInjectionSession = new CameraInjectionSession(this);
-
-    return ret;
+    return binder::Status::ok();
 }
 
 void CameraService::removeByClient(const BasicClient* client) {
@@ -2588,6 +2835,7 @@
     for (auto& i : mActiveClientManager.getAll()) {
         auto clientSp = i->getValue();
         if (clientSp.get() == client) {
+            cacheClientTagDumpIfNeeded(client->mCameraIdStr, clientSp.get());
             mActiveClientManager.remove(i);
         }
     }
@@ -2664,7 +2912,11 @@
         return sp<BasicClient>{nullptr};
     }
 
-    return clientDescriptorPtr->getValue();
+    sp<BasicClient> client = clientDescriptorPtr->getValue();
+    if (client.get() != nullptr) {
+        cacheClientTagDumpIfNeeded(clientDescriptorPtr->getKey(), client.get());
+    }
+    return client;
 }
 
 void CameraService::doUserSwitch(const std::vector<int32_t>& newUserIds) {
@@ -2947,7 +3199,7 @@
 
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
-        const String16& clientPackageName,
+        const String16& clientPackageName, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
@@ -2955,7 +3207,7 @@
         int servicePid) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
-                clientPackageName, clientFeatureId,
+                clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
                 servicePid),
@@ -2985,13 +3237,14 @@
 
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
-        const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-        const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
-        int clientPid, uid_t clientUid,
+        const String16& clientPackageName, bool nativeClient,
+        const std::optional<String16>& clientFeatureId, const String8& cameraIdStr,
+        int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid):
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
-        mClientPackageName(clientPackageName), mClientFeatureId(clientFeatureId),
+        mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
+        mClientFeatureId(clientFeatureId),
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
@@ -3034,7 +3287,19 @@
         }
         mClientPackageName = packages[0];
     }
-    if (getCurrentServingCall() != BinderCallType::HWBINDER) {
+
+    // There are 2 scenarios in which a client won't have AppOps operations
+    // (both scenarios: native clients)
+    //    1) It's a system native client*; the package name will be empty
+    //       and it will return from this function in the previous if condition
+    //       (This is the same as the previously existing behavior).
+    //    2) It is a system native client, but its package name has been
+    //       modified for debugging; however, it still must not use AppOps since
+    //       the package name is not a real one.
+    //
+    //       * system native client - native client with UID < AID_APP_START. It
+    //         doesn't exclude clients not on the system partition.
+    if (!mSystemNativeClient) {
         mAppOpsManager = std::make_unique<AppOpsManager>();
     }
 
@@ -3083,6 +3348,21 @@
     return OK;
 }
 
+status_t CameraService::BasicClient::startWatchingTags(const String8&, int) {
+    // Can't watch tags directly, must go through CameraService::startWatchingTags
+    return OK;
+}
+
+status_t CameraService::BasicClient::stopWatchingTags(int) {
+    // Can't watch tags directly, must go through CameraService::stopWatchingTags
+    return OK;
+}
+
+status_t CameraService::BasicClient::dumpWatchedEventsToVector(std::vector<std::string> &) {
+    // Can't watch tags directly, must go through CameraService::dumpWatchedEventsToVector
+    return OK;
+}
+
 String16 CameraService::BasicClient::getPackageName() const {
     return mClientPackageName;
 }
@@ -3148,8 +3428,7 @@
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
                 mClientPackageName);
         bool isCameraPrivacyEnabled =
-                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
-                    multiuser_get_user_id(mClientUid));
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
         if (!isUidActive || !isCameraPrivacyEnabled) {
             ALOGI("Camera %s: Access for \"%s\" has been restricted",
                     mCameraIdStr.string(), String8(mClientPackageName).string());
@@ -3331,8 +3610,7 @@
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
         bool isCameraPrivacyEnabled =
-                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled(
-                        multiuser_get_user_id(mClientUid));
+                sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
         ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
                 mCameraIdStr.string(), String8(mClientPackageName).string(),
                 mUidIsTrusted, isUidActive);
@@ -3412,7 +3690,8 @@
     status_t res = mAm.linkToDeath(this);
     mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
             | ActivityManager::UID_OBSERVER_IDLE
-            | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE,
+            | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
+            | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
             ActivityManager::PROCESS_STATE_UNKNOWN,
             String16("cameraserver"));
     if (res == OK) {
@@ -3461,9 +3740,9 @@
     bool procStateChange = false;
     {
         Mutex::Autolock _l(mUidLock);
-        if ((mMonitoredUids.find(uid) != mMonitoredUids.end()) &&
-                (mMonitoredUids[uid].first != procState)) {
-            mMonitoredUids[uid].first = procState;
+        if (mMonitoredUids.find(uid) != mMonitoredUids.end() &&
+                mMonitoredUids[uid].procState != procState) {
+            mMonitoredUids[uid].procState = procState;
             procStateChange = true;
         }
     }
@@ -3476,15 +3755,33 @@
     }
 }
 
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
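+    // Forward process OOM-adjustment changes for monitored UIDs to the camera service via
+    // notifyMonitoredUids().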
+    bool procAdjChange = false;
+    {
+        Mutex::Autolock _l(mUidLock);
+        if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
+            procAdjChange = true;
+        }
+    }
+
+    if (procAdjChange) {
+        sp<CameraService> service = mService.promote();
+        if (service != nullptr) {
+            service->notifyMonitoredUids();
+        }
+    }
+}
+
 void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
-        it->second.second++;
+        it->second.refCount++;
     } else {
-        mMonitoredUids.emplace(
-                std::pair<uid_t, std::pair<int32_t, size_t>> (uid,
-                    std::pair<int32_t, size_t> (ActivityManager::PROCESS_STATE_NONEXISTENT, 1)));
+        MonitoredUid monitoredUid;
+        monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+        monitoredUid.refCount = 1;
+        mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
     }
 }
 
@@ -3492,8 +3789,8 @@
     Mutex::Autolock _l(mUidLock);
     auto it = mMonitoredUids.find(uid);
     if (it != mMonitoredUids.end()) {
-        it->second.second--;
-        if (it->second.second == 0) {
+        it->second.refCount--;
+        if (it->second.refCount == 0) {
             mMonitoredUids.erase(it);
         }
     } else {
@@ -3571,7 +3868,7 @@
 int32_t CameraService::UidPolicy::getProcStateLocked(uid_t uid) {
     int32_t procState = ActivityManager::PROCESS_STATE_UNKNOWN;
     if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
-        procState = mMonitoredUids[uid].first;
+        procState = mMonitoredUids[uid].procState;
     }
     return procState;
 }
@@ -3644,18 +3941,18 @@
     return mSensorPrivacyEnabled;
 }
 
-bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(userid_t userId) {
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled() {
     if (!hasCameraPrivacyFeature()) {
         return false;
     }
-    return mSpm.isIndividualSensorPrivacyEnabled(userId,
-        SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+    return mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
 }
 
-binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(bool enabled) {
+binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(
+        int toggleType __unused, int sensor __unused, bool enabled) {
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
-        mSensorPrivacyEnabled = enabled;
+        mSensorPrivacyEnabled =
+                mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
     if (enabled) {
@@ -3674,7 +3971,11 @@
 }
 
 bool CameraService::SensorPrivacyPolicy::hasCameraPrivacyFeature() {
-    return mSpm.supportsSensorToggle(SensorPrivacyManager::INDIVIDUAL_SENSOR_CAMERA);
+    bool supportsSoftwareToggle = mSpm.supportsSensorToggle(
+            SensorPrivacyManager::TOGGLE_TYPE_SOFTWARE, SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+    bool supportsHardwareToggle = mSpm.supportsSensorToggle(
+            SensorPrivacyManager::TOGGLE_TYPE_HARDWARE, SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+    return supportsSoftwareToggle || supportsHardwareToggle;
 }
 
 // ----------------------------------------------------------------------------
@@ -3682,9 +3983,10 @@
 // ----------------------------------------------------------------------------
 
 CameraService::CameraState::CameraState(const String8& id, int cost,
-        const std::set<String8>& conflicting, SystemCameraKind systemCameraKind) : mId(id),
+        const std::set<String8>& conflicting, SystemCameraKind systemCameraKind,
+        const std::vector<std::string>& physicalCameras) : mId(id),
         mStatus(StatusInternal::NOT_PRESENT), mCost(cost), mConflicting(conflicting),
-        mSystemCameraKind(systemCameraKind) {}
+        mSystemCameraKind(systemCameraKind), mPhysicalCameras(physicalCameras) {}
 
 CameraService::CameraState::~CameraState() {}
 
@@ -3723,6 +4025,11 @@
     return mSystemCameraKind;
 }
 
+bool CameraService::CameraState::containsPhysicalCamera(const std::string& physicalCameraId) const {
+    return std::find(mPhysicalCameras.begin(), mPhysicalCameras.end(), physicalCameraId)
+            != mPhysicalCameras.end();
+}
+
 bool CameraService::CameraState::addUnavailablePhysicalId(const String8& physicalId) {
     Mutex::Autolock lock(mStatusLock);
     auto result = mUnavailablePhysicalIds.insert(physicalId);
@@ -3735,6 +4042,16 @@
     return count > 0;
 }
 
+void CameraService::CameraState::setClientPackage(const String8& clientPackage) {
+    Mutex::Autolock lock(mStatusLock);
+    mClientPackage = clientPackage;
+}
+
+String8 CameraService::CameraState::getClientPackage() const {
+    Mutex::Autolock lock(mStatusLock);
+    return mClientPackage;
+}
+
 // ----------------------------------------------------------------------------
 //                  ClientEventListener
 // ----------------------------------------------------------------------------
@@ -3825,23 +4142,23 @@
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
         const String8& key, const sp<BasicClient>& value, int32_t cost,
         const std::set<String8>& conflictingKeys, int32_t score, int32_t ownerId,
-        int32_t state, int32_t oomScoreOffset) {
+        int32_t state, int32_t oomScoreOffset, bool systemNativeClient) {
 
-    bool isVendorClient = getCurrentServingCall() == BinderCallType::HWBINDER;
-    int32_t score_adj = isVendorClient ? kVendorClientScore : score;
-    int32_t state_adj = isVendorClient ? kVendorClientState: state;
+    int32_t score_adj = systemNativeClient ? kSystemNativeClientScore : score;
+    int32_t state_adj = systemNativeClient ? kSystemNativeClientState : state;
 
     return std::make_shared<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>(
-            key, value, cost, conflictingKeys, score_adj, ownerId, state_adj, isVendorClient,
-            oomScoreOffset);
+            key, value, cost, conflictingKeys, score_adj, ownerId, state_adj,
+            systemNativeClient, oomScoreOffset);
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
         const sp<BasicClient>& value, const CameraService::DescriptorPtr& partial,
-        int32_t oomScoreOffset) {
+        int32_t oomScoreOffset, bool systemNativeClient) {
     return makeClientDescriptor(partial->getKey(), value, partial->getCost(),
             partial->getConflicting(), partial->getPriority().getScore(),
-            partial->getOwnerId(), partial->getPriority().getState(), oomScoreOffset);
+            partial->getOwnerId(), partial->getPriority().getState(), oomScoreOffset,
+            systemNativeClient);
 }
 
 // ----------------------------------------------------------------------------
@@ -3869,22 +4186,62 @@
 }
 
 void CameraService::InjectionStatusListener::notifyInjectionError(
-        int errorCode) {
-    Mutex::Autolock lock(mListenerLock);
+        String8 injectedCamId, status_t err) {
     if (mCameraInjectionCallback == nullptr) {
         ALOGW("InjectionStatusListener: mCameraInjectionCallback == nullptr");
         return;
     }
-    mCameraInjectionCallback->onInjectionError(errorCode);
+
+    switch (err) {
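+        // Map the low-level status to the ICameraInjectionCallback error codes and log details.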
+        case -ENODEV:
+            mCameraInjectionCallback->onInjectionError(
+                    ICameraInjectionCallback::ERROR_INJECTION_SESSION);
+            ALOGE("No camera device with ID \"%s\" currently available!",
+                    injectedCamId.string());
+            break;
+        case -EBUSY:
+            mCameraInjectionCallback->onInjectionError(
+                    ICameraInjectionCallback::ERROR_INJECTION_SESSION);
+            ALOGE("Higher-priority client using camera, ID \"%s\" currently unavailable!",
+                    injectedCamId.string());
+            break;
+        case DEAD_OBJECT:
+            mCameraInjectionCallback->onInjectionError(
+                    ICameraInjectionCallback::ERROR_INJECTION_SESSION);
+            ALOGE("Camera ID \"%s\" object is dead!",
+                    injectedCamId.string());
+            break;
+        case INVALID_OPERATION:
+            mCameraInjectionCallback->onInjectionError(
+                    ICameraInjectionCallback::ERROR_INJECTION_SESSION);
+            ALOGE("Camera ID \"%s\" encountered an invalid operation or internal error!",
+                    injectedCamId.string());
+            break;
+        case UNKNOWN_TRANSACTION:
+            mCameraInjectionCallback->onInjectionError(
+                    ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED);
+            ALOGE("Camera ID \"%s\" does not support the injection method!",
+                    injectedCamId.string());
+            break;
+        default:
+            mCameraInjectionCallback->onInjectionError(
+                    ICameraInjectionCallback::ERROR_INJECTION_INVALID_ERROR);
+            ALOGE("Unexpected error %s (%d) opening camera \"%s\"!",
+                    strerror(-err), err, injectedCamId.string());
+    }
 }
 
 void CameraService::InjectionStatusListener::binderDied(
         const wp<IBinder>& /*who*/) {
-    Mutex::Autolock lock(mListenerLock);
     ALOGV("InjectionStatusListener: ICameraInjectionCallback has died");
     auto parent = mParent.promote();
     if (parent != nullptr) {
-        parent->stopInjectionImpl();
+        auto clientDescriptor = parent->mActiveClientManager.get(parent->mInjectionInternalCamId);
+        if (clientDescriptor != nullptr) {
+            BasicClient* baseClientPtr = clientDescriptor->getValue().get();
+            baseClientPtr->stopInjection();
+        }
+        parent->clearInjectionParameters();
     }
 }
 
@@ -3900,7 +4257,20 @@
         return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_SERVICE,
                 "Camera service encountered error");
     }
-    parent->stopInjectionImpl();
+
+    status_t res = NO_ERROR;
+    auto clientDescriptor = parent->mActiveClientManager.get(parent->mInjectionInternalCamId);
+    if (clientDescriptor != nullptr) {
+        BasicClient* baseClientPtr = clientDescriptor->getValue().get();
+        res = baseClientPtr->stopInjection();
+        if (res != OK) {
+            ALOGE("CameraInjectionSession: Failed to stop the injection camera!"
+                " ret != NO_ERROR: %d", res);
+            return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_SESSION,
+                "Camera session encountered error");
+        }
+    }
+    parent->clearInjectionParameters();
     return binder::Status::ok();
 }
 
@@ -4041,7 +4411,7 @@
 
     // Dump camera traces if there were any
     dprintf(fd, "\n");
-    camera3::CameraTraces::dump(fd, args);
+    camera3::CameraTraces::dump(fd);
 
     // Process dump arguments, if any
     int n = args.size();
@@ -4100,8 +4470,9 @@
 void CameraService::dumpOpenSessionClientLogs(int fd,
         const Vector<String16>& args, const String8& cameraId) {
     auto clientDescriptor = mActiveClientManager.get(cameraId);
-    dprintf(fd, "  Device %s is open. Client instance dump:\n",
-        cameraId.string());
+    dprintf(fd, "  %s : Device %s is open. Client instance dump:\n",
+            getFormattedCurrentTime().string(),
+            cameraId.string());
     dprintf(fd, "    Client priority score: %d state: %d\n",
         clientDescriptor->getPriority().getScore(),
         clientDescriptor->getPriority().getState());
@@ -4135,6 +4506,45 @@
     dprintf(fd, "\n");
 }
 
+void CameraService::cacheClientTagDumpIfNeeded(const char *cameraId, BasicClient* client) {
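+    // Snapshot the watched client's monitored-tag dump before it goes away so it can still be
+    // printed from mWatchedClientsDumpCache.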
+    Mutex::Autolock lock(mLogLock);
+    if (!isClientWatchedLocked(client)) { return; }
+
+    std::vector<std::string> dumpVector;
+    client->dumpWatchedEventsToVector(dumpVector);
+
+    if (dumpVector.empty()) { return; }
+
+    std::string dumpString;
+
+    String8 currentTime = getFormattedCurrentTime();
+    dumpString += "Cached @ ";
+    dumpString += currentTime.string();
+    dumpString += "\n"; // First line is the timestamp of when client is cached.
+
+    const String16 &packageName = client->getPackageName();
+
+    String8 packageName8 = String8(packageName);
+    const char *printablePackageName = packageName8.lockBuffer(packageName.size());
+
+    size_t i = dumpVector.size();
+
+    // Store the string in reverse order (latest last)
+    while (i > 0) {
+        i--;
+        dumpString += cameraId;
+        dumpString += ":";
+        dumpString += printablePackageName;
+        dumpString += "  ";
+        dumpString += dumpVector[i]; // implicitly ends with '\n'
+    }
+
+    packageName8.unlockBuffer();
+    mWatchedClientsDumpCache[packageName] = dumpString;
+}
+
 void CameraService::handleTorchClientBinderDied(const wp<IBinder> &who) {
     Mutex::Autolock al(mTorchClientMapMutex);
     for (size_t i = 0; i < mTorchClientMap.size(); i++) {
@@ -4245,6 +4655,18 @@
 
 void CameraService::updateOpenCloseStatus(const String8& cameraId, bool open,
         const String16& clientPackageName) {
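+    // Track which package currently has this camera open so it can be reported to listeners
+    // holding the camera open/close listener permission.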
+    auto state = getCameraState(cameraId);
+    if (state == nullptr) {
+        ALOGW("%s: Could not update the status for %s, no such device exists", __FUNCTION__,
+                cameraId.string());
+        return;
+    }
+    if (open) {
+        state->setClientPackage(String8(clientPackageName));
+    } else {
+        state->setClientPackage(String8::empty());
+    }
+
     Mutex::Autolock lock(mStatusListenerLock);
 
     for (const auto& it : mListenerList) {
@@ -4341,18 +4763,9 @@
     std::list<String16> retList;
     Mutex::Autolock lock(mCameraStatesLock);
     for (const auto& state : mCameraStates) {
-        std::vector<std::string> physicalCameraIds;
-        if (!mCameraProviderManager->isLogicalCamera(state.first.c_str(), &physicalCameraIds)) {
-            // This is not a logical multi-camera.
-            continue;
+        if (state.second->containsPhysicalCamera(physicalCameraId.c_str())) {
+            retList.emplace_back(String16(state.first));
         }
-        if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalCameraId.c_str())
-                == physicalCameraIds.end()) {
-            // cameraId is not a physical camera of this logical multi-camera.
-            continue;
-        }
-
-        retList.emplace_back(String16(state.first));
     }
     return retList;
 }
@@ -4427,9 +4840,11 @@
         return handleGetImageDumpMask(out);
     } else if (args.size() >= 2 && args[0] == String16("set-camera-mute")) {
         return handleSetCameraMute(args);
+    } else if (args.size() >= 2 && args[0] == String16("watch")) {
+        return handleWatchCommand(args, in, out);
     } else if (args.size() == 1 && args[0] == String16("help")) {
         printHelp(out);
-        return NO_ERROR;
+        return OK;
     }
     printHelp(err);
     return BAD_VALUE;
@@ -4573,6 +4988,348 @@
     return OK;
 }
 
+status_t CameraService::handleWatchCommand(const Vector<String16>& args, int inFd, int outFd) {
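+    // Dispatch the "watch" shell sub-commands: start, stop, dump, live and clear.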
+    if (args.size() >= 3 && args[1] == String16("start")) {
+        return startWatchingTags(args, outFd);
+    } else if (args.size() == 2 && args[1] == String16("stop")) {
+        return stopWatchingTags(outFd);
+    } else if (args.size() == 2 && args[1] == String16("dump")) {
+        return printWatchedTags(outFd);
+    } else if (args.size() >= 2 && args[1] == String16("live")) {
+        return printWatchedTagsUntilInterrupt(args, inFd, outFd);
+    } else if (args.size() == 2 && args[1] == String16("clear")) {
+        return clearCachedMonitoredTagDumps(outFd);
+    }
+    dprintf(outFd, "Camera service watch commands:\n"
+                 "  start -m <comma_separated_tag_list> [-c <comma_separated_client_list>]\n"
+                 "        starts watching the provided tags for clients with provided package\n"
+                 "        recognizes tag shorthands like '3a'\n"
+                 "        watches all clients if no client is passed, or if 'all' is listed\n"
+                 "  dump dumps the monitoring information and exits\n"
+                 "  stop stops watching all tags\n"
+                 "  live [-n <refresh_interval_ms>]\n"
+                 "        prints the monitored information in real time\n"
+                 "        Hit return to exit\n"
+                 "  clear clears all buffers storing information for watch command\n");
+    return BAD_VALUE;
+}
+
+status_t CameraService::startWatchingTags(const Vector<String16> &args, int outFd) {
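+    // Parse the tag list (-m) and optional client list (-c), then enable tag monitoring on every
+    // connected client whose package is watched.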
+    Mutex::Autolock lock(mLogLock);
+    size_t tagsIdx; // index of '-m'
+    String16 tags("");
+    for (tagsIdx = 2; tagsIdx < args.size() && args[tagsIdx] != String16("-m"); tagsIdx++);
+    if (tagsIdx < args.size() - 1) {
+        tags = args[tagsIdx + 1];
+    } else {
+        dprintf(outFd, "No tags provided.\n");
+        return BAD_VALUE;
+    }
+
+    size_t clientsIdx; // index of '-c'
+    String16 clients = kWatchAllClientsFlag; // watch all clients if no clients are provided
+    for (clientsIdx = 2; clientsIdx < args.size() && args[clientsIdx] != String16("-c");
+         clientsIdx++);
+    if (clientsIdx < args.size() - 1) {
+        clients = args[clientsIdx + 1];
+    }
+    parseClientsToWatchLocked(String8(clients));
+
+    // track tags to initialize future clients with the monitoring information
+    mMonitorTags = String8(tags);
+
+    bool serviceLock = tryLock(mServiceLock);
+    int numWatchedClients = 0;
+    auto cameraClients = mActiveClientManager.getAll();
+    for (const auto &clientDescriptor: cameraClients) {
+        if (clientDescriptor == nullptr) { continue; }
+        sp<BasicClient> client = clientDescriptor->getValue();
+        if (client.get() == nullptr) { continue; }
+
+        if (isClientWatchedLocked(client.get())) {
+            client->startWatchingTags(mMonitorTags, outFd);
+            numWatchedClients++;
+        }
+    }
+    dprintf(outFd, "Started watching %d active clients\n", numWatchedClients);
+
+    if (serviceLock) { mServiceLock.unlock(); }
+    return OK;
+}
+
+status_t CameraService::stopWatchingTags(int outFd) {
+    // clear mMonitorTags to prevent new clients from monitoring tags at initialization
+    Mutex::Autolock lock(mLogLock);
+    mMonitorTags = String8::empty();
+
+    mWatchedClientPackages.clear();
+    mWatchedClientsDumpCache.clear();
+
+    bool serviceLock = tryLock(mServiceLock);
+    auto cameraClients = mActiveClientManager.getAll();
+    for (const auto &clientDescriptor : cameraClients) {
+        if (clientDescriptor == nullptr) { continue; }
+        sp<BasicClient> client = clientDescriptor->getValue();
+        if (client.get() == nullptr) { continue; }
+        client->stopWatchingTags(outFd);
+    }
+    dprintf(outFd, "Stopped watching all clients.\n");
+    if (serviceLock) { mServiceLock.unlock(); }
+    return OK;
+}
+
+status_t CameraService::clearCachedMonitoredTagDumps(int outFd) {
+    Mutex::Autolock lock(mLogLock);
+    size_t clearedSize = mWatchedClientsDumpCache.size();
+    mWatchedClientsDumpCache.clear();
+    dprintf(outFd, "Cleared tag information of %zu cached clients.\n", clearedSize);
+    return OK;
+}
+
+status_t CameraService::printWatchedTags(int outFd) {
+    Mutex::Autolock logLock(mLogLock);
+    std::set<String16> connectedMonitoredClients;
+
+    bool printedSomething = false; // tracks if any monitoring information was printed
+                                   // (from either cached or active clients)
+
+    bool serviceLock = tryLock(mServiceLock);
+    // get all watched clients that are currently connected
+    for (const auto &clientDescriptor: mActiveClientManager.getAll()) {
+        if (clientDescriptor == nullptr) { continue; }
+
+        sp<BasicClient> client = clientDescriptor->getValue();
+        if (client.get() == nullptr) { continue; }
+        if (!isClientWatchedLocked(client.get())) { continue; }
+
+        std::vector<std::string> dumpVector;
+        client->dumpWatchedEventsToVector(dumpVector);
+
+        size_t printIdx = dumpVector.size();
+        if (printIdx == 0) {
+            continue;
+        }
+
+        // Print tag dumps for active client
+        const String8 &cameraId = clientDescriptor->getKey();
+        String8 packageName8 = String8(client->getPackageName());
+        const char *printablePackageName = packageName8.lockBuffer(packageName8.size());
+        dprintf(outFd, "Client: %s (active)\n", printablePackageName);
+        while (printIdx > 0) {
+            printIdx--;
+            dprintf(outFd, "%s:%s  %s", cameraId.string(), printablePackageName,
+                    dumpVector[printIdx].c_str());
+        }
+        dprintf(outFd, "\n");
+        packageName8.unlockBuffer();
+        printedSomething = true;
+
+        connectedMonitoredClients.emplace(client->getPackageName());
+    }
+    if (serviceLock) { mServiceLock.unlock(); }
+
+    // Print entries in mWatchedClientsDumpCache for clients that are not connected
+    for (const auto &kv: mWatchedClientsDumpCache) {
+        const String16 &package = kv.first;
+        if (connectedMonitoredClients.find(package) != connectedMonitoredClients.end()) {
+            continue;
+        }
+
+        dprintf(outFd, "Client: %s (cached)\n", String8(package).string());
+        dprintf(outFd, "%s\n", kv.second.c_str());
+        printedSomething = true;
+    }
+
+    if (!printedSomething) {
+        dprintf(outFd, "No monitoring information to print.\n");
+    }
+
+    return OK;
+}
+
+// Print all events in vector `events' that came after lastPrintedEvent
+void printNewWatchedEvents(int outFd,
+                           const char *cameraId,
+                           const String16 &packageName,
+                           const std::vector<std::string> &events,
+                           const std::string &lastPrintedEvent) {
+    if (events.empty()) { return; }
+
+    // index of lastPrintedEvent in events.
+    // lastPrintedIdx = events.size() if lastPrintedEvent is not in events
+    size_t lastPrintedIdx;
+    for (lastPrintedIdx = 0;
+         lastPrintedIdx < events.size() && lastPrintedEvent != events[lastPrintedIdx];
+         lastPrintedIdx++);
+
+    if (lastPrintedIdx == 0) { return; } // early exit if no new event in `events`
+
+    String8 packageName8(packageName);
+    const char *printablePackageName = packageName8.lockBuffer(packageName8.size());
+
+    // print events in chronological order (latest event last)
+    size_t idxToPrint = lastPrintedIdx;
+    do {
+        idxToPrint--;
+        dprintf(outFd, "%s:%s  %s", cameraId, printablePackageName, events[idxToPrint].c_str());
+    } while (idxToPrint != 0);
+
+    packageName8.unlockBuffer();
+}
+
+// Returns true if adb shell cmd watch should be interrupted based on data in inFd. The watch
+// command should be interrupted if the user presses the return key, or if user loses any way to
+// signal interrupt.
+// If timeoutMs == 0, this function will always return false
+bool shouldInterruptWatchCommand(int inFd, int outFd, long timeoutMs) {
+    struct timeval startTime;
+    int startTimeError = gettimeofday(&startTime, nullptr);
+    if (startTimeError) {
+        dprintf(outFd, "Failed waiting for interrupt, aborting.\n");
+        return true;
+    }
+
+    const nfds_t numFds = 1;
+    struct pollfd pollFd = { .fd = inFd, .events = POLLIN, .revents = 0 };
+
+    struct timeval currTime;
+    char buffer[2];
+    while (true) {
+        int currTimeError = gettimeofday(&currTime, nullptr);
+        if (currTimeError) {
+            dprintf(outFd, "Failed waiting for interrupt, aborting.\n");
+            return true;
+        }
+
+        long elapsedTimeMs = ((currTime.tv_sec - startTime.tv_sec) * 1000L)
+                + ((currTime.tv_usec - startTime.tv_usec) / 1000L);
+        int remainingTimeMs = (int) (timeoutMs - elapsedTimeMs);
+
+        if (remainingTimeMs <= 0) {
+            // No user interrupt within timeoutMs, don't interrupt watch command
+            return false;
+        }
+
+        int numFdsUpdated = poll(&pollFd, numFds, remainingTimeMs);
+        if (numFdsUpdated < 0) {
+            dprintf(outFd, "Failed while waiting for user input. Exiting.\n");
+            return true;
+        }
+
+        if (numFdsUpdated == 0) {
+            // No user input within timeoutMs, don't interrupt watch command
+            return false;
+        }
+
+        if (!(pollFd.revents & POLLIN)) {
+            dprintf(outFd, "Failed while waiting for user input. Exiting.\n");
+            return true;
+        }
+
+        ssize_t sizeRead = read(inFd, buffer, sizeof(buffer) - 1);
+        if (sizeRead < 0) {
+            dprintf(outFd, "Error reading user input. Exiting.\n");
+            return true;
+        }
+
+        if (sizeRead == 0) {
+            // Reached end of input fd (can happen if input is piped)
+            // User has no way to signal an interrupt, so interrupt here
+            return true;
+        }
+
+        if (buffer[0] == '\n') {
+            // User pressed return, interrupt watch command.
+            return true;
+        }
+    }
+}
+
+status_t CameraService::printWatchedTagsUntilInterrupt(const Vector<String16> &args,
+                                                       int inFd, int outFd) {
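+    // Repeatedly print newly recorded events for watched clients until the user presses return
+    // or the input stream is exhausted.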
+    // Figure out refresh interval, if present in args
+    long refreshTimeoutMs = 1000L; // refresh every 1s by default
+    if (args.size() > 2) {
+        size_t intervalIdx; // index of '-n'
+        for (intervalIdx = 2; intervalIdx < args.size() && String16("-n") != args[intervalIdx];
+             intervalIdx++);
+
+        size_t intervalValIdx = intervalIdx + 1;
+        if (intervalValIdx < args.size()) {
+            errno = 0; // clear errno first; strtol only sets it on failure (e.g. ERANGE)
+            refreshTimeoutMs = strtol(String8(args[intervalValIdx].string()), nullptr, 10);
+            if (errno) { return BAD_VALUE; }
+        }
+    }
+
+    // Set min timeout of 10ms. This prevents edge cases in polling when timeout of 0 is passed.
+    refreshTimeoutMs = refreshTimeoutMs < 10 ? 10 : refreshTimeoutMs;
+
+    dprintf(outFd, "Press return to exit...\n\n");
+    std::map<String16, std::string> packageNameToLastEvent;
+
+    while (true) {
+        bool serviceLock = tryLock(mServiceLock);
+        auto cameraClients = mActiveClientManager.getAll();
+        if (serviceLock) { mServiceLock.unlock(); }
+
+        for (const auto& clientDescriptor : cameraClients) {
+            Mutex::Autolock lock(mLogLock);
+            if (clientDescriptor == nullptr) { continue; }
+
+            sp<BasicClient> client = clientDescriptor->getValue();
+            if (client.get() == nullptr) { continue; }
+            if (!isClientWatchedLocked(client.get())) { continue; }
+
+            const String16 &packageName = client->getPackageName();
+            // This also initializes the map entries with an empty string
+            const std::string& lastPrintedEvent = packageNameToLastEvent[packageName];
+
+            std::vector<std::string> latestEvents;
+            client->dumpWatchedEventsToVector(latestEvents);
+
+            if (!latestEvents.empty()) {
+                String8 cameraId = clientDescriptor->getKey();
+                const char *printableCameraId = cameraId.lockBuffer(cameraId.size());
+                printNewWatchedEvents(outFd,
+                                      printableCameraId,
+                                      packageName,
+                                      latestEvents,
+                                      lastPrintedEvent);
+                packageNameToLastEvent[packageName] = latestEvents[0];
+                cameraId.unlockBuffer();
+            }
+        }
+        if (shouldInterruptWatchCommand(inFd, outFd, refreshTimeoutMs)) {
+            break;
+        }
+    }
+    return OK;
+}
+
+void CameraService::parseClientsToWatchLocked(String8 clients) {
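+    // Split the comma-separated package list; the 'all' sentinel supersedes individual packages.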
+    mWatchedClientPackages.clear();
+
+    const char *allSentinel = String8(kWatchAllClientsFlag).string();
+
+    char *tokenized = clients.lockBuffer(clients.size());
+    char *savePtr;
+    char *nextClient = strtok_r(tokenized, ",", &savePtr);
+
+    while (nextClient != nullptr) {
+        if (strcmp(nextClient, allSentinel) == 0) {
+            // Don't need to track any other package if 'all' is present
+            mWatchedClientPackages.clear();
+            mWatchedClientPackages.emplace(kWatchAllClientsFlag);
+            break;
+        }
+
+        // track package names
+        mWatchedClientPackages.emplace(nextClient);
+        nextClient = strtok_r(nullptr, ",", &savePtr);
+    }
+    clients.unlockBuffer();
+}
+
 status_t CameraService::printHelp(int out) {
     return dprintf(out, "Camera service commands:\n"
         "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
@@ -4585,9 +5342,20 @@
         "      Valid values 0=OFF, 1=ON for JPEG\n"
         "  get-image-dump-mask returns the current image-dump-mask value\n"
         "  set-camera-mute <0/1> enable or disable camera muting\n"
+        "  watch <start|stop|dump|live|clear> manages tag monitoring in connected clients\n"
         "  help print this message\n");
 }
 
+bool CameraService::isClientWatched(const BasicClient *client) {
+    Mutex::Autolock lock(mLogLock);
+    return isClientWatchedLocked(client);
+}
+
+bool CameraService::isClientWatchedLocked(const BasicClient *client) {
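+    // Caller must hold mLogLock. A client is watched if 'all' clients are watched or its package
+    // name is in mWatchedClientPackages.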
+    return mWatchedClientPackages.find(kWatchAllClientsFlag) != mWatchedClientPackages.end() ||
+           mWatchedClientPackages.find(client->getPackageName()) != mWatchedClientPackages.end();
+}
+
 int32_t CameraService::updateAudioRestriction() {
     Mutex::Autolock lock(mServiceLock);
     return updateAudioRestrictionLocked();
@@ -4609,10 +5377,43 @@
     return mode;
 }
 
-void CameraService::stopInjectionImpl() {
-    mInjectionStatusListener->removeListener();
+status_t CameraService::checkIfInjectionCameraIsPresent(const String8& externalCamId,
+        sp<BasicClient> clientSp) {
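+    // Verify the injection (external) camera is usable; if it is not, report the injection error,
+    // notify the client of the disconnect and tear the client down.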
+    std::unique_ptr<AutoConditionLock> lock =
+            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
+    status_t res = NO_ERROR;
+    if ((res = checkIfDeviceIsUsable(externalCamId)) != NO_ERROR) {
+        ALOGW("Device %s is not usable!", externalCamId.string());
+        mInjectionStatusListener->notifyInjectionError(
+                externalCamId, UNKNOWN_TRANSACTION);
+        clientSp->notifyError(
+                hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
+                CaptureResultExtras());
 
-    // TODO: Implement the stop injection function.
+        // Do not hold mServiceLock while disconnecting clients, but retain the condition blocking
+        // other clients from connecting in mServiceLockWrapper if held
+        mServiceLock.unlock();
+
+        // Clear caller identity temporarily so client disconnect PID checks work correctly
+        int64_t token = CameraThreadState::clearCallingIdentity();
+        clientSp->disconnect();
+        CameraThreadState::restoreCallingIdentity(token);
+
+        // Reacquire mServiceLock
+        mServiceLock.lock();
+    }
+
+    return res;
+}
+
+void CameraService::clearInjectionParameters() {
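+    // Reset all injection bookkeeping and detach the injection status listener.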
+    {
+        Mutex::Autolock lock(mInjectionParametersLock);
+        mInjectionInitPending = false;
+        mInjectionInternalCamId = "";
+    }
+    mInjectionExternalCamId = "";
+    mInjectionStatusListener->removeListener();
 }
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index bc2e347..d96ea00 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -47,6 +47,7 @@
 #include "media/RingBuffer.h"
 #include "utils/AutoConditionLock.h"
 #include "utils/ClientManager.h"
+#include "utils/IPCTransport.h"
 
 #include <set>
 #include <string>
@@ -106,12 +107,20 @@
     // HAL Callbacks - implements CameraProviderManager::StatusListener
 
     virtual void        onDeviceStatusChanged(const String8 &cameraId,
-            hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
+            CameraDeviceStatus newHalStatus) override;
     virtual void        onDeviceStatusChanged(const String8 &cameraId,
             const String8 &physicalCameraId,
-            hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
+            CameraDeviceStatus newHalStatus) override;
+    // This method may hold CameraProviderManager::mInterfaceMutex as a part
+    // of calling getSystemCameraKind() internally. Care should be taken not to
+    // directly / indirectly call this from callers who also hold
+    // mInterfaceMutex.
     virtual void        onTorchStatusChanged(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
+            TorchModeStatus newStatus) override;
+    // Does not hold CameraProviderManager::mInterfaceMutex.
+    virtual void        onTorchStatusChanged(const String8& cameraId,
+            TorchModeStatus newStatus,
+            SystemCameraKind kind) override;
     virtual void        onNewProviderRegistered() override;
 
     /////////////////////////////////////////////////////////////////////
@@ -164,6 +173,12 @@
     virtual binder::Status    setTorchMode(const String16& cameraId, bool enabled,
             const sp<IBinder>& clientBinder);
 
+    virtual binder::Status    turnOnTorchWithStrengthLevel(const String16& cameraId,
+            int32_t torchStrength, const sp<IBinder>& clientBinder);
+
+    virtual binder::Status    getTorchStrengthLevel(const String16& cameraId,
+            int32_t* torchStrength);
+
     virtual binder::Status    notifySystemEvent(int32_t eventId,
             const std::vector<int32_t>& args);
 
@@ -228,7 +243,7 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    int                 getDeviceVersion(const String8& cameraId, int* facing = nullptr,
+    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId, int* facing = nullptr,
             int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
@@ -272,6 +287,10 @@
         // Internal dump method to be called by CameraService
         virtual status_t dumpClient(int fd, const Vector<String16>& args) = 0;
 
+        virtual status_t startWatchingTags(const String8 &tags, int outFd);
+        virtual status_t stopWatchingTags(int outFd);
+        virtual status_t dumpWatchedEventsToVector(std::vector<std::string> &out);
+
         // Return the package name for this client
         virtual String16 getPackageName() const;
 
@@ -320,10 +339,19 @@
         // Set/reset camera mute
         virtual status_t setCameraMute(bool enabled) = 0;
 
+        // The injection camera session to replace the internal camera
+        // session.
+        virtual status_t injectCamera(const String8& injectedCamId,
+                sp<CameraProviderManager> manager) = 0;
+
+        // Stop the injection camera and restore the internal camera session.
+        virtual status_t stopInjection() = 0;
+
     protected:
         BasicClient(const sp<CameraService>& cameraService,
                 const sp<IBinder>& remoteCallback,
                 const String16& clientPackageName,
+                bool nativeClient,
                 const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int cameraFacing,
@@ -346,6 +374,7 @@
         const int                       mCameraFacing;
         const int                       mOrientation;
         String16                        mClientPackageName;
+        bool                            mSystemNativeClient;
         std::optional<String16>         mClientFeatureId;
         pid_t                           mClientPid;
         const uid_t                     mClientUid;
@@ -433,6 +462,7 @@
         Client(const sp<CameraService>& cameraService,
                 const sp<hardware::ICameraClient>& cameraClient,
                 const String16& clientPackageName,
+                bool systemNativeClient,
                 const std::optional<String16>& clientFeatureId,
                 const String8& cameraIdStr,
                 int api1CameraId,
@@ -516,14 +546,15 @@
          */
         static DescriptorPtr makeClientDescriptor(const String8& key, const sp<BasicClient>& value,
                 int32_t cost, const std::set<String8>& conflictingKeys, int32_t score,
-                int32_t ownerId, int32_t state, int oomScoreOffset);
+                int32_t ownerId, int32_t state, int oomScoreOffset, bool systemNativeClient);
 
         /**
          * Make a ClientDescriptor object wrapping the given BasicClient strong pointer with
          * values initialized from a prior ClientDescriptor.
          */
         static DescriptorPtr makeClientDescriptor(const sp<BasicClient>& value,
-                const CameraService::DescriptorPtr& partial, int oomScoreOffset);
+                const CameraService::DescriptorPtr& partial, int oomScoreOffset,
+                bool systemNativeClient);
 
     }; // class CameraClientManager
 
@@ -532,8 +563,6 @@
 
 private:
 
-    typedef hardware::camera::common::V1_0::CameraDeviceStatus CameraDeviceStatus;
-
     /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
@@ -563,7 +592,7 @@
          * returned in the HAL's camera_info struct for each device.
          */
         CameraState(const String8& id, int cost, const std::set<String8>& conflicting,
-                SystemCameraKind deviceKind);
+                SystemCameraKind deviceKind, const std::vector<std::string>& physicalCameras);
         virtual ~CameraState();
 
         /**
@@ -621,12 +650,24 @@
         SystemCameraKind getSystemCameraKind() const;
 
         /**
+         * Return whether this camera is a logical multi-camera and has a
+         * particular physical sub-camera.
+         */
+        bool containsPhysicalCamera(const std::string& physicalCameraId) const;
+
+        /**
          * Add/Remove the unavailable physical camera ID.
          */
         bool addUnavailablePhysicalId(const String8& physicalId);
         bool removeUnavailablePhysicalId(const String8& physicalId);
 
         /**
+         * Set and get client package name.
+         */
+        void setClientPackage(const String8& clientPackage);
+        String8 getClientPackage() const;
+
+        /**
          * Return the unavailable physical ids for this device.
          *
          * This method acquires mStatusLock.
@@ -638,9 +679,11 @@
         const int mCost;
         std::set<String8> mConflicting;
         std::set<String8> mUnavailablePhysicalIds;
+        String8 mClientPackage;
         mutable Mutex mStatusLock;
         CameraParameters mShimParams;
         const SystemCameraKind mSystemCameraKind;
+        const std::vector<std::string> mPhysicalCameras; // Empty if not a logical multi-camera
     }; // class CameraState
 
     // Observer for UID lifecycle enforcing that UIDs in idle
@@ -656,11 +699,13 @@
         bool isUidActive(uid_t uid, String16 callingPackage);
         int32_t getProcState(uid_t uid);
 
-        void onUidGone(uid_t uid, bool disabled);
-        void onUidActive(uid_t uid);
-        void onUidIdle(uid_t uid, bool disabled);
+        // IUidObserver
+        void onUidGone(uid_t uid, bool disabled) override;
+        void onUidActive(uid_t uid) override;
+        void onUidIdle(uid_t uid, bool disabled) override;
         void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
-                int32_t capability);
+                int32_t capability) override;
+        void onUidProcAdjChanged(uid_t uid) override;
 
         void addOverrideUid(uid_t uid, String16 callingPackage, bool active);
         void removeOverrideUid(uid_t uid, String16 callingPackage);
@@ -675,13 +720,18 @@
         int32_t getProcStateLocked(uid_t uid);
         void updateOverrideUid(uid_t uid, String16 callingPackage, bool active, bool insert);
 
+        struct MonitoredUid {
+            int32_t procState;
+            size_t refCount;
+        };
+
         Mutex mUidLock;
         bool mRegistered;
         ActivityManager mAm;
         wp<CameraService> mService;
         std::unordered_set<uid_t> mActiveUids;
-        // Monitored uid map to cached procState and refCount pair
-        std::unordered_map<uid_t, std::pair<int32_t, size_t>> mMonitoredUids;
+        // Monitored uid map
+        std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
         std::unordered_map<uid_t, bool> mOverrideUids;
     }; // class UidPolicy
 
@@ -697,9 +747,10 @@
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
-            bool isCameraPrivacyEnabled(userid_t userId);
+            bool isCameraPrivacyEnabled();
 
-            binder::Status onSensorPrivacyChanged(bool enabled);
+            binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
+                                                  bool enabled);
 
             // IBinder::DeathRecipient implementation
             virtual void binderDied(const wp<IBinder> &who);
@@ -745,7 +796,7 @@
     // Only call with mServiceLock held.
     status_t handleEvictionsLocked(const String8& cameraId, int clientPid,
         apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName,
-        int scoreOffset,
+        int scoreOffset, bool systemNativeClient,
         /*out*/
         sp<BasicClient>* client,
         std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial);
@@ -777,7 +828,7 @@
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int api1CameraId, const String16& clientPackageName,
+            int api1CameraId, const String16& clientPackageName, bool systemNativeClient,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
             /*out*/sp<CLIENT>& device);
@@ -810,6 +861,14 @@
     RingBuffer<String8> mEventLog;
     Mutex mLogLock;
 
+    // Set of client package names to watch. If this set contains 'all', then all clients will
+    // be watched. Access should be guarded by mLogLock.
+    std::set<String16> mWatchedClientPackages;
+    // Cache of the last monitored-tags dump, taken immediately before the client disconnects. If
+    // a client re-connects, its entry is not updated until it disconnects again. Access should be
+    // guarded by mLogLock.
+    std::map<String16, std::string> mWatchedClientsDumpCache;
+
     // The last monitored tags set by client
     String8 mMonitorTags;
 
@@ -846,7 +905,7 @@
      * This method must be called with mServiceLock held.
      */
     void finishConnectLocked(const sp<BasicClient>& client, const DescriptorPtr& desc,
-            int oomScoreOffset);
+            int oomScoreOffset, bool systemNativeClient);
 
     /**
      * Returns the underlying camera Id string mapped to a camera id int
@@ -942,6 +1001,8 @@
      */
     void dumpEventLog(int fd);
 
+    void cacheClientTagDumpIfNeeded(const char *cameraId, BasicClient *client);
+
     /**
      * This method will acquire mServiceLock
      */
@@ -1052,7 +1113,7 @@
     // guard mTorchUidMap
     Mutex                mTorchUidMapMutex;
     // camera id -> torch status
-    KeyedVector<String8, hardware::camera::common::V1_0::TorchModeStatus>
+    KeyedVector<String8, TorchModeStatus>
             mTorchStatusMap;
     // camera id -> torch client binder
     // only store the last client that turns on each camera's torch mode
@@ -1066,16 +1127,16 @@
     // handle torch mode status change and invoke callbacks. mTorchStatusMutex
     // should be locked.
     void onTorchStatusChangedLocked(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus newStatus,
+            TorchModeStatus newStatus,
             SystemCameraKind systemCameraKind);
 
     // get a camera's torch status. mTorchStatusMutex should be locked.
     status_t getTorchStatusLocked(const String8 &cameraId,
-             hardware::camera::common::V1_0::TorchModeStatus *status) const;
+             TorchModeStatus *status) const;
 
     // set a camera's torch status. mTorchStatusMutex should be locked.
     status_t setTorchStatusLocked(const String8 &cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus status);
+            TorchModeStatus status);
 
     // notify physical camera status when the physical camera is public.
     // Expects mStatusListenerLock to be locked.
@@ -1134,9 +1195,43 @@
     // Set the camera mute state
     status_t handleSetCameraMute(const Vector<String16>& args);
 
+    // Handle 'watch' command as passed through 'cmd'
+    status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
+
+    // Enable tag monitoring of the given tags in provided clients
+    status_t startWatchingTags(const Vector<String16> &args, int outFd);
+
+    // Disable tag monitoring
+    status_t stopWatchingTags(int outFd);
+
+    // Clears mWatchedClientsDumpCache
+    status_t clearCachedMonitoredTagDumps(int outFd);
+
+    // Print events of monitored tags in all cached and attached clients
+    status_t printWatchedTags(int outFd);
+
+    // Print events of monitored tags in all attached clients as they are captured. New events are
+    // fetched every `refreshMillis` ms.
+    // NOTE: This function does not terminate until the user passes '\n' to inFd.
+    status_t printWatchedTagsUntilInterrupt(const Vector<String16> &args, int inFd, int outFd);
+
+    // Parses a comma-separated client list and adds the entries to mWatchedClientPackages.
+    // Does not acquire mLogLock before modifying mWatchedClientPackages. It is the caller's
+    // responsibility to acquire mLogLock before calling this function.
+    void parseClientsToWatchLocked(String8 clients);
+
     // Prints the shell command help
     status_t printHelp(int out);
 
+    // Returns true if client should monitor tags based on the contents of mWatchedClientPackages.
+    // Acquires mLogLock before querying mWatchedClientPackages.
+    bool isClientWatched(const BasicClient *client);
+
+    // Returns true if client should monitor tags based on the contents of mWatchedClientPackages.
+    // Does not acquire mLogLock before querying mWatchedClientPackages. It is the caller's
+    // responsibility to acquire mLogLock before calling this function.
+    bool isClientWatchedLocked(const BasicClient *client);
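+    // A minimal sketch of the locked variant (an assumption for illustration, not necessarily
+    // the actual implementation): with mLogLock held it reduces to two set lookups, e.g.
+    //     return mWatchedClientPackages.count(kWatchAllClientsFlag) != 0 ||
+    //            mWatchedClientPackages.count(client->getPackageName()) != 0;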
+
     /**
      * Get the current system time as a formatted string.
      */
@@ -1144,22 +1239,24 @@
 
     static binder::Status makeClient(const sp<CameraService>& cameraService,
             const sp<IInterface>& cameraCb, const String16& packageName,
-            const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
-            int deviceVersion, apiLevel effectiveApiLevel, bool overrideForPerfClass,
-            /*out*/sp<BasicClient>* client);
+            bool systemNativeClient, const std::optional<String16>& featureId,
+            const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
+            int clientPid, uid_t clientUid, int servicePid,
+            std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
+            bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
     static String8 toString(std::set<userid_t> intSet);
-    static int32_t mapToInterface(hardware::camera::common::V1_0::TorchModeStatus status);
-    static StatusInternal mapToInternal(hardware::camera::common::V1_0::CameraDeviceStatus status);
+    static int32_t mapToInterface(TorchModeStatus status);
+    static StatusInternal mapToInternal(CameraDeviceStatus status);
     static int32_t mapToInterface(StatusInternal status);
 
 
     void broadcastTorchModeStatus(const String8& cameraId,
-            hardware::camera::common::V1_0::TorchModeStatus status,
-            SystemCameraKind systemCameraKind);
+            TorchModeStatus status, SystemCameraKind systemCameraKind);
+
+    void broadcastTorchStrengthLevel(const String8& cameraId, int32_t newTorchStrengthLevel);
 
     void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
 
@@ -1167,6 +1264,10 @@
     // Use separate keys for offline devices.
     static const String8 kOfflineDevice;
 
+    // Sentinel value to be stored in `mWatchedClientPackages` to indicate that all clients should
+    // be watched.
+    static const String16 kWatchAllClientsFlag;
+
     // TODO: right now each BasicClient holds one AppOpsManager instance.
     // We can refactor the code so all of clients share this instance
     AppOpsManager mAppOps;
@@ -1194,7 +1295,7 @@
 
             void addListener(const sp<hardware::camera2::ICameraInjectionCallback>& callback);
             void removeListener();
-            void notifyInjectionError(int errorCode);
+            void notifyInjectionError(String8 injectedCamId, status_t err);
 
             // IBinder::DeathRecipient implementation
             virtual void binderDied(const wp<IBinder>& who);
@@ -1221,7 +1322,22 @@
             wp<CameraService> mParent;
     };
 
-    void stopInjectionImpl();
+    // When injecting the camera, this checks whether the injected camera's status is unavailable.
+    // If it is, the disconnect function will be called to prevent camera access on the device.
+    status_t checkIfInjectionCameraIsPresent(const String8& externalCamId,
+            sp<BasicClient> clientSp);
+
+    void clearInjectionParameters();
+
+    // This is the existing camera id being replaced.
+    String8 mInjectionInternalCamId;
+    // This is the external camera id replacing the internal id.
+    String8 mInjectionExternalCamId;
+    bool mInjectionInitPending = false;
+    // Guard mInjectionInternalCamId and mInjectionInitPending.
+    Mutex mInjectionParametersLock;
+
+    void updateTorchUidMapLocked(const String16& cameraId, int uid);
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
new file mode 100644
index 0000000..a169667
--- /dev/null
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraServiceWatchdog"
+
+#include "CameraServiceWatchdog.h"
+
+namespace android {
+
+bool CameraServiceWatchdog::threadLoop()
+{
+    {
+        AutoMutex _l(mWatchdogLock);
+
+        while (mPause) {
+            mWatchdogCondition.wait(mWatchdogLock);
+        }
+    }
+
+    std::this_thread::sleep_for(std::chrono::milliseconds(mCycleLengthMs));
+
+    {
+        AutoMutex _l(mWatchdogLock);
+
+        for (auto it = tidToCycleCounterMap.begin(); it != tidToCycleCounterMap.end(); it++) {
+            uint32_t currentThreadId = it->first;
+
+            tidToCycleCounterMap[currentThreadId]++;
+
+            if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
+                ALOGW("CameraServiceWatchdog triggering abort for pid: %d", getpid());
+                // We use abort here so we can get a tombstone for better
+                // debugging.
+                abort();
+            }
+        }
+    }
+
+    return true;
+}
+
+void CameraServiceWatchdog::requestExit()
+{
+    Thread::requestExit();
+
+    AutoMutex _l(mWatchdogLock);
+
+    tidToCycleCounterMap.clear();
+
+    if (mPause) {
+        mPause = false;
+        mWatchdogCondition.signal();
+    }
+}
+
+void CameraServiceWatchdog::stop(uint32_t tid)
+{
+    AutoMutex _l(mWatchdogLock);
+
+    tidToCycleCounterMap.erase(tid);
+
+    if (tidToCycleCounterMap.empty()) {
+        mPause = true;
+    }
+}
+
+void CameraServiceWatchdog::start(uint32_t tid)
+{
+    AutoMutex _l(mWatchdogLock);
+
+    tidToCycleCounterMap[tid] = 0;
+
+    if (mPause) {
+        mPause = false;
+        mWatchdogCondition.signal();
+    }
+}
+
+}   // namespace android
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
new file mode 100644
index 0000000..f4955e2
--- /dev/null
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The CameraService watchdog is used to help detect bad states in the
+ * Camera HAL. The threadloop uses cycle counters, assigned to each calling
+ * thread, to monitor elapsed time and kills the process when the expected
+ * duration has been exceeded.
+ * Notes on multi-threaded behaviors:
+ *   - The threadloop is blocked/paused when there are no calls being
+ *     monitored.
+ *   - The start and stop functions handle simultaneous call monitoring and
+ *     single call monitoring differently. See the function documentation for
+ *     more details.
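+ *
+ * With the default values defined below (kMaxCycles = 100, kCycleLengthMs = 100),
+ * a monitored call is allowed roughly 100 x 100 ms = 10 seconds before the
+ * watchdog aborts the process.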
+ */
+
+#include <chrono>
+#include <thread>
+#include <time.h>
+#include <utils/Thread.h>
+#include <utils/Log.h>
+#include <unordered_map>
+
+// Used to wrap the call of interest in start and stop calls
+#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid())
+#define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
+        watchThread([&]() { return toMonitor;}, gettid(), cycles, cycleLength);
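+//
+// Illustrative usage (a sketch only; `mCameraServiceWatchdog` and `initializeDevice()` are
+// assumptions, not part of this change):
+//
+//     // Aborts the process if the wrapped call exceeds the default
+//     // kMaxCycles x kCycleLengthMs (100 x 100 ms) budget.
+//     status_t res = mCameraServiceWatchdog->WATCH(initializeDevice());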
+
+// Default cycles and cycle length values used to calculate permitted elapsed time
+const static size_t   kMaxCycles     = 100;
+const static uint32_t kCycleLengthMs = 100;
+
+namespace android {
+
+class CameraServiceWatchdog : public Thread {
+
+public:
+    explicit CameraServiceWatchdog() : mPause(true), mMaxCycles(kMaxCycles),
+            mCycleLengthMs(kCycleLengthMs) {};
+
+    explicit CameraServiceWatchdog (size_t maxCycles, uint32_t cycleLengthMs) :
+            mPause(true), mMaxCycles(maxCycles), mCycleLengthMs(cycleLengthMs) {};
+
+    virtual ~CameraServiceWatchdog() {};
+
+    virtual void requestExit();
+
+    /** Used to wrap monitored calls in start and stop functions using custom timer values */
+    template<typename T>
+    auto watchThread(T func, uint32_t tid, uint32_t cycles, uint32_t cycleLength) {
+        if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
+            // Create another instance of the watchdog to prevent disruption
+            // of the timer for currently monitored calls
+            sp<CameraServiceWatchdog> tempWatchdog =
+                    new CameraServiceWatchdog(cycles, cycleLength);
+            tempWatchdog->run("CameraServiceWatchdog");
+            auto res = tempWatchdog->watchThread(func, tid);
+            tempWatchdog->requestExit();
+            tempWatchdog.clear();
+            return res;
+        }
+
+        // If the custom timer values are equivalent to the class timer values, use
+        // the current thread
+        return watchThread(func, tid);
+    }
+
+    /** Used to wrap monitored calls in start and stop functions using class timer values */
+    template<typename T>
+    auto watchThread(T func, uint32_t tid) {
+        start(tid);
+        auto res = func();
+        stop(tid);
+
+        return res;
+    }
+
+private:
+
+    /**
+     * Start adds a cycle counter for the calling thread. When threadloop is blocked/paused,
+     * start() unblocks and starts the watchdog
+     */
+    void start(uint32_t tid);
+
+    /**
+     * stop() erases the calling thread's cycle counter to end its watchdog; if there are
+     * no calls left to be monitored, it also blocks/pauses the threadloop
+     */
+    void stop(uint32_t tid);
+
+    virtual bool    threadLoop();
+
+    Mutex           mWatchdogLock;        // Lock for condition variable
+    Condition       mWatchdogCondition;   // Condition variable for stop/start
+    bool            mPause;               // True if thread is currently paused
+    uint32_t        mMaxCycles;           // Max cycles
+    uint32_t        mCycleLengthMs;       // Length of time elapsed per cycle
+
+    std::unordered_map<uint32_t, uint32_t> tidToCycleCounterMap; // Thread Id to cycle counter map
+};
+
+}   // namespace android
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 8c72bd7..68dc7f8 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -22,6 +22,7 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <camera/CameraUtils.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
@@ -33,6 +34,7 @@
 #include "api1/client2/CaptureSequencer.h"
 #include "api1/client2/CallbackProcessor.h"
 #include "api1/client2/ZslProcessor.h"
+#include "device3/RotateAndCropMapper.h"
 #include "utils/CameraThreadState.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
@@ -60,13 +62,18 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass):
-        Camera2ClientBase(cameraService, cameraClient, clientPackageName, clientFeatureId,
-                cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
+        Camera2ClientBase(cameraService, cameraClient, clientPackageName,
+                false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
+                cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
+                clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing)
 {
     ATRACE_CALL();
 
+    mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
+    mRotateAndCropIsSupported = false;
+    mRotateAndCropPreviewTransform = 0;
+
     SharedParameters::Lock l(mParameters);
     l.mParameters.state = Parameters::DISCONNECTED;
 }
@@ -105,7 +112,7 @@
     {
         SharedParameters::Lock l(mParameters);
 
-        res = l.mParameters.initialize(mDevice.get(), mDeviceVersion);
+        res = l.mParameters.initialize(mDevice.get());
         if (res != OK) {
             ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
                     __FUNCTION__, mCameraId, strerror(-res), res);
@@ -115,6 +122,14 @@
         l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
     }
 
+    const CameraMetadata& staticInfo = mDevice->info();
+    mRotateAndCropIsSupported = camera3::RotateAndCropMapper::isNeeded(&staticInfo);
+    // The 'mRotateAndCropMode' value only accounts for the necessary adjustment
+    // when the display rotates. The sensor orientation still needs to be calculated
+    // and applied similarly to the Camera2 path.
+    CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
+            &mRotateAndCropPreviewTransform);
+
     String8 threadName;
 
     mStreamingProcessor = new StreamingProcessor(this);
@@ -1674,6 +1689,14 @@
                 __FUNCTION__, mCameraId, degrees);
         return BAD_VALUE;
     }
+    {
+        Mutex::Autolock icl(mRotateAndCropLock);
+        if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
+            ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
+                    mRotateAndCropMode);
+            transform = mRotateAndCropPreviewTransform;
+        }
+    }
     SharedParameters::Lock l(mParameters);
     if (transform != l.mParameters.previewTransform &&
             getPreviewStreamId() != NO_STREAM) {
@@ -2296,6 +2319,16 @@
 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop) {
     if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
 
+    {
+        Mutex::Autolock icl(mRotateAndCropLock);
+        if (mRotateAndCropIsSupported) {
+            mRotateAndCropMode = rotateAndCrop;
+        } else {
+            mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
+            return OK;
+        }
+    }
+
     return mDevice->setRotateAndCropAutoBehavior(
         static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
 }
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 64ab8ff..c8dfc46 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -238,6 +238,15 @@
     status_t initializeImpl(TProviderPtr providerPtr, const String8& monitorTags);
 
     bool isZslEnabledInStillTemplate();
+    // The current rotate & crop mode passed by camera service
+    uint8_t mRotateAndCropMode;
+    // Synchronize access to 'mRotateAndCropMode'
+    mutable Mutex mRotateAndCropLock;
+    // Contains the preview stream transformation that would normally be applied
+    // when the display rotation is 0
+    int mRotateAndCropPreviewTransform;
+    // Flag indicating camera device support for the rotate & crop interface
+    bool mRotateAndCropIsSupported;
 
     mutable Mutex mLatestRequestMutex;
     Condition mLatestRequestSignal;
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index a38d7ae..467108d 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -20,6 +20,9 @@
 
 #include <netinet/in.h>
 
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <utils/Log.h>
@@ -36,6 +39,8 @@
 namespace camera2 {
 
 using android::camera3::CAMERA_STREAM_ROTATION_0;
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
 
 JpegProcessor::JpegProcessor(
     sp<Camera2Client> client,
@@ -350,11 +355,11 @@
     size_t size;
 
     // First check for JPEG transport header at the end of the buffer
-    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
-    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
-    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
-        size = blob->jpeg_size;
-        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
+    uint8_t *header = jpegBuffer + (maxSize - sizeof(CameraBlob));
+    CameraBlob *blob = (CameraBlob*)(header);
+    if (blob->blobId == CameraBlobId::JPEG) {
+        size = blob->blobSizeBytes;
+        if (size > 0 && size <= maxSize - sizeof(CameraBlob)) {
             // Verify SOI and EOI markers
             size_t offset = size - MARKER_LENGTH;
             uint8_t *end = jpegBuffer + offset;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 80508e4..123cd75 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -50,7 +50,7 @@
 Parameters::~Parameters() {
 }
 
-status_t Parameters::initialize(CameraDeviceBase *device, int deviceVersion) {
+status_t Parameters::initialize(CameraDeviceBase *device) {
     status_t res;
     if (device == nullptr) {
         ALOGE("%s: device is null!", __FUNCTION__);
@@ -63,7 +63,6 @@
         return BAD_VALUE;
     }
     Parameters::info = &info;
-    mDeviceVersion = deviceVersion;
 
     res = buildFastInfo(device);
     if (res != OK) return res;
@@ -75,23 +74,43 @@
     // Treat the H.264 max size as the max supported video size.
     MediaProfiles *videoEncoderProfiles = MediaProfiles::getInstance();
     Vector<video_encoder> encoders = videoEncoderProfiles->getVideoEncoders();
+    int32_t minVideoWidth = MAX_PREVIEW_WIDTH;
+    int32_t minVideoHeight = MAX_PREVIEW_HEIGHT;
     int32_t maxVideoWidth = 0;
     int32_t maxVideoHeight = 0;
     for (size_t i = 0; i < encoders.size(); i++) {
-        int width = videoEncoderProfiles->getVideoEncoderParamByName(
+        int w0 = videoEncoderProfiles->getVideoEncoderParamByName(
+                "enc.vid.width.min", encoders[i]);
+        int h0 = videoEncoderProfiles->getVideoEncoderParamByName(
+                "enc.vid.height.min", encoders[i]);
+        int w1 = videoEncoderProfiles->getVideoEncoderParamByName(
                 "enc.vid.width.max", encoders[i]);
-        int height = videoEncoderProfiles->getVideoEncoderParamByName(
+        int h1 = videoEncoderProfiles->getVideoEncoderParamByName(
                 "enc.vid.height.max", encoders[i]);
-        // Treat width/height separately here to handle the case where different
-        // profile might report max size of different aspect ratio
-        if (width > maxVideoWidth) {
-            maxVideoWidth = width;
+        // Assume the min size is 0 if it's not reported by encoder
+        if (w0 == -1) {
+            w0 = 0;
         }
-        if (height > maxVideoHeight) {
-            maxVideoHeight = height;
+        if (h0 == -1) {
+            h0 = 0;
+        }
+        // Treat width/height separately here to handle the case where different
+        // profile might report min/max size of different aspect ratio
+        if (w0 < minVideoWidth) {
+            minVideoWidth = w0;
+        }
+        if (h0 < minVideoHeight) {
+            minVideoHeight = h0;
+        }
+        if (w1 > maxVideoWidth) {
+            maxVideoWidth = w1;
+        }
+        if (h1 > maxVideoHeight) {
+            maxVideoHeight = h1;
         }
     }
-    // This is just an upper bound and may not be an actually valid video size
+    // These are upper/lower bounds and may not be actually valid video sizes
+    const Size VIDEO_SIZE_LOWER_BOUND = {minVideoWidth, minVideoHeight};
     Size videoSizeUpperBound = {maxVideoWidth, maxVideoHeight};
 
     if (fastInfo.supportsPreferredConfigs) {
@@ -99,9 +118,10 @@
         videoSizeUpperBound = getMaxSize(getPreferredVideoSizes());
     }
 
-    res = getFilteredSizes(maxPreviewSize, &availablePreviewSizes);
+    res = getFilteredSizes(Size{0, 0}, maxPreviewSize, &availablePreviewSizes);
     if (res != OK) return res;
-    res = getFilteredSizes(videoSizeUpperBound, &availableVideoSizes);
+    res = getFilteredSizes(
+        VIDEO_SIZE_LOWER_BOUND, videoSizeUpperBound, &availableVideoSizes);
     if (res != OK) return res;
 
     // Select initial preview and video size that's under the initial bound and
@@ -864,7 +884,6 @@
 
     if (fabs(maxDigitalZoom.data.f[0] - 1.f) > 0.00001f) {
         params.set(CameraParameters::KEY_ZOOM, zoom);
-        params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1);
 
         {
             String8 zoomRatios;
@@ -872,18 +891,34 @@
             float zoomIncrement = (maxDigitalZoom.data.f[0] - zoom) /
                     (NUM_ZOOM_STEPS-1);
             bool addComma = false;
-            for (size_t i=0; i < NUM_ZOOM_STEPS; i++) {
+            int previousZoom = -1;
+            size_t zoomSteps = 0;
+            for (size_t i = 0; i < NUM_ZOOM_STEPS; i++) {
+                int currentZoom = static_cast<int>(zoom * 100);
+                if (previousZoom == currentZoom) {
+                    zoom += zoomIncrement;
+                    continue;
+                }
                 if (addComma) zoomRatios += ",";
                 addComma = true;
-                zoomRatios += String8::format("%d", static_cast<int>(zoom * 100));
+                zoomRatios += String8::format("%d", currentZoom);
                 zoom += zoomIncrement;
+                previousZoom = currentZoom;
+                zoomSteps++;
             }
-            params.set(CameraParameters::KEY_ZOOM_RATIOS, zoomRatios);
+
+            if (zoomSteps > 0) {
+                params.set(CameraParameters::KEY_ZOOM_RATIOS, zoomRatios);
+                params.set(CameraParameters::KEY_ZOOM_SUPPORTED,
+                        CameraParameters::TRUE);
+                params.set(CameraParameters::KEY_MAX_ZOOM, zoomSteps - 1);
+                zoomAvailable = true;
+            } else {
+                params.set(CameraParameters::KEY_ZOOM_SUPPORTED,
+                        CameraParameters::FALSE);
+            }
         }
 
-        params.set(CameraParameters::KEY_ZOOM_SUPPORTED,
-                CameraParameters::TRUE);
-        zoomAvailable = true;
     } else {
         params.set(CameraParameters::KEY_ZOOM_SUPPORTED,
                 CameraParameters::FALSE);
@@ -1040,7 +1075,8 @@
     if (fastInfo.supportsPreferredConfigs) {
         previewSizeBound = getMaxSize(getPreferredPreviewSizes());
     }
-    status_t res = getFilteredSizes(previewSizeBound, &supportedPreviewSizes);
+    status_t res = getFilteredSizes(
+        Size{0, 0}, previewSizeBound, &supportedPreviewSizes);
     if (res != OK) return res;
     for (size_t i=0; i < availableFpsRanges.count; i += 2) {
         if (!isFpsSupported(supportedPreviewSizes,
@@ -2983,7 +3019,8 @@
     }
 }
 
-status_t Parameters::getFilteredSizes(Size limit, Vector<Size> *sizes) {
+status_t Parameters::getFilteredSizes(const Size &lower, const Size &upper,
+        Vector<Size> *sizes) {
     if (info == NULL) {
         ALOGE("%s: Static metadata is not initialized", __FUNCTION__);
         return NO_INIT;
@@ -2999,7 +3036,8 @@
         const StreamConfiguration &sc = scs[i];
         if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                 sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
-                ((sc.width * sc.height) <= (limit.width * limit.height))) {
+                ((sc.width * sc.height) >= (lower.width * lower.height)) &&
+                ((sc.width * sc.height) <= (upper.width * upper.height))) {
             int64_t minFrameDuration = getMinFrameDurationNs(
                     {sc.width, sc.height}, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
             if (minFrameDuration > MAX_PREVIEW_RECORD_DURATION_NS) {
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 1b2ceda..cbe62a7 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -269,7 +269,7 @@
     ~Parameters();
 
     // Sets up default parameters
-    status_t initialize(CameraDeviceBase *device, int deviceVersion);
+    status_t initialize(CameraDeviceBase *device);
 
     // Build fast-access device static info from static info
     status_t buildFastInfo(CameraDeviceBase *device);
@@ -396,9 +396,10 @@
 
     Vector<Size> availablePreviewSizes;
     Vector<Size> availableVideoSizes;
-    // Get size list (that are no larger than limit) from static metadata.
+    // Get size list (that fall within lower/upper bounds) from static metadata.
     // This method filtered size with minFrameDuration < MAX_PREVIEW_RECORD_DURATION_NS
-    status_t getFilteredSizes(Size limit, Vector<Size> *sizes);
+    status_t getFilteredSizes(const Size &lower, const Size &upper,
+            Vector<Size> *sizes);
     // Get max size (from the size array) that matches the given aspect ratio.
     Size getMaxSizeForRatio(float ratio, const int32_t* sizeArray, size_t count);
 
@@ -458,7 +459,6 @@
     // Helper function to get the suggested video sizes
     Vector<Size> getPreferredVideoSizes() const;
 
-    int mDeviceVersion;
     uint8_t mDefaultSceneMode;
 };
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 83b8e95..5e91501 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -51,13 +51,14 @@
 
 namespace android {
 using namespace camera2;
+using namespace camera3;
 using camera3::camera_stream_rotation_t::CAMERA_STREAM_ROTATION_0;
-using camera3::SessionConfigurationUtils;
 
 CameraDeviceClientBase::CameraDeviceClientBase(
         const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
+        bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int api1CameraId,
@@ -69,6 +70,7 @@
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
             clientPackageName,
+            systemNativeClient,
             clientFeatureId,
             cameraId,
             cameraFacing,
@@ -86,6 +88,7 @@
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         const String16& clientPackageName,
+        bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int cameraFacing,
@@ -94,8 +97,8 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass) :
-    Camera2ClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
+    Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
+                clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
                 clientPid, clientUid, servicePid, overrideForPerfClass),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
@@ -140,6 +143,40 @@
                 physicalKeysEntry.data.i32 + physicalKeysEntry.count);
     }
 
+    auto entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    mDynamicProfileMap.emplace(
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+    if (entry.count > 0) {
+        const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
+                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
+        if (it != entry.data.u8 + entry.count) {
+            entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+            if ((entry.count > 0) && ((entry.count % 3) == 0)) {
+                int64_t standardBitmap =
+                        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+                for (size_t i = 0; i < entry.count; i += 3) {
+                    if (entry.data.i64[i] !=
+                            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+                        mDynamicProfileMap.emplace(entry.data.i64[i], entry.data.i64[i+1]);
+                        if ((entry.data.i64[i+1] == 0) || (entry.data.i64[i+1] &
+                                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+                            standardBitmap |= entry.data.i64[i];
+                        }
+                    } else {
+                        ALOGE("%s: Device %s includes unexpected profile entry: 0x%" PRIx64 "!",
+                                __FUNCTION__, mCameraIdStr.c_str(), entry.data.i64[i]);
+                    }
+                }
+                mDynamicProfileMap[ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD] =
+                        standardBitmap;
+            } else {
+                ALOGE("%s: Device %s supports 10-bit output but doesn't include a dynamic range"
+                        " profile map!", __FUNCTION__, mCameraIdStr.c_str());
+            }
+        }
+    }
+
     mProviderManager = providerPtr;
     // Cache physical camera ids corresponding to this device and also the high
     // resolution sensors in this device + physical camera ids
@@ -172,6 +209,7 @@
     int compositeIdx;
     int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
 
+    Mutex::Autolock l(mCompositeLock);
     // Trying to submit request with surface that wasn't created
     if (idx == NAME_NOT_FOUND) {
         ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
@@ -297,6 +335,7 @@
         SurfaceMap surfaceMap;
         Vector<int32_t> outputStreamIds;
         std::vector<std::string> requestedPhysicalIds;
+        int64_t dynamicProfileBitmap = 0;
         if (request.mSurfaceList.size() > 0) {
             for (const sp<Surface>& surface : request.mSurfaceList) {
                 if (surface == 0) continue;
@@ -313,6 +352,8 @@
                     String8 requestedPhysicalId(
                             mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
                     requestedPhysicalIds.push_back(requestedPhysicalId.string());
+                    dynamicProfileBitmap |=
+                            mConfiguredOutputs.valueAt(index).getDynamicRangeProfile();
                 } else {
                     ALOGW("%s: Output stream Id not found among configured outputs!", __FUNCTION__);
                 }
@@ -348,6 +389,41 @@
                 String8 requestedPhysicalId(
                         mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
                 requestedPhysicalIds.push_back(requestedPhysicalId.string());
+                dynamicProfileBitmap |=
+                        mConfiguredOutputs.valueAt(index).getDynamicRangeProfile();
+            }
+        }
+
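+        // Each bit set in dynamicProfileBitmap is a dynamic range profile targeted by this
+        // request. For every such profile, its mDynamicProfileMap entry is the bitmap of
+        // profiles it may be combined with: an entry of 0 means unconstrained, otherwise the
+        // full request bitmap must be contained in that entry for the combination to be allowed.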
+        if (dynamicProfileBitmap !=
+                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+            for (int i = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+                    i < ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX; i <<= 1) {
+                if ((dynamicProfileBitmap & i) == 0) {
+                    continue;
+                }
+
+                const auto& it = mDynamicProfileMap.find(i);
+                if (it != mDynamicProfileMap.end()) {
+                    if ((it->second == 0) ||
+                            ((it->second & dynamicProfileBitmap) == dynamicProfileBitmap)) {
+                        continue;
+                    } else {
+                        ALOGE("%s: Camera %s: Tried to submit a request with a surfaces that"
+                                " reference an unsupported dynamic range profile combination"
+                                " 0x%" PRIx64 "!", __FUNCTION__, mCameraIdStr.string(),
+                                dynamicProfileBitmap);
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                                "Request targets an unsupported dynamic range profile"
+                                " combination");
+                    }
+                } else {
+                    ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                            " references unsupported dynamic range profile 0x%x!",
+                            __FUNCTION__, mCameraIdStr.string(), i);
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                            "Request targets 10-bit Surface with unsupported dynamic range"
+                            " profile");
+                }
             }
         }
 
@@ -441,6 +517,17 @@
 
         metadataRequestList.push_back(physicalSettingsList);
         surfaceMapList.push_back(surfaceMap);
+
+        // Save certain CaptureRequest settings
+        if (!request.mUserTag.empty()) {
+            mUserTag = request.mUserTag;
+        }
+        camera_metadata_entry entry =
+                physicalSettingsList.begin()->metadata.find(
+                        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
+        if (entry.count == 1) {
+            mVideoStabilizationMode = entry.data.u8[0];
+        }
     }
     mRequestIdCounter++;
 
@@ -568,6 +655,7 @@
         offlineStreamIds->clear();
         mDevice->getOfflineStreamIds(offlineStreamIds);
 
+        Mutex::Autolock l(mCompositeLock);
         for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
             err = mCompositeStreamMap.valueAt(i)->configureStream();
             if (err != OK) {
@@ -638,27 +726,11 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
-    bool earlyExit = false;
+    *status = false;
     camera3::metadataGetter getMetadata = [this](const String8 &id, bool /*overrideForPerfClass*/) {
           return mDevice->infoPhysical(id);};
-    std::vector<std::string> physicalCameraIds;
-    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
-    res = SessionConfigurationUtils::convertToHALStreamCombination(sessionConfiguration,
-            mCameraIdStr, mDevice->info(), getMetadata, physicalCameraIds, streamConfiguration,
-            mOverrideForPerfClass, &earlyExit);
-    if (!res.isOk()) {
-        return res;
-    }
-
-    if (earlyExit) {
-        *status = false;
-        return binder::Status::ok();
-    }
-
-    *status = false;
     ret = mProviderManager->isSessionConfigurationSupported(mCameraIdStr.string(),
-            streamConfiguration, status);
+            sessionConfiguration, mOverrideForPerfClass, getMetadata, status);
     switch (ret) {
         case OK:
             // Expected, do nothing.
@@ -720,6 +792,7 @@
             }
         }
 
+        Mutex::Autolock l(mCompositeLock);
         for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
             if (streamId == mCompositeStreamMap.valueAt(i)->getStreamId()) {
                 compositeIndex = i;
@@ -758,6 +831,7 @@
             }
 
             if (compositeIndex != NAME_NOT_FOUND) {
+                Mutex::Autolock l(mCompositeLock);
                 status_t ret;
                 if ((ret = mCompositeStreamMap.valueAt(compositeIndex)->deleteStream())
                         != OK) {
@@ -801,6 +875,10 @@
     String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
     bool isMultiResolution = outputConfiguration.isMultiResolution();
+    int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
+    int timestampBase = outputConfiguration.getTimestampBase();
+    int mirrorMode = outputConfiguration.getMirrorMode();
 
     res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
             outputConfiguration.getSurfaceType());
@@ -844,7 +922,8 @@
         sp<Surface> surface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                 isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase, timestampBase, mirrorMode);
 
         if (!res.isOk())
             return res;
@@ -880,6 +959,7 @@
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                 outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
         if (err == OK) {
+            Mutex::Autolock l(mCompositeLock);
             mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
                     compositeStream);
         }
@@ -888,7 +968,9 @@
                 streamInfo.height, streamInfo.format, streamInfo.dataSpace,
                 static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                 &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
-                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+                /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+                streamInfo.timestampBase, streamInfo.mirrorMode);
     }
 
     if (err != OK) {
@@ -914,7 +996,7 @@
                   streamInfo.height, streamInfo.format);
 
         // Set transform flags to ensure preview to be rotated correctly.
-        res = setStreamTransformLocked(streamId);
+        res = setStreamTransformLocked(streamId, streamInfo.mirrorMode);
 
         // Fill in mHighResolutionCameraIdToStreamIdSet map
         const String8 &cameraIdUsed =
@@ -982,7 +1064,10 @@
             overriddenSensorPixelModesUsed,
             &surfaceIds,
             outputConfiguration.getSurfaceSetID(), isShared,
-            outputConfiguration.isMultiResolution(), consumerUsage);
+            outputConfiguration.isMultiResolution(), consumerUsage,
+            outputConfiguration.getDynamicRangeProfile(),
+            outputConfiguration.getStreamUseCase(),
+            outputConfiguration.getMirrorMode());
 
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -995,14 +1080,18 @@
         mDeferredStreams.push_back(streamId);
         mStreamInfoMap.emplace(std::piecewise_construct, std::forward_as_tuple(streamId),
                 std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
-                        overriddenSensorPixelModesUsed));
+                        overriddenSensorPixelModesUsed,
+                        outputConfiguration.getDynamicRangeProfile(),
+                        outputConfiguration.getStreamUseCase(),
+                        outputConfiguration.getTimestampBase(),
+                        outputConfiguration.getMirrorMode()));
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
               __FUNCTION__, mCameraIdStr.string(), streamId, width, height, format);
 
         // Set transform flags to ensure preview to be rotated correctly.
-        res = setStreamTransformLocked(streamId);
+        res = setStreamTransformLocked(streamId, outputConfiguration.getMirrorMode());
 
         *newStreamId = streamId;
         // Fill in mHighResolutionCameraIdToStreamIdSet
@@ -1016,7 +1105,7 @@
     return res;
 }
 
-binder::Status CameraDeviceClient::setStreamTransformLocked(int streamId) {
+binder::Status CameraDeviceClient::setStreamTransformLocked(int streamId, int mirrorMode) {
     int32_t transform = 0;
     status_t err;
     binder::Status res;
@@ -1025,7 +1114,7 @@
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
 
-    err = getRotationTransformLocked(&transform);
+    err = getRotationTransformLocked(mirrorMode, &transform);
     if (err != OK) {
         // Error logged by getRotationTransformLocked.
         return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
@@ -1183,13 +1272,18 @@
     }
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
+    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
+    int timestampBase = outputConfiguration.getTimestampBase();
+    int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int mirrorMode = outputConfiguration.getMirrorMode();
 
     for (size_t i = 0; i < newOutputsMap.size(); i++) {
         OutputStreamInfo outInfo;
         sp<Surface> surface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                 /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
-                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed);
+                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase, timestampBase, mirrorMode);
         if (!res.isOk())
             return res;
 
@@ -1546,6 +1640,10 @@
     std::vector<sp<Surface>> consumerSurfaces;
     const std::vector<int32_t> &sensorPixelModesUsed =
             outputConfiguration.getSensorPixelModesUsed();
+    int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+    int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+    int timestampBase = outputConfiguration.getTimestampBase();
+    int mirrorMode = outputConfiguration.getMirrorMode();
     for (auto& bufferProducer : bufferProducers) {
         // Don't create multiple streams for the same target surface
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1558,7 +1656,8 @@
         sp<Surface> surface;
         res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
-                mDevice->infoPhysical(physicalId), sensorPixelModesUsed);
+                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
+                streamUseCase, timestampBase, mirrorMode);
 
         if (!res.isOk())
             return res;
@@ -1693,10 +1792,11 @@
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
         }
 
+        Mutex::Autolock l(mCompositeLock);
         bool isCompositeStream = false;
-        for (const auto& gbp : mConfiguredOutputs[streamId].getGraphicBufferProducers()) {
+        for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
-            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) |
+            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
                 camera3::HeicCompositeStream::isHeicCompositeStream(s);
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
@@ -1743,6 +1843,7 @@
         mConfiguredOutputs.clear();
         mDeferredStreams.clear();
         mStreamInfoMap.clear();
+        Mutex::Autolock l(mCompositeLock);
         mCompositeStreamMap.clear();
         mInputStream = {false, 0, 0, 0, 0};
     } else {
@@ -1804,16 +1905,50 @@
     return dumpDevice(fd, args);
 }
 
+status_t CameraDeviceClient::startWatchingTags(const String8 &tags, int out) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device) {
+        dprintf(out, "  Device is detached.");
+        return OK;
+    }
+    device->startWatchingTags(tags);
+    return OK;
+}
+
+status_t CameraDeviceClient::stopWatchingTags(int out) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device) {
+        dprintf(out, "  Device is detached.");
+        return OK;
+    }
+    device->stopWatchingTags();
+    return OK;
+}
+
+status_t CameraDeviceClient::dumpWatchedEventsToVector(std::vector<std::string> &out) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device) {
+        return OK;
+    }
+    device->dumpWatchedEventsToVector(out);
+    return OK;
+}
+
 void CameraDeviceClient::notifyError(int32_t errorCode,
                                      const CaptureResultExtras& resultExtras) {
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
 
-    // Composites can have multiple internal streams. Error notifications coming from such internal
-    // streams may need to remain within camera service.
     bool skipClientNotification = false;
-    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
-        skipClientNotification |= mCompositeStreamMap.valueAt(i)->onError(errorCode, resultExtras);
+    {
+        // Access to the composite stream map must be synchronized
+        Mutex::Autolock l(mCompositeLock);
+        // Composites can have multiple internal streams. Error notifications coming from such
+        // internal streams may need to remain within camera service.
+        for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+            skipClientNotification |= mCompositeStreamMap.valueAt(i)->onError(errorCode,
+                    resultExtras);
+        }
     }
 
     if ((remoteCb != 0) && (!skipClientNotification)) {
@@ -1841,7 +1976,8 @@
     if (remoteCb != 0) {
         remoteCb->onDeviceIdle();
     }
-    Camera2ClientBase::notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+    Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
+            streamStats, mUserTag, mVideoStabilizationMode);
 }
 
 void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -1853,6 +1989,8 @@
     }
     Camera2ClientBase::notifyShutter(resultExtras, timestamp);
 
+    // Access to the composite stream map must be synchronized
+    Mutex::Autolock l(mCompositeLock);
     for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
         mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
     }
@@ -1880,13 +2018,15 @@
     nsecs_t startTime = systemTime();
     ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
 
-    mFrameProcessor->removeListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
-                                    camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
-                                    /*listener*/this);
-    mFrameProcessor->requestExit();
-    ALOGV("Camera %s: Waiting for threads", mCameraIdStr.string());
-    mFrameProcessor->join();
-    ALOGV("Camera %s: Disconnecting device", mCameraIdStr.string());
+    if (mFrameProcessor.get() != nullptr) {
+        mFrameProcessor->removeListener(
+                camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+                camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
+        mFrameProcessor->requestExit();
+        ALOGV("Camera %s: Waiting for threads", mCameraIdStr.string());
+        mFrameProcessor->join();
+        ALOGV("Camera %s: Disconnecting device", mCameraIdStr.string());
+    }
 
     // WORKAROUND: HAL refuses to disconnect while there's streams in flight
     {
@@ -1902,14 +2042,17 @@
         }
     }
 
-    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
-        auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
-        if (ret != OK) {
-            ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
-                    strerror(-ret), ret);
+    {
+        Mutex::Autolock l(mCompositeLock);
+        for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+            auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+            if (ret != OK) {
+                ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
+                        strerror(-ret), ret);
+            }
         }
+        mCompositeStreamMap.clear();
     }
-    mCompositeStreamMap.clear();
 
     Camera2ClientBase::detachDevice();
 
@@ -1929,6 +2072,8 @@
                 result.mPhysicalMetadatas);
     }
 
+    // Access to the composite stream map must be synchronized
+    Mutex::Autolock l(mCompositeLock);
     for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
         mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
     }
@@ -1997,11 +2142,12 @@
     return true;
 }
 
-status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) {
+status_t CameraDeviceClient::getRotationTransformLocked(int mirrorMode,
+        int32_t* transform) {
     ALOGV("%s: begin", __FUNCTION__);
 
     const CameraMetadata& staticInfo = mDevice->info();
-    return CameraUtils::getRotationTransform(staticInfo, transform);
+    return CameraUtils::getRotationTransform(staticInfo, mirrorMode, transform);
 }
 
 binder::Status CameraDeviceClient::mapRequestTemplate(int templateId,
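The three tag-watching hooks added above all follow the same pattern: snapshot mDevice into a local sp<CameraDeviceBase>, treat a detached device as a harmless no-op, and forward to the device. A caller that wants the buffered events can combine them; the sketch below is hypothetical (the helper and its call site are assumptions, only the client methods come from this diff):

    // Hypothetical helper: drain one client's watched tag events into a dump fd.
    static void dumpWatchedTagEvents(const sp<CameraDeviceClient>& client, int fd) {
        std::vector<std::string> events;
        client->dumpWatchedEventsToVector(events);   // silently empty if the device is detached
        for (const auto& event : events) {
            dprintf(fd, "  %s\n", event.c_str());
        }
    }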
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 76b3f53..c5aad6b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -22,6 +22,7 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <unordered_map>
 
 #include "CameraOfflineSessionClient.h"
 #include "CameraService.h"
@@ -49,6 +50,7 @@
     CameraDeviceClientBase(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
+            bool systemNativeClient,
             const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int api1CameraId,
@@ -177,6 +179,7 @@
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             const String16& clientPackageName,
+            bool clientPackageOverride,
             const std::optional<String16>& clientFeatureId,
             const String8& cameraId,
             int cameraFacing,
@@ -199,6 +202,10 @@
 
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
 
+    virtual status_t      startWatchingTags(const String8 &tags, int out);
+    virtual status_t      stopWatchingTags(int out);
+    virtual status_t      dumpWatchedEventsToVector(std::vector<std::string> &out);
+
     /**
      * Device listener interface
      */
@@ -222,7 +229,7 @@
     virtual void          detachDevice();
 
     // Calculate the ANativeWindow transform from android.sensor.orientation
-    status_t              getRotationTransformLocked(/*out*/int32_t* transform);
+    status_t              getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
 
     bool isUltraHighResolutionSensor(const String8 &cameraId);
 
@@ -282,7 +289,7 @@
 
     // Set the stream transform flags to automatically rotate the camera stream for preview use
     // cases.
-    binder::Status setStreamTransformLocked(int streamId);
+    binder::Status setStreamTransformLocked(int streamId, int mirrorMode);
 
     // Utility method to insert the surface into SurfaceMap
     binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
@@ -299,6 +306,10 @@
     // Stream ID -> OutputConfiguration. Used for looking up Surface by stream/surface index
     KeyedVector<int32_t, hardware::camera2::params::OutputConfiguration> mConfiguredOutputs;
 
+    // Dynamic range profile id -> Supported dynamic profiles bitmap within a single capture
+    // request
+    std::unordered_map<int64_t, int64_t> mDynamicProfileMap;
+
     struct InputStreamConfiguration {
         bool configured;
         int32_t width;
@@ -330,12 +341,19 @@
     // set of high resolution camera id (logical / physical)
     std::unordered_set<std::string> mHighResolutionSensors;
 
+    // Synchronize access to 'mCompositeStreamMap'
+    Mutex mCompositeLock;
     KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 
     sp<CameraProviderManager> mProviderManager;
 
     // Override the camera characteristics for performance class primary cameras.
     bool mOverrideForPerfClass;
+
+    // The string representation of the object passed into CaptureRequest.setTag.
+    std::string mUserTag;
+    // The last set video stabilization mode
+    int mVideoStabilizationMode = -1;
 };
 
 }; // namespace android
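The new mCompositeLock declared above serializes every reader and writer of mCompositeStreamMap, because composite-stream callbacks (onError, onShutter, onResultAvailable) can now race with stream creation, deletion and device detach. The .cpp hunks keep the critical section as small as possible; a condensed sketch of the pattern (member names from this diff, surrounding method body trimmed):

    bool skipClientNotification = false;
    {
        Mutex::Autolock l(mCompositeLock);    // guards only the composite stream map
        for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
            skipClientNotification |=
                    mCompositeStreamMap.valueAt(i)->onError(errorCode, resultExtras);
        }
    }
    // The remote callback is invoked after the scope above, so mCompositeLock is never
    // held across a binder call.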
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index ef15f2d..9303fd2 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -110,6 +110,18 @@
     return OK;
 }
 
+status_t CameraOfflineSessionClient::startWatchingTags(const String8 &tags, int outFd) {
+    return BasicClient::startWatchingTags(tags, outFd);
+}
+
+status_t CameraOfflineSessionClient::stopWatchingTags(int outFd) {
+    return BasicClient::stopWatchingTags(outFd);
+}
+
+status_t CameraOfflineSessionClient::dumpWatchedEventsToVector(std::vector<std::string> &out) {
+    return BasicClient::dumpWatchedEventsToVector(out);
+}
+
 binder::Status CameraOfflineSessionClient::disconnect() {
     Mutex::Autolock icl(mBinderSerializationLock);
 
@@ -275,7 +287,7 @@
     }
 }
 
-status_t CameraOfflineSessionClient::notifyActive() {
+status_t CameraOfflineSessionClient::notifyActive(float maxPreviewFps __unused) {
     return startCameraStreamingOps();
 }
 
@@ -330,5 +342,19 @@
                 CaptureResultExtras());
 }
 
+status_t CameraOfflineSessionClient::injectCamera(const String8& injectedCamId,
+            sp<CameraProviderManager> manager) {
+    ALOGV("%s: This client doesn't support camera injection. injectedCamId: %s providerPtr: %p",
+            __FUNCTION__, injectedCamId.string(), manager.get());
+
+    return OK;
+}
+
+status_t CameraOfflineSessionClient::stopInjection() {
+    ALOGV("%s: This client doesn't support camera injection.", __FUNCTION__);
+
+    return OK;
+}
+
 // ----------------------------------------------------------------------------
 }; // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index b219a4c..f2c42d8 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -54,7 +54,8 @@
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
-                    clientPackageName, clientFeatureId,
+                    // (v)ndk doesn't have offline session support
+                    clientPackageName, /*overridePackageName*/false, clientFeatureId,
                     cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
@@ -71,6 +72,10 @@
 
     status_t dumpClient(int /*fd*/, const Vector<String16>& /*args*/) override;
 
+    status_t startWatchingTags(const String8 &tags, int outFd) override;
+    status_t stopWatchingTags(int outFd) override;
+    status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
+
     status_t initialize(sp<CameraProviderManager> /*manager*/,
             const String8& /*monitorTags*/) override;
 
@@ -89,7 +94,7 @@
     // NotificationListener API
     void notifyError(int32_t errorCode, const CaptureResultExtras& resultExtras) override;
     void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
-    status_t notifyActive() override;
+    status_t notifyActive(float maxPreviewFps) override;
     void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::vector<hardware::CameraStreamStats>& streamStats) override;
     void notifyAutoFocus(uint8_t newState, int triggerId) override;
@@ -98,6 +103,9 @@
     void notifyPrepared(int streamId) override;
     void notifyRequestQueueEmpty() override;
     void notifyRepeatingRequestError(long lastFrameNumber) override;
+    status_t injectCamera(const String8& injectedCamId,
+            sp<CameraProviderManager> manager) override;
+    status_t stopInjection() override;
 
 private:
     mutable Mutex mBinderSerializationLock;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index a66a592..048d85d 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -18,6 +18,9 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
 #include "api1/client2/JpegProcessor.h"
 #include "common/CameraProviderManager.h"
 #include "utils/SessionConfigurationUtils.h"
@@ -30,6 +33,9 @@
 namespace android {
 namespace camera3 {
 
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
 DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
         wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
         CompositeStream(device, cb),
@@ -297,7 +303,8 @@
     }
 
     sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
-    res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, fenceFd);
+    GraphicBufferLocker gbLocker(gb);
+    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
     if (res != OK) {
         ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
@@ -366,7 +373,7 @@
         return res;
     }
 
-    size_t finalJpegSize = actualJpegSize + sizeof(struct camera_jpeg_blob);
+    size_t finalJpegSize = actualJpegSize + sizeof(CameraBlob);
     if (finalJpegSize > finalJpegBufferSize) {
         ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
         outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
@@ -382,10 +389,10 @@
 
     ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
     uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
-        (gb->getWidth() - sizeof(struct camera_jpeg_blob));
-    struct camera_jpeg_blob *blob = reinterpret_cast<struct camera_jpeg_blob*> (header);
-    blob->jpeg_blob_id = CAMERA_JPEG_BLOB_ID;
-    blob->jpeg_size = actualJpegSize;
+        (gb->getWidth() - sizeof(CameraBlob));
+    CameraBlob *blob = reinterpret_cast<CameraBlob*> (header);
+    blob->blobId = CameraBlobId::JPEG;
+    blob->blobSizeBytes = actualJpegSize;
     outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
 
     return res;
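Both composite streams now terminate their BLOB-format output buffers with the AIDL CameraBlob transport header instead of the legacy camera_jpeg_blob. The header occupies the last sizeof(CameraBlob) bytes of the locked buffer and records the payload type and size. A minimal sketch of a trailer writer, assuming the destination was already locked for CPU writes (the helper itself is hypothetical; the types and field names are the ones used in the hunk above):

    using aidl::android::hardware::camera::device::CameraBlob;
    using aidl::android::hardware::camera::device::CameraBlobId;

    // Hypothetical helper: append the CameraBlob trailer describing a JPEG payload.
    static void writeJpegBlobTrailer(uint8_t* dstBuffer, size_t bufferWidth, size_t jpegSize) {
        uint8_t* header = dstBuffer + (bufferWidth - sizeof(CameraBlob));
        CameraBlob* blob = reinterpret_cast<CameraBlob*>(header);
        blob->blobId = CameraBlobId::JPEG;
        blob->blobSizeBytes = static_cast<int32_t>(jpegSize);
    }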
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a73ffb9..54cc27a 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -23,7 +23,8 @@
 #include <pthread.h>
 #include <sys/syscall.h>
 
-#include <android/hardware/camera/device/3.5/types.h>
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
 #include <libyuv.h>
 #include <gui/Surface.h>
 #include <utils/Log.h>
@@ -41,8 +42,8 @@
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
-using android::hardware::camera::device::V3_5::CameraBlob;
-using android::hardware::camera::device::V3_5::CameraBlobId;
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
 
 namespace android {
 namespace camera3 {
@@ -441,6 +442,10 @@
             newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
             newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
             newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
+            int32_t left, top, right, bottom;
+            if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+                newFormat->setRect("crop", 0, 0, mOutputWidth - 1, mOutputHeight - 1);
+            }
         }
     }
     newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
@@ -1130,7 +1135,8 @@
     // Copy the content of the file to memory.
     sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
     void* dstBuffer;
-    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
+    GraphicBufferLocker gbLocker(gb);
+    auto res = gbLocker.lockAsync(&dstBuffer, inputFrame.fenceFd);
     if (res != OK) {
         ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
@@ -1156,10 +1162,10 @@
 
     // Fill in HEIC header
     uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
-    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
+    CameraBlob *blobHeader = (CameraBlob *)header;
     // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
     blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
-    blobHeader->blobSize = fSize;
+    blobHeader->blobSizeBytes = fSize;
 
     res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
     if (res != OK) {
@@ -1421,15 +1427,15 @@
 
     size_t expectedSize = 0;
     // First check for EXIF transport header at the end of the buffer
-    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
-    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
+    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
+    const CameraBlob *blob = (const CameraBlob*)(header);
     if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
-        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
+        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, blob->blobId);
         return 0;
     }
 
-    expectedSize = blob->blobSize;
-    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
+    expectedSize = blob->blobSizeBytes;
+    if (expectedSize == 0 || expectedSize > maxSize - sizeof(CameraBlob)) {
         ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
         return 0;
     }
@@ -1632,7 +1638,7 @@
         maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
                 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
     }
-    return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
+    return maxAppsSegment * (2 + 0xFFFF) + sizeof(CameraBlob);
 }
 
 void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
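On the input side, HeicCompositeStream locates the same transport header at the tail of the app-segment buffer and only trusts blobSizeBytes after verifying blobId == JPEG_APP_SEGMENTS and bounds-checking against the buffer size. The buffer it asks for is sized from the advertised maximum number of APP segments; a worked version of that arithmetic, using the values from the hunk above:

    // Per JPEG APP segment: 2 marker bytes plus a length-prefixed payload of at most 0xFFFF bytes.
    // One CameraBlob trailer is appended at the very end of the buffer.
    constexpr size_t kMaxAppSegments = 16;                        // clamped to [1, 16] above
    size_t maxAppSegmentBufferSize =
            kMaxAppSegments * (2 + 0xFFFF) + sizeof(CameraBlob);  // 16 * 65537 = 1048592 + trailer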
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 5e086c0..dc5002b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -34,6 +34,8 @@
 #include "api2/CameraDeviceClient.h"
 
 #include "device3/Camera3Device.h"
+#include "device3/aidl/AidlCamera3Device.h"
+#include "device3/hidl/HidlCamera3Device.h"
 #include "utils/CameraThreadState.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
@@ -47,6 +49,7 @@
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
         const String16& clientPackageName,
+        bool systemNativeClient,
         const std::optional<String16>& clientFeatureId,
         const String8& cameraId,
         int api1CameraId,
@@ -57,19 +60,18 @@
         int servicePid,
         bool overrideForPerfClass,
         bool legacyClient):
-        TClientBase(cameraService, remoteCallback, clientPackageName, clientFeatureId,
-                cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid, clientUid,
-                servicePid),
+        TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
+                clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
+                clientUid, servicePid),
         mSharedCameraCallbacks(remoteCallback),
-        mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
-        mDevice(new Camera3Device(cameraId, overrideForPerfClass, legacyClient)),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
     ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
             String8(clientPackageName).string(), clientPid, clientUid);
 
     mInitialClientPid = clientPid;
-    LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here.");
+    mOverrideForPerfClass = overrideForPerfClass;
+    mLegacyClient = legacyClient;
 }
 
 template <typename TClientBase>
@@ -104,7 +106,28 @@
     if (res != OK) {
         return res;
     }
-
+    IPCTransport providerTransport = IPCTransport::INVALID;
+    res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr.string(),
+            &providerTransport);
+    if (res != OK) {
+        return res;
+    }
+    switch (providerTransport) {
+        case IPCTransport::HIDL:
+            mDevice =
+                    new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                            mLegacyClient);
+            break;
+        case IPCTransport::AIDL:
+            mDevice =
+                    new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+                            mLegacyClient);
+            break;
+        default:
+            ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
+                    TClientBase::mCameraIdStr.string());
+            return NO_INIT;
+    }
     if (mDevice == NULL) {
         ALOGE("%s: Camera %s: No device connected",
                 __FUNCTION__, TClientBase::mCameraIdStr.string());
@@ -156,6 +179,38 @@
 }
 
 template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::startWatchingTags(const String8 &tags, int out) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device) {
+        dprintf(out, "  Device is detached");
+        return OK;
+    }
+
+    return device->startWatchingTags(tags);
+}
+
+template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::stopWatchingTags(int out) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device) {
+        dprintf(out, "  Device is detached");
+        return OK;
+    }
+
+    return device->stopWatchingTags();
+}
+
+template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::dumpWatchedEventsToVector(std::vector<std::string> &out) {
+    sp<CameraDeviceBase> device = mDevice;
+    if (!device) {
+        // Nothing to dump if the device is detached
+        return OK;
+    }
+    return device->dumpWatchedEventsToVector(out);
+}
+
+template <typename TClientBase>
 status_t Camera2ClientBase<TClientBase>::dumpDevice(
                                                 int fd,
                                                 const Vector<String16>& args) {
@@ -187,20 +242,23 @@
 template <typename TClientBase>
 binder::Status Camera2ClientBase<TClientBase>::disconnect() {
     ATRACE_CALL();
+    ALOGD("Camera %s: start to disconnect", TClientBase::mCameraIdStr.string());
     Mutex::Autolock icl(mBinderSerializationLock);
 
+    ALOGD("Camera %s: serializationLock acquired", TClientBase::mCameraIdStr.string());
     binder::Status res = binder::Status::ok();
     // Allow both client and the media server to disconnect at all times
     int callingPid = CameraThreadState::getCallingPid();
     if (callingPid != TClientBase::mClientPid &&
         callingPid != TClientBase::mServicePid) return res;
 
-    ALOGV("Camera %s: Shutting down", TClientBase::mCameraIdStr.string());
+    ALOGD("Camera %s: Shutting down", TClientBase::mCameraIdStr.string());
 
     // Before detaching the device, cache the info from current open session.
     // The disconnected check avoids duplication of info and also prevents
     // deadlock while acquiring service lock in cacheDump.
     if (!TClientBase::mDisconnected) {
+        ALOGD("Camera %s: start to cacheDump", TClientBase::mCameraIdStr.string());
         Camera2ClientBase::getCameraService()->cacheDump();
     }
 
@@ -259,7 +317,7 @@
 }
 
 template <typename TClientBase>
-status_t Camera2ClientBase<TClientBase>::notifyActive() {
+status_t Camera2ClientBase<TClientBase>::notifyActive(float maxPreviewFps) {
     if (!mDeviceActive) {
         status_t res = TClientBase::startCameraStreamingOps();
         if (res != OK) {
@@ -267,7 +325,7 @@
                     TClientBase::mCameraIdStr.string(), res);
             return res;
         }
-        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr);
+        CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
     }
     mDeviceActive = true;
 
@@ -276,9 +334,10 @@
 }
 
 template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyIdle(
+void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
-        const std::vector<hardware::CameraStreamStats>& streamStats) {
+        const std::vector<hardware::CameraStreamStats>& streamStats,
+        const std::string& userTag, int videoStabilizationMode) {
     if (mDeviceActive) {
         status_t res = TClientBase::finishCameraStreamingOps();
         if (res != OK) {
@@ -286,7 +345,8 @@
                     TClientBase::mCameraIdStr.string(), res);
         }
         CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
-                requestCount, resultErrorCount, deviceError, streamStats);
+                requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
+                streamStats);
     }
     mDeviceActive = false;
 
@@ -362,11 +422,6 @@
 }
 
 template <typename TClientBase>
-int Camera2ClientBase<TClientBase>::getCameraDeviceVersion() const {
-    return mDeviceVersion;
-}
-
-template <typename TClientBase>
 const sp<CameraDeviceBase>& Camera2ClientBase<TClientBase>::getCameraDevice() {
     return mDevice;
 }
@@ -414,6 +469,17 @@
     mRemoteCallback.clear();
 }
 
+template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::injectCamera(const String8& injectedCamId,
+        sp<CameraProviderManager> manager) {
+    return mDevice->injectCamera(injectedCamId, manager);
+}
+
+template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::stopInjection() {
+    return mDevice->stopInjection();
+}
+
 template class Camera2ClientBase<CameraService::Client>;
 template class Camera2ClientBase<CameraDeviceClientBase>;
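Camera2ClientBase no longer constructs Camera3Device in its initializer list; the concrete device is created in initializeImpl() once the provider's IPC transport is known. A reduced sketch of that selection as a factory (the factory function is hypothetical; HidlCamera3Device, AidlCamera3Device and IPCTransport are the names used in the hunk above):

    // Hypothetical factory wrapping the transport switch from initializeImpl().
    static sp<CameraDeviceBase> makeCamera3Device(IPCTransport transport, const String8& cameraId,
            bool overrideForPerfClass, bool legacyClient) {
        switch (transport) {
            case IPCTransport::HIDL:
                return new HidlCamera3Device(cameraId, overrideForPerfClass, legacyClient);
            case IPCTransport::AIDL:
                return new AidlCamera3Device(cameraId, overrideForPerfClass, legacyClient);
            default:
                return nullptr;   // unknown transport; the caller treats this as NO_INIT
        }
    }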
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index c49ea2c..b0d1c3f 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -48,6 +48,7 @@
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
                       const String16& clientPackageName,
+                      bool systemNativeClient,
                       const std::optional<String16>& clientFeatureId,
                       const String8& cameraId,
                       int api1CameraId,
@@ -62,6 +63,9 @@
 
     virtual status_t      initialize(sp<CameraProviderManager> manager, const String8& monitorTags);
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
+    virtual status_t      startWatchingTags(const String8 &tags, int out);
+    virtual status_t      stopWatchingTags(int out);
+    virtual status_t      dumpWatchedEventsToVector(std::vector<std::string> &out);
 
     /**
      * NotificationListener implementation
@@ -69,10 +73,11 @@
 
     virtual void          notifyError(int32_t errorCode,
                                       const CaptureResultExtras& resultExtras);
-    virtual status_t      notifyActive();  // Returns errors on app ops permission failures
-    virtual void          notifyIdle(int64_t requestCount, int64_t resultErrorCount,
-                                     bool deviceError,
-                                     const std::vector<hardware::CameraStreamStats>& streamStats);
+    // Returns errors on app ops permission failures
+    virtual status_t      notifyActive(float maxPreviewFps);
+    virtual void          notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
+                                     bool /*deviceError*/,
+                                     const std::vector<hardware::CameraStreamStats>&) {}
     virtual void          notifyShutter(const CaptureResultExtras& resultExtras,
                                         nsecs_t timestamp);
     virtual void          notifyAutoFocus(uint8_t newState, int triggerId);
@@ -83,6 +88,11 @@
     virtual void          notifyRequestQueueEmpty();
     virtual void          notifyRepeatingRequestError(long lastFrameNumber);
 
+    void                  notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
+                                     bool deviceError,
+                                     const std::vector<hardware::CameraStreamStats>& streamStats,
+                                     const std::string& userTag, int videoStabilizationMode);
+
     int                   getCameraId() const;
     const sp<CameraDeviceBase>&
                           getCameraDevice();
@@ -115,10 +125,16 @@
         mutable Mutex mRemoteCallbackLock;
     } mSharedCameraCallbacks;
 
+    status_t      injectCamera(const String8& injectedCamId,
+                               sp<CameraProviderManager> manager) override;
+    status_t      stopInjection() override;
+
 protected:
 
     // The PID provided in the constructor call
     pid_t mInitialClientPid;
+    bool mOverrideForPerfClass = false;
+    bool mLegacyClient = false;
 
     virtual sp<IBinder> asBinderWrapper() {
         return IInterface::asBinder(this);
@@ -136,11 +152,12 @@
 
     /** CameraDeviceBase instance wrapping HAL3+ entry */
 
-    const int mDeviceVersion;
-
-    // Set to const to avoid mDevice being updated (update of sp<> is racy) during
-    // dumpDevice (which is important to be lock free for debugging purpose)
-    const sp<CameraDeviceBase>  mDevice;
+    // Note: This was previously const to prevent mDevice from being updated during dumpDevice
+    // (update of sp<> is racy - b/112639939), since dumpDevice must stay lock free for debugging
+    // purposes. The const was removed because CameraDeviceBase now needs to be set during
+    // initializeImpl(). This must not be set / cleared anywhere else.
+    sp<CameraDeviceBase>  mDevice;
 
     /** Utility members */
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index b42f3f6..7e2f93c 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -35,6 +35,7 @@
 #include "device3/StatusTracker.h"
 #include "binder/Status.h"
 #include "FrameProducer.h"
+#include "utils/IPCTransport.h"
 
 #include "CameraOfflineSessionBase.h"
 
@@ -59,6 +60,9 @@
     CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
 } camera_stream_configuration_mode_t;
 
+// Matches the definition of camera3_jpeg_blob in camera3.h and the HIDL definition in
+// device@3.2:types.hal. This needs to stay around until HIDL support is removed (for
+// HIDL -> AIDL CameraBlob translation).
 typedef struct camera_jpeg_blob {
     uint16_t jpeg_blob_id;
     uint32_t jpeg_size;
@@ -88,6 +92,8 @@
   public:
     virtual ~CameraDeviceBase();
 
+    virtual IPCTransport getTransportType() const = 0;
+
     /**
      * The device vendor tag ID
      */
@@ -97,6 +103,9 @@
     virtual status_t disconnect() = 0;
 
     virtual status_t dump(int fd, const Vector<String16> &args) = 0;
+    virtual status_t startWatchingTags(const String8 &tags) = 0;
+    virtual status_t stopWatchingTags() = 0;
+    virtual status_t dumpWatchedEventsToVector(std::vector<std::string> &out) = 0;
 
     /**
      * The physical camera device's static characteristics metadata buffer, or
@@ -179,7 +188,11 @@
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
-            uint64_t consumerUsage = 0) = 0;
+            uint64_t consumerUsage = 0,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
 
     /**
      * Create an output stream of the requested size, format, rotation and
@@ -196,7 +209,11 @@
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
-            uint64_t consumerUsage = 0) = 0;
+            uint64_t consumerUsage = 0,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
 
     /**
      * Create an input stream of width, height, and format.
@@ -217,10 +234,12 @@
         android_dataspace dataSpace;
         bool dataSpaceOverridden;
         android_dataspace originalDataSpace;
+        int64_t dynamicRangeProfile;
 
         StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
                 dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
-                originalDataSpace(HAL_DATASPACE_UNKNOWN) {}
+                originalDataSpace(HAL_DATASPACE_UNKNOWN),
+                dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
         /**
          * Check whether the format matches the current or the original one in case
          * it got overridden.
@@ -439,6 +458,18 @@
      */
     void setImageDumpMask(int mask) { mImageDumpMask = mask; }
 
+    /**
+     * The injection camera session to replace the internal camera
+     * session.
+     */
+    virtual status_t injectCamera(const String8& injectedCamId,
+            sp<CameraProviderManager> manager) = 0;
+
+    /**
+     * Stop the injection camera and restore to internal camera session.
+     */
+    virtual status_t stopInjection() = 0;
+
 protected:
     bool mImageDumpMask = 0;
 };
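The legacy camera_jpeg_blob struct above is retained only so the HIDL path can keep producing the old trailer until HIDL support is removed; the AIDL CameraBlob carries the same information field for field (jpeg_blob_id -> blobId, jpeg_size -> blobSizeBytes). A minimal sketch of such a translation (the helper is hypothetical; both structs' fields are taken from this diff):

    // Hypothetical: translate a legacy HIDL-era trailer into the AIDL representation.
    static aidl::android::hardware::camera::device::CameraBlob toAidlBlob(
            const camera_jpeg_blob& legacy) {
        aidl::android::hardware::camera::device::CameraBlob blob;
        blob.blobId = static_cast<aidl::android::hardware::camera::device::CameraBlobId>(
                legacy.jpeg_blob_id);
        blob.blobSizeBytes = static_cast<int32_t>(legacy.jpeg_size);
        return blob;
    }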
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 54e42a6..f39b92a 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -40,7 +40,8 @@
     // Required for API 1 and 2
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras &resultExtras) = 0;
-    virtual status_t notifyActive() = 0; // May return an error since it checks appops
+    // May return an error since it checks appops
+    virtual status_t notifyActive(float maxPreviewFps) = 0;
     virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
             const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
 
diff --git a/services/camera/libcameraservice/common/CameraProviderInfoTemplated.h b/services/camera/libcameraservice/common/CameraProviderInfoTemplated.h
new file mode 100644
index 0000000..db7692a
--- /dev/null
+++ b/services/camera/libcameraservice/common/CameraProviderInfoTemplated.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_CAMERAPROVIDERINFO_TEMPLATEDH
+#define ANDROID_SERVERS_CAMERA_CAMERAPROVIDERINFO_TEMPLATEDH
+
+#include "common/CameraProviderManager.h"
+
+namespace android {
+
+template <class VendorTagSectionVectorType, class VendorTagSectionType>
+status_t IdlVendorTagDescriptor::createDescriptorFromIdl(
+        const VendorTagSectionVectorType& vts,
+        sp<VendorTagDescriptor>& descriptor) {
+
+    int tagCount = 0;
+
+    for (size_t s = 0; s < vts.size(); s++) {
+        tagCount += vts[s].tags.size();
+    }
+
+    if (tagCount < 0 || tagCount > INT32_MAX) {
+        ALOGE("%s: tag count %d from vendor tag sections is invalid.", __FUNCTION__, tagCount);
+        return BAD_VALUE;
+    }
+
+    Vector<uint32_t> tagArray;
+    LOG_ALWAYS_FATAL_IF(tagArray.resize(tagCount) != tagCount,
+            "%s: too many (%u) vendor tags defined.", __FUNCTION__, tagCount);
+    sp<IdlVendorTagDescriptor> desc = new IdlVendorTagDescriptor();
+    desc->mTagCount = tagCount;
+
+    SortedVector<String8> sections;
+    KeyedVector<uint32_t, String8> tagToSectionMap;
+
+    int idx = 0;
+    for (size_t s = 0; s < vts.size(); s++) {
+        const VendorTagSectionType& section = vts[s];
+        const char *sectionName = section.sectionName.c_str();
+        if (sectionName == NULL) {
+            ALOGE("%s: no section name defined for vendor tag section %zu.", __FUNCTION__, s);
+            return BAD_VALUE;
+        }
+        String8 sectionString(sectionName);
+        sections.add(sectionString);
+
+        for (size_t j = 0; j < section.tags.size(); j++) {
+            uint32_t tag = section.tags[j].tagId;
+            if (tag < CAMERA_METADATA_VENDOR_TAG_BOUNDARY) {
+                ALOGE("%s: vendor tag %d not in vendor tag section.", __FUNCTION__, tag);
+                return BAD_VALUE;
+            }
+
+            tagArray.editItemAt(idx++) = section.tags[j].tagId;
+
+            const char *tagName = section.tags[j].tagName.c_str();
+            if (tagName == NULL) {
+                ALOGE("%s: no tag name defined for vendor tag %d.", __FUNCTION__, tag);
+                return BAD_VALUE;
+            }
+            desc->mTagToNameMap.add(tag, String8(tagName));
+            tagToSectionMap.add(tag, sectionString);
+
+            int tagType = (int) section.tags[j].tagType;
+            if (tagType < 0 || tagType >= NUM_TYPES) {
+                ALOGE("%s: tag type %d from vendor ops does not exist.", __FUNCTION__, tagType);
+                return BAD_VALUE;
+            }
+            desc->mTagToTypeMap.add(tag, tagType);
+        }
+    }
+
+    desc->mSections = sections;
+
+    for (size_t i = 0; i < tagArray.size(); ++i) {
+        uint32_t tag = tagArray[i];
+        String8 sectionString = tagToSectionMap.valueFor(tag);
+
+        // Set up tag to section index map
+        ssize_t index = sections.indexOf(sectionString);
+        LOG_ALWAYS_FATAL_IF(index < 0, "index %zd must be non-negative", index);
+        desc->mTagToSectionMap.add(tag, static_cast<uint32_t>(index));
+
+        // Set up reverse mapping
+        ssize_t reverseIndex = -1;
+        if ((reverseIndex = desc->mReverseMapping.indexOfKey(sectionString)) < 0) {
+            KeyedVector<String8, uint32_t>* nameMapper = new KeyedVector<String8, uint32_t>();
+            reverseIndex = desc->mReverseMapping.add(sectionString, nameMapper);
+        }
+        desc->mReverseMapping[reverseIndex]->add(desc->mTagToNameMap.valueFor(tag), tag);
+    }
+
+    descriptor = std::move(desc);
+    return OK;
+}
+
+
+} // namespace android
+
+#endif
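createDescriptorFromIdl() is templated so the same descriptor-building logic can be instantiated for both the HIDL and the AIDL VendorTagSection types; it only requires each section to expose sectionName and a tags vector whose entries carry tagId, tagName and tagType. A hypothetical instantiation for the AIDL types (the exact AIDL type name is an assumption; the template and its signature come from this file):

    using AidlVendorTagSection = aidl::android::hardware::camera::common::VendorTagSection;

    sp<VendorTagDescriptor> descriptor;
    status_t res = IdlVendorTagDescriptor::createDescriptorFromIdl<
            std::vector<AidlVendorTagSection>, AidlVendorTagSection>(vendorTagSections, descriptor);
    if (res != OK) {
        ALOGE("Failed to build vendor tag descriptor: %d", res);
    }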
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 2f74df9..abaea66 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -20,14 +20,17 @@
 
 #include "CameraProviderManager.h"
 
-#include <android/hardware/camera/device/3.7/ICameraDevice.h>
+#include <aidl/android/hardware/camera/device/ICameraDevice.h>
 
 #include <algorithm>
 #include <chrono>
 #include "common/DepthPhotoProcessor.h"
+#include "hidl/HidlProviderInfo.h"
+#include "aidl/AidlProviderInfo.h"
 #include <dlfcn.h>
 #include <future>
 #include <inttypes.h>
+#include <android/binder_manager.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <functional>
@@ -36,7 +39,6 @@
 #include <android-base/logging.h>
 #include <cutils/properties.h>
 #include <hwbinder/IPCThreadState.h>
-#include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
 
 #include "api2/HeicCompositeStream.h"
@@ -45,26 +47,53 @@
 namespace android {
 
 using namespace ::android::hardware::camera;
-using namespace ::android::hardware::camera::common::V1_0;
-using camera3::SessionConfigurationUtils;
+using namespace ::android::camera3;
+using android::hardware::camera::common::V1_0::Status;
+using namespace camera3::SessionConfigurationUtils;
 using std::literals::chrono_literals::operator""s;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
-using hardware::camera::provider::V2_7::CameraIdAndStreamCombination;
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
+const std::string kExternalProviderName = "external/0";
 } // anonymous namespace
 
 const float CameraProviderManager::kDepthARTolerance = .1f;
 
-CameraProviderManager::HardwareServiceInteractionProxy
-CameraProviderManager::sHardwareServiceInteractionProxy{};
+CameraProviderManager::HidlServiceInteractionProxyImpl
+CameraProviderManager::sHidlServiceInteractionProxy{};
 
 CameraProviderManager::~CameraProviderManager() {
 }
 
+const char* FrameworkTorchStatusToString(const TorchModeStatus& s) {
+    switch (s) {
+        case TorchModeStatus::NOT_AVAILABLE:
+            return "NOT_AVAILABLE";
+        case TorchModeStatus::AVAILABLE_OFF:
+            return "AVAILABLE_OFF";
+        case TorchModeStatus::AVAILABLE_ON:
+            return "AVAILABLE_ON";
+    }
+    ALOGW("Unexpected HAL torch mode status code %d", s);
+    return "UNKNOWN_STATUS";
+}
+
+const char* FrameworkDeviceStatusToString(const CameraDeviceStatus& s) {
+    switch (s) {
+        case CameraDeviceStatus::NOT_PRESENT:
+            return "NOT_PRESENT";
+        case CameraDeviceStatus::PRESENT:
+            return "PRESENT";
+        case CameraDeviceStatus::ENUMERATING:
+            return "ENUMERATING";
+    }
+    ALOGW("Unexpected HAL device status code %d", s);
+    return "UNKNOWN_STATUS";
+}
+
 hardware::hidl_vec<hardware::hidl_string>
-CameraProviderManager::HardwareServiceInteractionProxy::listServices() {
+CameraProviderManager::HidlServiceInteractionProxyImpl::listServices() {
     hardware::hidl_vec<hardware::hidl_string> ret;
     auto manager = hardware::defaultServiceManager1_2();
     if (manager != nullptr) {
@@ -76,20 +105,11 @@
     return ret;
 }
 
-status_t CameraProviderManager::initialize(wp<CameraProviderManager::StatusListener> listener,
-        ServiceInteractionProxy* proxy) {
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    if (proxy == nullptr) {
-        ALOGE("%s: No valid service interaction proxy provided", __FUNCTION__);
-        return BAD_VALUE;
-    }
-    mListener = listener;
-    mServiceProxy = proxy;
-    mDeviceState = static_cast<hardware::hidl_bitfield<provider::V2_5::DeviceState>>(
-        provider::V2_5::DeviceState::NORMAL);
-
+status_t CameraProviderManager::tryToInitAndAddHidlProvidersLocked(
+        HidlServiceInteractionProxy *hidlProxy) {
+    mHidlServiceProxy = hidlProxy;
     // Registering will trigger notifications for all already-known providers
-    bool success = mServiceProxy->registerForNotifications(
+    bool success = mHidlServiceProxy->registerForNotifications(
         /* instance name, empty means no filter */ "",
         this);
     if (!success) {
@@ -98,14 +118,57 @@
         return INVALID_OPERATION;
     }
 
-
-    for (const auto& instance : mServiceProxy->listServices()) {
-        this->addProviderLocked(instance);
+    for (const auto& instance : mHidlServiceProxy->listServices()) {
+        this->addHidlProviderLocked(instance);
     }
+    return OK;
+}
+
+static std::string getFullAidlProviderName(const std::string instance) {
+    std::string aidlHalServiceDescriptor =
+            std::string(aidl::android::hardware::camera::provider::ICameraProvider::descriptor);
+    return aidlHalServiceDescriptor + "/" + instance;
+}
+
+status_t CameraProviderManager::tryToAddAidlProvidersLocked() {
+    const char * aidlHalServiceDescriptor =
+            aidl::android::hardware::camera::provider::ICameraProvider::descriptor;
+    auto sm = defaultServiceManager();
+    auto aidlProviders = sm->getDeclaredInstances(
+            String16(aidlHalServiceDescriptor));
+    for (const auto &aidlInstance : aidlProviders) {
+        std::string aidlServiceName =
+                getFullAidlProviderName(std::string(String8(aidlInstance).c_str()));
+        auto res = sm->registerForNotifications(String16(aidlServiceName.c_str()), this);
+        if (res != OK) {
+            ALOGE("%s Unable to register for notifications with AIDL service manager",
+                    __FUNCTION__);
+            return res;
+        }
+        addAidlProviderLocked(aidlServiceName);
+    }
+    return OK;
+}
+
+status_t CameraProviderManager::initialize(wp<CameraProviderManager::StatusListener> listener,
+        HidlServiceInteractionProxy* hidlProxy) {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+    if (hidlProxy == nullptr) {
+        ALOGE("%s: No valid service interaction proxy provided", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    mListener = listener;
+    mDeviceState = 0;
+    auto res = tryToInitAndAddHidlProvidersLocked(hidlProxy);
+    if (res != OK) {
+        // Logging done in called function;
+        return res;
+    }
+    res = tryToAddAidlProvidersLocked();
 
     IPCThreadState::self()->flushCommands();
 
-    return OK;
+    return res;
 }
 
 std::pair<int, int> CameraProviderManager::getCameraCount() const {
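AIDL camera providers are looked up by their full service name, which getFullAidlProviderName() forms by joining the ICameraProvider interface descriptor and the declared instance with a '/'. For example (the descriptor string below is the standard AIDL descriptor and is shown as an illustration, not quoted from this diff):

    // "android.hardware.camera.provider.ICameraProvider" + "/" + "internal/0"
    //   -> "android.hardware.camera.provider.ICameraProvider/internal/0"
    std::string fullName = getFullAidlProviderName("internal/0");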
@@ -211,15 +274,13 @@
     return deviceIds;
 }
 
-bool CameraProviderManager::isValidDevice(const std::string &id, uint16_t majorVersion) const {
-    std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    return isValidDeviceLocked(id, majorVersion);
-}
-
-bool CameraProviderManager::isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const {
+bool CameraProviderManager::isValidDeviceLocked(const std::string &id, uint16_t majorVersion,
+        IPCTransport transport) const {
     for (auto& provider : mProviders) {
+        IPCTransport providerTransport = provider->getIPCTransport();
         for (auto& deviceInfo : provider->mDevices) {
-            if (deviceInfo->mId == id && deviceInfo->mVersion.get_major() == majorVersion) {
+            if (deviceInfo->mId == id && deviceInfo->mVersion.get_major() == majorVersion &&
+                    transport == providerTransport) {
                 return true;
             }
         }
@@ -267,15 +328,31 @@
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
-        const hardware::camera::device::V3_7::StreamConfiguration &configuration,
-        bool *status /*out*/) const {
+        const SessionConfiguration &configuration, bool overrideForPerfClass,
+        metadataGetter getMetadata, bool *status /*out*/) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) {
         return NAME_NOT_FOUND;
     }
 
-    return deviceInfo->isSessionConfigurationSupported(configuration, status);
+    return deviceInfo->isSessionConfigurationSupported(configuration,
+            overrideForPerfClass, getMetadata, status);
+}
+
+status_t CameraProviderManager::getCameraIdIPCTransport(const std::string &id,
+        IPCTransport *providerTransport) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
+    if (parentProvider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    *providerTransport = parentProvider->getIPCTransport();
+    return OK;
 }
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
@@ -285,39 +362,106 @@
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
-        hardware::hidl_version *v) {
+        hardware::hidl_version *v, IPCTransport *transport) {
+    if (v == nullptr || transport == nullptr) {
+        return BAD_VALUE;
+    }
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     hardware::hidl_version maxVersion{0,0};
     bool found = false;
+    IPCTransport providerTransport = IPCTransport::INVALID;
     for (auto& provider : mProviders) {
         for (auto& deviceInfo : provider->mDevices) {
             if (deviceInfo->mId == id) {
                 if (deviceInfo->mVersion > maxVersion) {
                     maxVersion = deviceInfo->mVersion;
+                    providerTransport = provider->getIPCTransport();
                     found = true;
                 }
             }
         }
     }
-    if (!found) {
+    if (!found || providerTransport == IPCTransport::INVALID) {
         return NAME_NOT_FOUND;
     }
     *v = maxVersion;
+    *transport = providerTransport;
     return OK;
 }
 
+status_t CameraProviderManager::getTorchStrengthLevel(const std::string &id,
+        int32_t* torchStrength /*out*/) {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    return deviceInfo->getTorchStrengthLevel(torchStrength);
+}
+
+status_t CameraProviderManager::turnOnTorchWithStrengthLevel(const std::string &id,
+        int32_t torchStrength) {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    return deviceInfo->turnOnTorchWithStrengthLevel(torchStrength);
+}
+
+bool CameraProviderManager::shouldSkipTorchStrengthUpdate(const std::string &id,
+        int32_t torchStrength) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    if (deviceInfo->mTorchStrengthLevel == torchStrength) {
+        ALOGV("%s: Skipping torch strength level updates prev_level: %d, new_level: %d",
+                __FUNCTION__, deviceInfo->mTorchStrengthLevel, torchStrength);
+        return true;
+    }
+    return false;
+}
+
+int32_t CameraProviderManager::getTorchDefaultStrengthLevel(const std::string &id) const {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    return deviceInfo->mTorchDefaultStrengthLevel;
+}
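The new torch strength entry points are meant to be used together: a caller reads the default level, skips redundant updates via shouldSkipTorchStrengthUpdate() (which compares against the cached mTorchStrengthLevel), and only then calls turnOnTorchWithStrengthLevel(). A hypothetical caller-side flow (the providerManager handle and the requested value are assumptions; the CameraProviderManager methods are from the hunk above):

    int32_t defaultLevel = providerManager->getTorchDefaultStrengthLevel(cameraId);
    int32_t requestedLevel = (requested > 0) ? requested : defaultLevel;
    if (!providerManager->shouldSkipTorchStrengthUpdate(cameraId, requestedLevel)) {
        status_t res = providerManager->turnOnTorchWithStrengthLevel(cameraId, requestedLevel);
        if (res != OK) {
            ALOGE("Failed to set torch strength %d on camera %s: %d",
                    requestedLevel, cameraId.c_str(), res);
        }
    }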
+
 bool CameraProviderManager::supportSetTorchMode(const std::string &id) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     for (auto& provider : mProviders) {
-        auto deviceInfo = findDeviceInfoLocked(id);
-        if (deviceInfo != nullptr) {
-            return provider->mSetTorchModeSupported;
+        for (auto& deviceInfo : provider->mDevices) {
+            if (deviceInfo->mId == id) {
+                return provider->mSetTorchModeSupported;
+            }
         }
     }
     return false;
 }
 
+template <class ProviderInfoType, class HalCameraProviderType>
+status_t CameraProviderManager::setTorchModeT(sp<ProviderInfo> &parentProvider,
+        std::shared_ptr<HalCameraProvider> *halCameraProvider) {
+    if (halCameraProvider == nullptr) {
+        return BAD_VALUE;
+    }
+    ProviderInfoType *idlProviderInfo = static_cast<ProviderInfoType *>(parentProvider.get());
+    auto idlInterface = idlProviderInfo->startProviderInterface();
+    if (idlInterface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    *halCameraProvider =
+            std::make_shared<HalCameraProviderType>(idlInterface, idlInterface->descriptor);
+    return OK;
+}
+
 status_t CameraProviderManager::setTorchMode(const std::string &id, bool enabled) {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
@@ -330,11 +474,26 @@
     if (parentProvider == nullptr) {
         return DEAD_OBJECT;
     }
-    const sp<provider::V2_4::ICameraProvider> interface = parentProvider->startProviderInterface();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
+    std::shared_ptr<HalCameraProvider> halCameraProvider = nullptr;
+    IPCTransport providerTransport = parentProvider->getIPCTransport();
+    status_t res = OK;
+    if (providerTransport == IPCTransport::HIDL) {
+        res = setTorchModeT<HidlProviderInfo, HidlHalCameraProvider>(parentProvider,
+                &halCameraProvider);
+        if (res != OK) {
+            return res;
+        }
+    } else if (providerTransport == IPCTransport::AIDL) {
+        res = setTorchModeT<AidlProviderInfo, AidlHalCameraProvider>(parentProvider,
+                &halCameraProvider);
+        if (res != OK) {
+            return res;
+        }
+    } else {
+        ALOGE("%s Invalid provider transport", __FUNCTION__);
+        return INVALID_OPERATION;
     }
-    saveRef(DeviceMode::TORCH, deviceInfo->mId, interface);
+    saveRef(DeviceMode::TORCH, deviceInfo->mId, halCameraProvider);
 
     return deviceInfo->setTorchMode(enabled);
 }
@@ -351,8 +510,72 @@
     return OK;
 }
 
-status_t CameraProviderManager::notifyDeviceStateChange(
-        hardware::hidl_bitfield<provider::V2_5::DeviceState> newState) {
+sp<CameraProviderManager::ProviderInfo> CameraProviderManager::startExternalLazyProvider() const {
+    std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    for (const auto& providerInfo : mProviders) {
+        if (providerInfo->isExternalLazyHAL()) {
+            if (!providerInfo->successfullyStartedProviderInterface()) {
+                return nullptr;
+            } else {
+                return providerInfo;
+            }
+        }
+    }
+    return nullptr;
+}
+
+status_t CameraProviderManager::notifyUsbDeviceEvent(int32_t eventId,
+                                                     const std::string& usbDeviceId) {
+    if (!kEnableLazyHal) {
+        return OK;
+    }
+
+    ALOGV("notifyUsbDeviceEvent: %d usbDeviceId: %s", eventId, usbDeviceId.c_str());
+
+    if (eventId == android::hardware::ICameraService::EVENT_USB_DEVICE_ATTACHED) {
+        sp<ProviderInfo> externalProvider = startExternalLazyProvider();
+        if (externalProvider != nullptr) {
+            auto usbDevices = mExternalUsbDevicesForProvider.first;
+            usbDevices.push_back(usbDeviceId);
+            mExternalUsbDevicesForProvider = {usbDevices, externalProvider};
+        }
+    } else if (eventId
+          == android::hardware::ICameraService::EVENT_USB_DEVICE_DETACHED) {
+        usbDeviceDetached(usbDeviceId);
+    }
+
+    return OK;
+}
+
+status_t CameraProviderManager::usbDeviceDetached(const std::string &usbDeviceId) {
+    std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
+    std::lock_guard<std::mutex> interfaceLock(mInterfaceMutex);
+
+    auto usbDevices = mExternalUsbDevicesForProvider.first;
+    auto foundId = std::find(usbDevices.begin(), usbDevices.end(), usbDeviceId);
+    if (foundId != usbDevices.end()) {
+        sp<ProviderInfo> providerInfo = mExternalUsbDevicesForProvider.second;
+        if (providerInfo == nullptr) {
+              ALOGE("%s No valid external provider for USB device: %s",
+                    __FUNCTION__,
+                    usbDeviceId.c_str());
+              mExternalUsbDevicesForProvider = {std::vector<std::string>(), nullptr};
+              return DEAD_OBJECT;
+        } else {
+            mInterfaceMutex.unlock();
+            providerInfo->removeAllDevices();
+            mInterfaceMutex.lock();
+            mExternalUsbDevicesForProvider = {std::vector<std::string>(), nullptr};
+        }
+    } else {
+        return DEAD_OBJECT;
+    }
+    return OK;
+}
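A caller-side sketch of the USB event plumbing above; apart from the EVENT_USB_DEVICE_* constants, the names and the device id string are assumed placeholders, not part of this patch. On attach the manager starts the external lazy provider and remembers the USB device id; on detach it drops that provider's devices again.

    // 'manager' is an assumed, already-initialized sp<CameraProviderManager>.
    manager->notifyUsbDeviceEvent(
            android::hardware::ICameraService::EVENT_USB_DEVICE_ATTACHED, "usb:2-1");
    // ... camera in use, then unplugged ...
    manager->notifyUsbDeviceEvent(
            android::hardware::ICameraService::EVENT_USB_DEVICE_DETACHED, "usb:2-1");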
+
+status_t CameraProviderManager::notifyDeviceStateChange(int64_t newState) {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     mDeviceState = newState;
     status_t res = OK;
@@ -378,32 +601,115 @@
     return res;
 }
 
-status_t CameraProviderManager::openSession(const std::string &id,
+status_t CameraProviderManager::openAidlSession(const std::string &id,
+        const std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraDeviceCallback>& callback,
+        /*out*/
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraDeviceSession> *session) {
+
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    auto *aidlDeviceInfo3 = static_cast<AidlProviderInfo::AidlDeviceInfo3*>(deviceInfo);
+    sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
+    if (parentProvider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    auto provider =
+            static_cast<AidlProviderInfo *>(parentProvider.get())->startProviderInterface();
+    if (provider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    std::shared_ptr<HalCameraProvider> halCameraProvider =
+            std::make_shared<AidlHalCameraProvider>(provider, provider->descriptor);
+    saveRef(DeviceMode::CAMERA, id, halCameraProvider);
+
+    auto interface = aidlDeviceInfo3->startDeviceInterface();
+    if (interface == nullptr) {
+        removeRef(DeviceMode::CAMERA, id);
+        return DEAD_OBJECT;
+    }
+
+    auto ret = interface->open(callback, session);
+    if (!ret.isOk()) {
+        removeRef(DeviceMode::CAMERA, id);
+        ALOGE("%s: Transaction error opening a session for camera device %s: %s",
+                __FUNCTION__, id.c_str(), ret.getMessage());
+        return AidlProviderInfo::mapToStatusT(ret);
+    }
+    return OK;
+}
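A usage sketch for the AIDL open path above. 'providerManager', 'cameraId', and 'MyDeviceCallback' are assumed placeholders; MyDeviceCallback stands for some implementation of the AIDL BnCameraDeviceCallback interface.

    using aidl::android::hardware::camera::device::ICameraDeviceSession;

    std::shared_ptr<MyDeviceCallback> cb = ndk::SharedRefBase::make<MyDeviceCallback>();
    std::shared_ptr<ICameraDeviceSession> session;
    status_t res = providerManager->openAidlSession(cameraId, cb, &session);
    if (res != OK) {
        ALOGE("Unable to open AIDL session for camera %s: %s (%d)",
                cameraId.c_str(), strerror(-res), res);
    }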
+
+status_t CameraProviderManager::openAidlInjectionSession(const std::string &id,
+        const std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraDeviceCallback>& callback,
+        /*out*/
+        std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraInjectionSession> *session) {
+
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+    auto *aidlDeviceInfo3 = static_cast<AidlProviderInfo::AidlDeviceInfo3*>(deviceInfo);
+    sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
+    if (parentProvider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    auto provider =
+            static_cast<AidlProviderInfo *>(parentProvider.get())->startProviderInterface();
+    if (provider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    std::shared_ptr<HalCameraProvider> halCameraProvider =
+            std::make_shared<AidlHalCameraProvider>(provider, provider->descriptor);
+    saveRef(DeviceMode::CAMERA, id, halCameraProvider);
+
+    auto interface = aidlDeviceInfo3->startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+
+    auto ret = interface->openInjectionSession(callback, session);
+    if (!ret.isOk()) {
+        removeRef(DeviceMode::CAMERA, id);
+        ALOGE("%s: Transaction error opening a session for camera device %s: %s",
+                __FUNCTION__, id.c_str(), ret.getMessage());
+        return DEAD_OBJECT;
+    }
+    return OK;
+}
+
+status_t CameraProviderManager::openHidlSession(const std::string &id,
         const sp<device::V3_2::ICameraDeviceCallback>& callback,
         /*out*/
         sp<device::V3_2::ICameraDeviceSession> *session) {
 
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-    auto deviceInfo = findDeviceInfoLocked(id,
-            /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
+    auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    auto *deviceInfo3 = static_cast<ProviderInfo::DeviceInfo3*>(deviceInfo);
+    auto *hidlDeviceInfo3 = static_cast<HidlProviderInfo::HidlDeviceInfo3*>(deviceInfo);
     sp<ProviderInfo> parentProvider = deviceInfo->mParentProvider.promote();
     if (parentProvider == nullptr) {
         return DEAD_OBJECT;
     }
-    const sp<provider::V2_4::ICameraProvider> provider = parentProvider->startProviderInterface();
+    const sp<provider::V2_4::ICameraProvider> provider =
+            static_cast<HidlProviderInfo *>(parentProvider.get())->startProviderInterface();
     if (provider == nullptr) {
         return DEAD_OBJECT;
     }
-    saveRef(DeviceMode::CAMERA, id, provider);
+    std::shared_ptr<HalCameraProvider> halCameraProvider =
+            std::make_shared<HidlHalCameraProvider>(provider, provider->descriptor);
+    saveRef(DeviceMode::CAMERA, id, halCameraProvider);
 
     Status status;
     hardware::Return<void> ret;
-    auto interface = deviceInfo3->startDeviceInterface<
-            CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT>();
+    auto interface = hidlDeviceInfo3->startDeviceInterface();
     if (interface == nullptr) {
         return DEAD_OBJECT;
     }
@@ -421,17 +727,18 @@
                 __FUNCTION__, id.c_str(), ret.description().c_str());
         return DEAD_OBJECT;
     }
-    return mapToStatusT(status);
+    return HidlProviderInfo::mapToStatusT(status);
 }
 
 void CameraProviderManager::saveRef(DeviceMode usageType, const std::string &cameraId,
-        sp<provider::V2_4::ICameraProvider> provider) {
+        std::shared_ptr<HalCameraProvider> provider) {
     if (!kEnableLazyHal) {
         return;
     }
-    ALOGV("Saving camera provider %s for camera device %s", provider->descriptor, cameraId.c_str());
+    ALOGV("Saving camera provider %s for camera device %s", provider->mDescriptor.c_str(),
+              cameraId.c_str());
     std::lock_guard<std::mutex> lock(mProviderInterfaceMapLock);
-    std::unordered_map<std::string, sp<provider::V2_4::ICameraProvider>> *primaryMap, *alternateMap;
+    std::unordered_map<std::string, std::shared_ptr<HalCameraProvider>> *primaryMap, *alternateMap;
     if (usageType == DeviceMode::TORCH) {
         primaryMap = &mTorchProviderByCameraId;
         alternateMap = &mCameraProviderByCameraId;
@@ -454,7 +761,7 @@
         return;
     }
     ALOGV("Removing camera device %s", cameraId.c_str());
-    std::unordered_map<std::string, sp<provider::V2_4::ICameraProvider>> *providerMap;
+    std::unordered_map<std::string, std::shared_ptr<HalCameraProvider>> *providerMap;
     if (usageType == DeviceMode::TORCH) {
         providerMap = &mTorchProviderByCameraId;
     } else {
@@ -479,6 +786,26 @@
     }
 }
 
+// We ignore sp<IBinder> param here since we need std::shared_ptr<...> which
+// will be retrieved through the ndk api through addAidlProviderLocked ->
+// tryToInitializeAidlProvider.
+void CameraProviderManager::onServiceRegistration(const String16 &name, const sp<IBinder>&) {
+    status_t res = OK;
+    std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
+    {
+        std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+        res = addAidlProviderLocked(String8(name).c_str());
+    }
+
+    sp<StatusListener> listener = getStatusListener();
+    if (nullptr != listener.get() && res == OK) {
+        listener->onNewProviderRegistered();
+    }
+
+    IPCThreadState::self()->flushCommands();
+}
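To make the comment above concrete, here is a sketch of how the NDK-backed handle is obtained (it mirrors tryToInitializeAidlProviderLocked further down); the "/internal/0" instance suffix is illustrative only.

    using aidl::android::hardware::camera::provider::ICameraProvider;

    // AServiceManager_getService returns a strong AIBinder; fromBinder wraps it
    // into the std::shared_ptr<ICameraProvider> that the AIDL path works with.
    std::string serviceName = std::string(ICameraProvider::descriptor) + "/internal/0";
    std::shared_ptr<ICameraProvider> provider = ICameraProvider::fromBinder(
            ndk::SpAIBinder(AServiceManager_getService(serviceName.c_str())));
    if (provider == nullptr) {
        ALOGW("AIDL camera provider '%s' is not available", serviceName.c_str());
    }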
+
 hardware::Return<void> CameraProviderManager::onRegistration(
         const hardware::hidl_string& /*fqName*/,
         const hardware::hidl_string& name,
@@ -488,7 +815,7 @@
     {
         std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-        res = addProviderLocked(name, preexisting);
+        res = addHidlProviderLocked(name, preexisting);
     }
 
     sp<StatusListener> listener = getStatusListener();
@@ -510,10 +837,70 @@
     return OK;
 }
 
+void CameraProviderManager::ProviderInfo::initializeProviderInfoCommon(
+        const std::vector<std::string> &devices) {
+
+    sp<StatusListener> listener = mManager->getStatusListener();
+
+    for (auto& device : devices) {
+        std::string id;
+        status_t res = addDevice(device, CameraDeviceStatus::PRESENT, &id);
+        if (res != OK) {
+            ALOGE("%s: Unable to enumerate camera device '%s': %s (%d)",
+                    __FUNCTION__, device.c_str(), strerror(-res), res);
+            continue;
+        }
+    }
+
+    ALOGI("Camera provider %s ready with %zu camera devices",
+            mProviderName.c_str(), mDevices.size());
+
+    // Process cached status callbacks
+    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
+            std::make_unique<std::vector<CameraStatusInfoT>>();
+    {
+        std::lock_guard<std::mutex> lock(mInitLock);
+
+        for (auto& statusInfo : mCachedStatus) {
+            std::string id, physicalId;
+            status_t res = OK;
+            if (statusInfo.isPhysicalCameraStatus) {
+                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+                    statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
+            } else {
+                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
+            }
+            if (res == OK) {
+                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
+                        id.c_str(), physicalId.c_str(), statusInfo.status);
+            }
+        }
+        mCachedStatus.clear();
+
+        mInitialized = true;
+    }
+
+    // The cached status change callbacks cannot be fired directly from this
+    // function, since doing so would deadlock on this thread by trying to
+    // acquire mInterfaceMutex twice.
+    if (listener != nullptr) {
+        mInitialStatusCallbackFuture = std::async(std::launch::async,
+                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
+                listener, std::move(cachedStatus));
+    }
+}
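The deferral above exists only because the callbacks re-enter the manager and would try to take mInterfaceMutex again on the same thread. A self-contained sketch of the pattern, with illustrative names:

    #include <future>
    #include <iostream>
    #include <mutex>

    std::mutex gInterfaceMutex;

    // Stand-in for the status listener; in the real code this path re-acquires
    // the manager's interface mutex.
    void fireStatusCallbacks() {
        std::lock_guard<std::mutex> lock(gInterfaceMutex);
        std::cout << "cached status delivered\n";
    }

    std::future<void> finishInit() {
        std::lock_guard<std::mutex> lock(gInterfaceMutex);
        // Calling fireStatusCallbacks() right here would self-deadlock, so hand
        // it to another thread; it runs once this lock_guard releases the mutex.
        return std::async(std::launch::async, fireStatusCallbacks);
    }

    int main() {
        finishInit().wait();
        return 0;
    }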
+
 CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
-        const std::string& id,
-        hardware::hidl_version minVersion, hardware::hidl_version maxVersion) const {
+        const std::string& id) const {
     for (auto& provider : mProviders) {
+        using hardware::hidl_version;
+        IPCTransport transport = provider->getIPCTransport();
+        // AIDL device versions start at major 1, minor 1
+        hidl_version minVersion =
+                (transport == IPCTransport::HIDL) ? hidl_version{3, 2} : hidl_version{1, 1};
+        hidl_version maxVersion =
+                (transport == IPCTransport::HIDL) ? hidl_version{3, 7} : hidl_version{1000, 0};
+
         for (auto& deviceInfo : provider->mDevices) {
             if (deviceInfo->mId == id &&
                     minVersion <= deviceInfo->mVersion && maxVersion >= deviceInfo->mVersion) {
@@ -525,16 +912,13 @@
 }
 
 metadata_vendor_id_t CameraProviderManager::getProviderTagIdLocked(
-        const std::string& id, hardware::hidl_version minVersion,
-        hardware::hidl_version maxVersion) const {
+        const std::string& id) const {
     metadata_vendor_id_t ret = CAMERA_METADATA_INVALID_VENDOR_ID;
 
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     for (auto& provider : mProviders) {
         for (auto& deviceInfo : provider->mDevices) {
-            if (deviceInfo->mId == id &&
-                    minVersion <= deviceInfo->mVersion &&
-                    maxVersion >= deviceInfo->mVersion) {
+            if (deviceInfo->mId == id) {
                 return provider->mProviderTagid;
             }
         }
@@ -857,12 +1241,45 @@
     return OK;
 }
 
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupTorchStrengthTags() {
+    status_t res = OK;
+    auto& c = mCameraCharacteristics;
+    auto flashInfoStrengthDefaultLevelEntry = c.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
+    if (flashInfoStrengthDefaultLevelEntry.count == 0) {
+        int32_t flashInfoStrengthDefaultLevel = 1;
+        res = c.update(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+                &flashInfoStrengthDefaultLevel, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+    auto flashInfoStrengthMaximumLevelEntry = c.find(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL);
+    if (flashInfoStrengthMaximumLevelEntry.count == 0) {
+        int32_t flashInfoStrengthMaximumLevel = 1;
+        res = c.update(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+                &flashInfoStrengthMaximumLevel, 1);
+        if (res != OK) {
+            ALOGE("%s: Failed to update ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+    return res;
+}
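An illustrative consumer-side check that relies on the defaults patched in above; 'chars' is an assumed const CameraMetadata holding the static characteristics. With both tags defaulted to 1, a maximum level greater than 1 is what actually signals torch strength control.

    camera_metadata_ro_entry maxLevel = chars.find(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL);
    bool hasTorchStrengthControl = (maxLevel.count == 1) && (maxLevel.data.i32[0] > 1);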
+
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupMonochromeTags() {
     status_t res = OK;
     auto& c = mCameraCharacteristics;
-
+    sp<ProviderInfo> parentProvider = mParentProvider.promote();
+    if (parentProvider == nullptr) {
+        return DEAD_OBJECT;
+    }
+    IPCTransport ipcTransport = parentProvider->getIPCTransport();
     // Override static metadata for MONOCHROME camera with older device version
-    if (mVersion.get_major() == 3 && mVersion.get_minor() < 5) {
+    if (ipcTransport == IPCTransport::HIDL &&
+            (mVersion.get_major() == 3 && mVersion.get_minor() < 5)) {
         camera_metadata_entry cap = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
         for (size_t i = 0; i < cap.count; i++) {
             if (cap.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
@@ -1222,6 +1639,7 @@
     }
 
     for (auto& provider : mProviders) {
+        IPCTransport transport = provider->getIPCTransport();
         for (auto& deviceInfo : provider->mDevices) {
             std::vector<std::string> physicalIds;
             if (deviceInfo->mIsLogicalCamera) {
@@ -1229,7 +1647,8 @@
                         cameraId) != deviceInfo->mPhysicalIds.end()) {
                     int deviceVersion = HARDWARE_DEVICE_API_VERSION(
                             deviceInfo->mVersion.get_major(), deviceInfo->mVersion.get_minor());
-                    if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) {
+                    if (transport == IPCTransport::HIDL &&
+                            deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) {
                         ALOGE("%s: Wrong deviceVersion %x for hiddenPhysicalCameraId %s",
                                 __FUNCTION__, deviceVersion, cameraId.c_str());
                         return falseRet;
@@ -1244,23 +1663,89 @@
     return falseRet;
 }
 
-status_t CameraProviderManager::tryToInitializeProviderLocked(
+status_t CameraProviderManager::tryToInitializeAidlProviderLocked(
         const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
-    sp<provider::V2_4::ICameraProvider> interface;
-    interface = mServiceProxy->tryGetService(providerName);
+    using aidl::android::hardware::camera::provider::ICameraProvider;
+    std::shared_ptr<ICameraProvider> interface =
+            ICameraProvider::fromBinder(ndk::SpAIBinder(
+                    AServiceManager_getService(providerName.c_str())));
 
     if (interface == nullptr) {
-        // The interface may not be started yet. In that case, this is not a
-        // fatal error.
-        ALOGW("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
+        ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
                 providerName.c_str());
         return BAD_VALUE;
     }
 
-    return providerInfo->initialize(interface, mDeviceState);
+    AidlProviderInfo *aidlProviderInfo = static_cast<AidlProviderInfo *>(providerInfo.get());
+    return aidlProviderInfo->initializeAidlProvider(interface, mDeviceState);
 }
 
-status_t CameraProviderManager::addProviderLocked(const std::string& newProvider,
+status_t CameraProviderManager::tryToInitializeHidlProviderLocked(
+        const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
+    sp<provider::V2_4::ICameraProvider> interface;
+    interface = mHidlServiceProxy->tryGetService(providerName);
+
+    if (interface == nullptr) {
+        // The interface may not be started yet. In that case, this is not a
+        // fatal error.
+        ALOGW("%s: HIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
+                providerName.c_str());
+        return BAD_VALUE;
+    }
+
+    HidlProviderInfo *hidlProviderInfo = static_cast<HidlProviderInfo *>(providerInfo.get());
+    return hidlProviderInfo->initializeHidlProvider(interface, mDeviceState);
+}
+
+status_t CameraProviderManager::addAidlProviderLocked(const std::string& newProvider) {
+    // Several camera provider instances can be temporarily present.
+    // Defer initialization of a new instance until the older instance is properly removed.
+    auto providerInstance = newProvider + "-" + std::to_string(mProviderInstanceId);
+    bool providerPresent = false;
+    bool preexisting =
+            (mAidlProviderWithBinders.find(newProvider) != mAidlProviderWithBinders.end());
+
+    // We need the extracted provider name here since, for AIDL, 'newProvider'
+    // carries the fully qualified name of the provider service; we want just
+    // the instance name.
+    using aidl::android::hardware::camera::provider::ICameraProvider;
+    std::string extractedProviderName =
+            newProvider.substr(std::string(ICameraProvider::descriptor).size() + 1);
+    for (const auto& providerInfo : mProviders) {
+        if (providerInfo->mProviderName == extractedProviderName) {
+            ALOGW("%s: Camera provider HAL with name '%s' already registered",
+                    __FUNCTION__, newProvider.c_str());
+            // Do not add new instances for the lazy HAL external provider or
+            // AIDL binders previously seen.
+            if (preexisting || providerInfo->isExternalLazyHAL()) {
+                return ALREADY_EXISTS;
+            } else {
+                ALOGW("%s: The new provider instance will get initialized immediately after the"
+                        " currently present instance is removed!", __FUNCTION__);
+                providerPresent = true;
+                break;
+            }
+        }
+    }
+
+    sp<AidlProviderInfo> providerInfo =
+            new AidlProviderInfo(extractedProviderName, providerInstance, this);
+
+    if (!providerPresent) {
+        status_t res = tryToInitializeAidlProviderLocked(newProvider, providerInfo);
+        if (res != OK) {
+            return res;
+        }
+        mAidlProviderWithBinders.emplace(newProvider);
+    }
+
+    mProviders.push_back(providerInfo);
+    mProviderInstanceId++;
+
+    return OK;
+}
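A runnable sketch of the name split assumed above: an AIDL provider instance is registered as "<descriptor>/<instance>", so stripping the descriptor plus the '/' separator recovers the bare instance name. The sample instance string is illustrative.

    #include <iostream>
    #include <string>

    int main() {
        const std::string descriptor = "android.hardware.camera.provider.ICameraProvider";
        const std::string fullName = descriptor + "/internal/0";
        // Same arithmetic as the substr(descriptor.size() + 1) call above.
        const std::string instance = fullName.substr(descriptor.size() + 1);
        std::cout << instance << std::endl;  // prints "internal/0"
        return 0;
    }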
+
+status_t CameraProviderManager::addHidlProviderLocked(const std::string& newProvider,
         bool preexisting) {
     // Several camera provider instances can be temporarily present.
     // Defer initialization of a new instance until the older instance is properly removed.
@@ -1270,9 +1755,10 @@
         if (providerInfo->mProviderName == newProvider) {
             ALOGW("%s: Camera provider HAL with name '%s' already registered",
                     __FUNCTION__, newProvider.c_str());
-            if (preexisting) {
+            // Do not add new instances for the lazy HAL external provider
+            if (preexisting || providerInfo->isExternalLazyHAL()) {
                 return ALREADY_EXISTS;
-            } else{
+            } else {
                 ALOGW("%s: The new provider instance will get initialized immediately after the"
                         " currently present instance is removed!", __FUNCTION__);
                 providerPresent = true;
@@ -1281,9 +1767,9 @@
         }
     }
 
-    sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, providerInstance, this);
+    sp<HidlProviderInfo> providerInfo = new HidlProviderInfo(newProvider, providerInstance, this);
     if (!providerPresent) {
-        status_t res = tryToInitializeProviderLocked(newProvider, providerInfo);
+        status_t res = tryToInitializeHidlProviderLocked(newProvider, providerInfo);
         if (res != OK) {
             return res;
         }
@@ -1321,7 +1807,17 @@
         // initialize.
         for (const auto& providerInfo : mProviders) {
             if (providerInfo->mProviderName == removedProviderName) {
-                return tryToInitializeProviderLocked(removedProviderName, providerInfo);
+                IPCTransport providerTransport = providerInfo->getIPCTransport();
+                std::string removedAidlProviderName = getFullAidlProviderName(removedProviderName);
+                switch(providerTransport) {
+                    case IPCTransport::HIDL:
+                        return tryToInitializeHidlProviderLocked(removedProviderName, providerInfo);
+                    case IPCTransport::AIDL:
+                        return tryToInitializeAidlProviderLocked(removedAidlProviderName,
+                                providerInfo);
+                    default:
+                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, providerTransport);
+                }
             }
         }
 
@@ -1343,7 +1839,6 @@
 sp<CameraProviderManager::StatusListener> CameraProviderManager::getStatusListener() const {
     return mListener.promote();
 }
-
 /**** Methods for ProviderInfo ****/
 
 
@@ -1359,286 +1854,63 @@
     (void) mManager;
 }
 
-status_t CameraProviderManager::ProviderInfo::initialize(
-        sp<provider::V2_4::ICameraProvider>& interface,
-        hardware::hidl_bitfield<provider::V2_5::DeviceState> currentDeviceState) {
-    status_t res = parseProviderName(mProviderName, &mType, &mId);
-    if (res != OK) {
-        ALOGE("%s: Invalid provider name, ignoring", __FUNCTION__);
-        return BAD_VALUE;
-    }
-    ALOGI("Connecting to new camera provider: %s, isRemote? %d",
-            mProviderName.c_str(), interface->isRemote());
-
-    // Determine minor version
-    mMinorVersion = 4;
-    auto cast2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
-    sp<provider::V2_6::ICameraProvider> interface2_6 = nullptr;
-    if (cast2_6.isOk()) {
-        interface2_6 = cast2_6;
-        if (interface2_6 != nullptr) {
-            mMinorVersion = 6;
-        }
-    }
-    // We need to check again since cast2_6.isOk() succeeds even if the provider
-    // version isn't actually 2.6.
-    if (interface2_6 == nullptr){
-        auto cast2_5 =
-                provider::V2_5::ICameraProvider::castFrom(interface);
-        sp<provider::V2_5::ICameraProvider> interface2_5 = nullptr;
-        if (cast2_5.isOk()) {
-            interface2_5 = cast2_5;
-            if (interface != nullptr) {
-                mMinorVersion = 5;
-            }
-        }
-    } else {
-        auto cast2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
-        if (cast2_7.isOk()) {
-            sp<provider::V2_7::ICameraProvider> interface2_7 = cast2_7;
-            if (interface2_7 != nullptr) {
-                mMinorVersion = 7;
-            }
-        }
-    }
-
-    // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
-    // before setCallback returns
-    hardware::Return<Status> status = interface->setCallback(this);
-    if (!status.isOk()) {
-        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
-                __FUNCTION__, mProviderName.c_str(), status.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to register callbacks with camera provider '%s'",
-                __FUNCTION__, mProviderName.c_str());
-        return mapToStatusT(status);
-    }
-
-    hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
-    if (!linked.isOk()) {
-        ALOGE("%s: Transaction error in linking to camera provider '%s' death: %s",
-                __FUNCTION__, mProviderName.c_str(), linked.description().c_str());
-        return DEAD_OBJECT;
-    } else if (!linked) {
-        ALOGW("%s: Unable to link to provider '%s' death notifications",
-                __FUNCTION__, mProviderName.c_str());
-    }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    } else {
-        mActiveInterface = interface;
-    }
-
-    ALOGV("%s: Setting device state for %s: 0x%" PRIx64,
-            __FUNCTION__, mProviderName.c_str(), mDeviceState);
-    notifyDeviceStateChange(currentDeviceState);
-
-    res = setUpVendorTags();
-    if (res != OK) {
-        ALOGE("%s: Unable to set up vendor tags from provider '%s'",
-                __FUNCTION__, mProviderName.c_str());
-        return res;
-    }
-
-    // Get initial list of camera devices, if any
-    std::vector<std::string> devices;
-    hardware::Return<void> ret = interface->getCameraIdList([&status, this, &devices](
-            Status idStatus,
-            const hardware::hidl_vec<hardware::hidl_string>& cameraDeviceNames) {
-        status = idStatus;
-        if (status == Status::OK) {
-            for (auto& name : cameraDeviceNames) {
-                uint16_t major, minor;
-                std::string type, id;
-                status_t res = parseDeviceName(name, &major, &minor, &type, &id);
-                if (res != OK) {
-                    ALOGE("%s: Error parsing deviceName: %s: %d", __FUNCTION__, name.c_str(), res);
-                    status = Status::INTERNAL_ERROR;
-                } else {
-                    devices.push_back(name);
-                    mProviderPublicCameraIds.push_back(id);
-                }
-            }
-        } });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error in getting camera ID list from provider '%s': %s",
-                __FUNCTION__, mProviderName.c_str(), linked.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to query for camera devices from provider '%s'",
-                __FUNCTION__, mProviderName.c_str());
-        return mapToStatusT(status);
-    }
-
-    // Get list of concurrent streaming camera device combinations
-    if (mMinorVersion >= 6) {
-        res = getConcurrentCameraIdsInternalLocked(interface2_6);
-        if (res != OK) {
-            return res;
-        }
-    }
-
-    ret = interface->isSetTorchModeSupported(
-        [this](auto status, bool supported) {
-            if (status == Status::OK) {
-                mSetTorchModeSupported = supported;
-            }
-        });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error checking torch mode support '%s': %s",
-                __FUNCTION__, mProviderName.c_str(), ret.description().c_str());
-        return DEAD_OBJECT;
-    }
-
-    mIsRemote = interface->isRemote();
-
-    sp<StatusListener> listener = mManager->getStatusListener();
-    for (auto& device : devices) {
-        std::string id;
-        status_t res = addDevice(device, common::V1_0::CameraDeviceStatus::PRESENT, &id);
-        if (res != OK) {
-            ALOGE("%s: Unable to enumerate camera device '%s': %s (%d)",
-                    __FUNCTION__, device.c_str(), strerror(-res), res);
-            continue;
-        }
-    }
-
-    ALOGI("Camera provider %s ready with %zu camera devices",
-            mProviderName.c_str(), mDevices.size());
-
-    // Process cached status callbacks
-    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
-            std::make_unique<std::vector<CameraStatusInfoT>>();
-    {
-        std::lock_guard<std::mutex> lock(mInitLock);
-
-        for (auto& statusInfo : mCachedStatus) {
-            std::string id, physicalId;
-            status_t res = OK;
-            if (statusInfo.isPhysicalCameraStatus) {
-                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
-                    statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
-            } else {
-                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
-            }
-            if (res == OK) {
-                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
-                        id.c_str(), physicalId.c_str(), statusInfo.status);
-            }
-        }
-        mCachedStatus.clear();
-
-        mInitialized = true;
-    }
-
-    // The cached status change callbacks cannot be fired directly from this
-    // function, due to same-thread deadlock trying to acquire mInterfaceMutex
-    // twice.
-    if (listener != nullptr) {
-        mInitialStatusCallbackFuture = std::async(std::launch::async,
-                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
-                listener, std::move(cachedStatus));
-    }
-
-    return OK;
-}
-
-const sp<provider::V2_4::ICameraProvider>
-CameraProviderManager::ProviderInfo::startProviderInterface() {
-    ATRACE_CALL();
-    ALOGV("Request to start camera provider: %s", mProviderName.c_str());
-    if (mSavedInterface != nullptr) {
-        return mSavedInterface;
-    }
-    if (!kEnableLazyHal) {
-        ALOGE("Bad provider state! Should not be here on a non-lazy HAL!");
-        return nullptr;
-    }
-
-    auto interface = mActiveInterface.promote();
-    if (interface == nullptr) {
-        ALOGI("Camera HAL provider needs restart, calling getService(%s)", mProviderName.c_str());
-        interface = mManager->mServiceProxy->getService(mProviderName);
-        interface->setCallback(this);
-        hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
-        if (!linked.isOk()) {
-            ALOGE("%s: Transaction error in linking to camera provider '%s' death: %s",
-                    __FUNCTION__, mProviderName.c_str(), linked.description().c_str());
-            mManager->removeProvider(mProviderName);
-            return nullptr;
-        } else if (!linked) {
-            ALOGW("%s: Unable to link to provider '%s' death notifications",
-                    __FUNCTION__, mProviderName.c_str());
-        }
-        // Send current device state
-        if (mMinorVersion >= 5) {
-            auto castResult = provider::V2_5::ICameraProvider::castFrom(interface);
-            if (castResult.isOk()) {
-                sp<provider::V2_5::ICameraProvider> interface_2_5 = castResult;
-                if (interface_2_5 != nullptr) {
-                    ALOGV("%s: Initial device state for %s: 0x %" PRIx64,
-                            __FUNCTION__, mProviderName.c_str(), mDeviceState);
-                    interface_2_5->notifyDeviceStateChange(mDeviceState);
-                }
-            }
-        }
-
-        mActiveInterface = interface;
-    } else {
-        ALOGV("Camera provider (%s) already in use. Re-using instance.", mProviderName.c_str());
-    }
-    return interface;
-}
-
 const std::string& CameraProviderManager::ProviderInfo::getType() const {
     return mType;
 }
 
-status_t CameraProviderManager::ProviderInfo::addDevice(const std::string& name,
-        CameraDeviceStatus initialStatus, /*out*/ std::string* parsedId) {
+status_t CameraProviderManager::ProviderInfo::addDevice(
+        const std::string& name, CameraDeviceStatus initialStatus,
+        /*out*/ std::string* parsedId) {
 
     ALOGI("Enumerating new camera device: %s", name.c_str());
 
     uint16_t major, minor;
     std::string type, id;
+    IPCTransport transport = getIPCTransport();
 
     status_t res = parseDeviceName(name, &major, &minor, &type, &id);
     if (res != OK) {
         return res;
     }
+
     if (type != mType) {
         ALOGE("%s: Device type %s does not match provider type %s", __FUNCTION__,
                 type.c_str(), mType.c_str());
         return BAD_VALUE;
     }
-    if (mManager->isValidDeviceLocked(id, major)) {
+    if (mManager->isValidDeviceLocked(id, major, transport)) {
         ALOGE("%s: Device %s: ID %s is already in use for device major version %d", __FUNCTION__,
                 name.c_str(), id.c_str(), major);
         return BAD_VALUE;
     }
 
     std::unique_ptr<DeviceInfo> deviceInfo;
-    switch (major) {
-        case 1:
-            ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:", __FUNCTION__,
-                    name.c_str(), major);
-            return BAD_VALUE;
-        case 3:
-            deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, mProviderTagid,
-                    id, minor);
+    switch (transport) {
+        case IPCTransport::HIDL:
+            switch (major) {
+                case 3:
+                    break;
+                default:
+                    ALOGE("%s: Device %s: Unsupported HIDL device HAL major version %d:",
+                          __FUNCTION__, name.c_str(), major);
+                    return BAD_VALUE;
+            }
+            break;
+        case IPCTransport::AIDL:
+            if (major != 1) {
+                ALOGE("%s: Device %s: Unsupported AIDL device HAL major version %d:", __FUNCTION__,
+                        name.c_str(), major);
+                return BAD_VALUE;
+            }
             break;
         default:
-            ALOGE("%s: Device %s: Unknown HIDL device HAL major version %d:", __FUNCTION__,
-                    name.c_str(), major);
+            ALOGE("%s Invalid transport %d", __FUNCTION__, transport);
             return BAD_VALUE;
     }
+
+    deviceInfo = initializeDeviceInfo(name, mProviderTagid, id, minor);
     if (deviceInfo == nullptr) return BAD_VALUE;
-    deviceInfo->notifyDeviceStateChange(mDeviceState);
+    deviceInfo->notifyDeviceStateChange(getDeviceState());
     deviceInfo->mStatus = initialStatus;
     bool isAPI1Compatible = deviceInfo->isAPI1Compatible();
 
@@ -1666,15 +1938,50 @@
             mUniqueCameraIds.erase(id);
             if ((*it)->isAPI1Compatible()) {
                 mUniqueAPI1CompatibleCameraIds.erase(std::remove(
-                        mUniqueAPI1CompatibleCameraIds.begin(),
-                        mUniqueAPI1CompatibleCameraIds.end(), id));
+                    mUniqueAPI1CompatibleCameraIds.begin(),
+                    mUniqueAPI1CompatibleCameraIds.end(), id));
             }
+
+            // Remove reference to camera provider to avoid pointer leak when
+            // unplugging external camera while in use with lazy HALs
+            mManager->removeRef(DeviceMode::CAMERA, id);
+            mManager->removeRef(DeviceMode::TORCH, id);
+
             mDevices.erase(it);
             break;
         }
     }
 }
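The removeRef() calls added above matter mostly for lazy HALs, where the manager keeps the provider alive through shared_ptr entries keyed by camera id. A generic, runnable sketch of that ownership shape (names are illustrative):

    #include <iostream>
    #include <memory>
    #include <string>
    #include <unordered_map>

    struct Provider {
        ~Provider() { std::cout << "provider released\n"; }
    };

    int main() {
        std::unordered_map<std::string, std::shared_ptr<Provider>> cameraRefs, torchRefs;
        auto provider = std::make_shared<Provider>();
        cameraRefs["1"] = provider;
        torchRefs["1"] = provider;
        provider.reset();       // local reference dropped, maps still hold it
        cameraRefs.erase("1");  // analogous to removeRef(DeviceMode::CAMERA, "1")
        torchRefs.erase("1");   // last reference gone; "provider released" prints here
        return 0;
    }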
 
+void CameraProviderManager::ProviderInfo::removeAllDevices() {
+    std::lock_guard<std::mutex> lock(mLock);
+
+    auto itDevices = mDevices.begin();
+    while (itDevices != mDevices.end()) {
+        std::string id = (*itDevices)->mId;
+        std::string deviceName = (*itDevices)->mName;
+        removeDevice(id);
+        // The device was removed; reset the iterator
+        itDevices = mDevices.begin();
+
+        // Notify CameraService of the status change
+        sp<StatusListener> listener = mManager->getStatusListener();
+        if (listener != nullptr) {
+            mLock.unlock();
+            ALOGV("%s: notify device not_present: %s",
+                  __FUNCTION__,
+                  deviceName.c_str());
+            listener->onDeviceStatusChanged(String8(id.c_str()),
+                                            CameraDeviceStatus::NOT_PRESENT);
+            mLock.lock();
+        }
+    }
+}
+
+bool CameraProviderManager::ProviderInfo::isExternalLazyHAL() const {
+    return kEnableLazyHal && (mProviderName == kExternalProviderName);
+}
+
 status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
     dprintf(fd, "== Camera Provider HAL %s (v2.%d, %s) static info: %zu devices: ==\n",
             mProviderInstance.c_str(),
@@ -1749,117 +2056,57 @@
     return OK;
 }
 
-status_t CameraProviderManager::ProviderInfo::getConcurrentCameraIdsInternalLocked(
-        sp<provider::V2_6::ICameraProvider> &interface2_6) {
-    if (interface2_6 == nullptr) {
-        ALOGE("%s: null interface provided", __FUNCTION__);
-        return BAD_VALUE;
-    }
-    Status status = Status::OK;
-    hardware::Return<void> ret =
-            interface2_6->getConcurrentStreamingCameraIds([&status, this](
-            Status concurrentIdStatus, // TODO: Move all instances of hidl_string to 'using'
-            const hardware::hidl_vec<hardware::hidl_vec<hardware::hidl_string>>&
-                        cameraDeviceIdCombinations) {
-            status = concurrentIdStatus;
-            if (status == Status::OK) {
-                mConcurrentCameraIdCombinations.clear();
-                for (auto& combination : cameraDeviceIdCombinations) {
-                    std::unordered_set<std::string> deviceIds;
-                    for (auto &cameraDeviceId : combination) {
-                        deviceIds.insert(cameraDeviceId.c_str());
-                    }
-                    mConcurrentCameraIdCombinations.push_back(std::move(deviceIds));
-                }
-            } });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error in getting concurrent camera ID list from provider '%s'",
-                __FUNCTION__, mProviderName.c_str());
-            return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to query for camera devices from provider '%s'",
-                    __FUNCTION__, mProviderName.c_str());
-        return mapToStatusT(status);
-    }
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::reCacheConcurrentStreamingCameraIdsLocked() {
-    if (mMinorVersion < 6) {
-      // Unsupported operation, nothing to do here
-      return OK;
-    }
-    // Check if the provider is currently active - not going to start it up for this notification
-    auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.promote();
-    if (interface == nullptr) {
-        ALOGE("%s: camera provider interface for %s is not valid", __FUNCTION__,
-                mProviderName.c_str());
-        return INVALID_OPERATION;
-    }
-    auto castResult = provider::V2_6::ICameraProvider::castFrom(interface);
-
-    if (castResult.isOk()) {
-        sp<provider::V2_6::ICameraProvider> interface2_6 = castResult;
-        if (interface2_6 != nullptr) {
-            return getConcurrentCameraIdsInternalLocked(interface2_6);
-        } else {
-            // This should not happen since mMinorVersion >= 6
-            ALOGE("%s: mMinorVersion was >= 6, but interface2_6 was nullptr", __FUNCTION__);
-            return UNKNOWN_ERROR;
-        }
-    }
-    return OK;
-}
-
 std::vector<std::unordered_set<std::string>>
 CameraProviderManager::ProviderInfo::getConcurrentCameraIdCombinations() {
     std::lock_guard<std::mutex> lock(mLock);
     return mConcurrentCameraIdCombinations;
 }
 
-hardware::Return<void> CameraProviderManager::ProviderInfo::cameraDeviceStatusChange(
-        const hardware::hidl_string& cameraDeviceName,
-        CameraDeviceStatus newStatus) {
+void CameraProviderManager::ProviderInfo::cameraDeviceStatusChangeInternal(
+        const std::string& cameraDeviceName, CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
     std::lock_guard<std::mutex> lock(mInitLock);
-
+    CameraDeviceStatus internalNewStatus = newStatus;
     if (!mInitialized) {
         mCachedStatus.emplace_back(false /*isPhysicalCameraStatus*/,
-                cameraDeviceName.c_str(), std::string().c_str(), newStatus);
-        return hardware::Void();
+                cameraDeviceName.c_str(), std::string().c_str(),
+                internalNewStatus);
+        return;
     }
 
     {
         std::lock_guard<std::mutex> lock(mLock);
         if (OK != cameraDeviceStatusChangeLocked(&id, cameraDeviceName, newStatus)) {
-            return hardware::Void();
+            return;
         }
         listener = mManager->getStatusListener();
     }
 
     // Call without lock held to allow reentrancy into provider manager
     if (listener != nullptr) {
-        listener->onDeviceStatusChanged(String8(id.c_str()), newStatus);
+        listener->onDeviceStatusChanged(String8(id.c_str()), internalNewStatus);
     }
-
-    return hardware::Void();
 }
 
 status_t CameraProviderManager::ProviderInfo::cameraDeviceStatusChangeLocked(
-        std::string* id, const hardware::hidl_string& cameraDeviceName,
+        std::string* id, const std::string& cameraDeviceName,
         CameraDeviceStatus newStatus) {
     bool known = false;
     std::string cameraId;
     for (auto& deviceInfo : mDevices) {
         if (deviceInfo->mName == cameraDeviceName) {
+            Mutex::Autolock l(deviceInfo->mDeviceAvailableLock);
             ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
-                    deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
+                    FrameworkDeviceStatusToString(newStatus),
+                    FrameworkDeviceStatusToString(deviceInfo->mStatus));
             deviceInfo->mStatus = newStatus;
             // TODO: Handle device removal (NOT_PRESENT)
             cameraId = deviceInfo->mId;
             known = true;
+            deviceInfo->mIsDeviceAvailable =
+                (newStatus == CameraDeviceStatus::PRESENT);
+            deviceInfo->mDeviceAvailableSignal.signal();
             break;
         }
     }
@@ -1873,7 +2120,13 @@
         addDevice(cameraDeviceName, newStatus, &cameraId);
     } else if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
         removeDevice(cameraId);
+    } else if (isExternalLazyHAL()) {
+        // Do not notify CameraService for PRESENT->PRESENT (lazy HAL restart),
+        // because NOT_AVAILABLE is set on CameraService::connect and a PRESENT
+        // notification would overwrite it.
+        return BAD_VALUE;
     }
+
     if (reCacheConcurrentStreamingCameraIdsLocked() != OK) {
         ALOGE("%s: CameraProvider %s could not re-cache concurrent streaming camera id list ",
                   __FUNCTION__, mProviderName.c_str());
@@ -1882,19 +2135,18 @@
     return OK;
 }
 
-hardware::Return<void> CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChange(
-        const hardware::hidl_string& cameraDeviceName,
-        const hardware::hidl_string& physicalCameraDeviceName,
+void CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChangeInternal(
+        const std::string& cameraDeviceName,
+        const std::string& physicalCameraDeviceName,
         CameraDeviceStatus newStatus) {
     sp<StatusListener> listener;
     std::string id;
     std::string physicalId;
     std::lock_guard<std::mutex> lock(mInitLock);
-
     if (!mInitialized) {
         mCachedStatus.emplace_back(true /*isPhysicalCameraStatus*/, cameraDeviceName,
                 physicalCameraDeviceName, newStatus);
-        return hardware::Void();
+        return;
     }
 
     {
@@ -1902,7 +2154,7 @@
 
         if (OK != physicalCameraDeviceStatusChangeLocked(&id, &physicalId, cameraDeviceName,
                 physicalCameraDeviceName, newStatus)) {
-            return hardware::Void();
+            return;
         }
 
         listener = mManager->getStatusListener();
@@ -1912,13 +2164,13 @@
         listener->onDeviceStatusChanged(String8(id.c_str()),
                 String8(physicalId.c_str()), newStatus);
     }
-    return hardware::Void();
+    return;
 }
 
 status_t CameraProviderManager::ProviderInfo::physicalCameraDeviceStatusChangeLocked(
             std::string* id, std::string* physicalId,
-            const hardware::hidl_string& cameraDeviceName,
-            const hardware::hidl_string& physicalCameraDeviceName,
+            const std::string& cameraDeviceName,
+            const std::string& physicalCameraDeviceName,
             CameraDeviceStatus newStatus) {
     bool known = false;
     std::string cameraId;
@@ -1938,7 +2190,7 @@
             }
             ALOGI("Camera device %s physical device %s status is now %s",
                     cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(),
-                    deviceStatusToString(newStatus));
+                    FrameworkDeviceStatusToString(newStatus));
             known = true;
             break;
         }
@@ -1956,22 +2208,25 @@
     return OK;
 }
 
-hardware::Return<void> CameraProviderManager::ProviderInfo::torchModeStatusChange(
-        const hardware::hidl_string& cameraDeviceName,
+void CameraProviderManager::ProviderInfo::torchModeStatusChangeInternal(
+        const std::string& cameraDeviceName,
         TorchModeStatus newStatus) {
     sp<StatusListener> listener;
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     std::string id;
+    bool known = false;
     {
-        std::lock_guard<std::mutex> lock(mManager->mStatusListenerMutex);
-        bool known = false;
+        // Hold mLock for accessing mDevices
+        std::lock_guard<std::mutex> lock(mLock);
         for (auto& deviceInfo : mDevices) {
             if (deviceInfo->mName == cameraDeviceName) {
                 ALOGI("Camera device %s torch status is now %s", cameraDeviceName.c_str(),
-                        torchStatusToString(newStatus));
+                        FrameworkTorchStatusToString(newStatus));
                 id = deviceInfo->mId;
                 known = true;
+                systemCameraKind = deviceInfo->mSystemCameraKind;
                 if (TorchModeStatus::AVAILABLE_ON != newStatus) {
-                    mManager->removeRef(DeviceMode::TORCH, id);
+                    mManager->removeRef(CameraProviderManager::DeviceMode::TORCH, id);
                 }
                 break;
             }
@@ -1979,175 +2234,33 @@
         if (!known) {
             ALOGW("Camera provider %s says an unknown camera %s now has torch status %d. Curious.",
                     mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
-            return hardware::Void();
+            return;
         }
+        // No lock is needed here: the listener is set up once during
+        // CameraProviderManager initialization and never changes until the
+        // manager is destroyed.
         listener = mManager->getStatusListener();
-    }
+    }
     // Call without lock held to allow reentrancy into provider manager
+    // The problem with holding mLock here is that it would limit re-entrancy:
+    // CameraService::onTorchStatusChanged calls back into CameraProviderManager,
+    // which might try to take mLock again (e.g. findDeviceInfo, which must hold
+    // mLock while iterating through each provider's devices).
     if (listener != nullptr) {
-        listener->onTorchStatusChanged(String8(id.c_str()), newStatus);
+        listener->onTorchStatusChanged(String8(id.c_str()), newStatus, systemCameraKind);
     }
-    return hardware::Void();
-}
-
-void CameraProviderManager::ProviderInfo::serviceDied(uint64_t cookie,
-        const wp<hidl::base::V1_0::IBase>& who) {
-    (void) who;
-    ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
-    if (cookie != mId) {
-        ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
-                __FUNCTION__, cookie, mId);
-    }
-    mManager->removeProvider(mProviderInstance);
-}
-
-status_t CameraProviderManager::ProviderInfo::setUpVendorTags() {
-    if (mVendorTagDescriptor != nullptr)
-        return OK;
-
-    hardware::hidl_vec<VendorTagSection> vts;
-    Status status;
-    hardware::Return<void> ret;
-    const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    ret = interface->getVendorTags(
-        [&](auto s, const auto& vendorTagSecs) {
-            status = s;
-            if (s == Status::OK) {
-                vts = vendorTagSecs;
-            }
-    });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error getting vendor tags from provider '%s': %s",
-                __FUNCTION__, mProviderName.c_str(), ret.description().c_str());
-        return DEAD_OBJECT;
-    }
-    if (status != Status::OK) {
-        return mapToStatusT(status);
-    }
-
-    // Read all vendor tag definitions into a descriptor
-    status_t res;
-    if ((res = HidlVendorTagDescriptor::createDescriptorFromHidl(vts, /*out*/mVendorTagDescriptor))
-            != OK) {
-        ALOGE("%s: Could not generate descriptor from vendor tag operations,"
-                "received error %s (%d). Camera clients will not be able to use"
-                "vendor tags", __FUNCTION__, strerror(res), res);
-        return res;
-    }
-
-    return OK;
+    return;
 }
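The torch path above copies the listener under mLock and invokes it with the lock released. A self-contained sketch of that snapshot-then-call pattern, with illustrative names:

    #include <functional>
    #include <iostream>
    #include <mutex>
    #include <string>

    std::mutex gDeviceLock;
    std::function<void(const std::string&)> gListener =
            [](const std::string& id) { std::cout << "torch changed for camera " << id << "\n"; };

    void onTorchStatusChange(const std::string& id) {
        std::function<void(const std::string&)> listener;
        {
            std::lock_guard<std::mutex> lock(gDeviceLock);
            // ... look up the device entry for 'id' under the lock ...
            listener = gListener;  // snapshot while the lock is held
        }
        if (listener) {
            listener(id);  // invoked without gDeviceLock held, so re-entry is safe
        }
    }

    int main() {
        onTorchStatusChange("1");
        return 0;
    }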
 
 void CameraProviderManager::ProviderInfo::notifyDeviceInfoStateChangeLocked(
-        hardware::hidl_bitfield<provider::V2_5::DeviceState> newDeviceState) {
+        int64_t newDeviceState) {
     std::lock_guard<std::mutex> lock(mLock);
     for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
         (*it)->notifyDeviceStateChange(newDeviceState);
     }
 }
 
-status_t CameraProviderManager::ProviderInfo::notifyDeviceStateChange(
-        hardware::hidl_bitfield<provider::V2_5::DeviceState> newDeviceState) {
-    mDeviceState = newDeviceState;
-    if (mMinorVersion >= 5) {
-        // Check if the provider is currently active - not going to start it up for this notification
-        auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.promote();
-        if (interface != nullptr) {
-            // Send current device state
-            auto castResult = provider::V2_5::ICameraProvider::castFrom(interface);
-            if (castResult.isOk()) {
-                sp<provider::V2_5::ICameraProvider> interface_2_5 = castResult;
-                if (interface_2_5 != nullptr) {
-                    interface_2_5->notifyDeviceStateChange(mDeviceState);
-                }
-            }
-        }
-    }
-    return OK;
-}
-
-status_t CameraProviderManager::ProviderInfo::isConcurrentSessionConfigurationSupported(
-        const hardware::hidl_vec<CameraIdAndStreamCombination> &halCameraIdsAndStreamCombinations,
-        bool *isSupported) {
-    status_t res = OK;
-    if (mMinorVersion >= 6) {
-        // Check if the provider is currently active - not going to start it up for this notification
-        auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.promote();
-        if (interface == nullptr) {
-            // TODO: This might be some other problem
-            return INVALID_OPERATION;
-        }
-        auto castResult2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
-        auto castResult2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
-        Status callStatus;
-        auto cb =
-                [&isSupported, &callStatus](Status s, bool supported) {
-                      callStatus = s;
-                      *isSupported = supported; };
-
-        ::android::hardware::Return<void> ret;
-        sp<provider::V2_7::ICameraProvider> interface_2_7;
-        sp<provider::V2_6::ICameraProvider> interface_2_6;
-        if (mMinorVersion >= 7 && castResult2_7.isOk()) {
-            interface_2_7 = castResult2_7;
-            if (interface_2_7 != nullptr) {
-                ret = interface_2_7->isConcurrentStreamCombinationSupported_2_7(
-                        halCameraIdsAndStreamCombinations, cb);
-            }
-        } else if (mMinorVersion == 6 && castResult2_6.isOk()) {
-            interface_2_6 = castResult2_6;
-            if (interface_2_6 != nullptr) {
-                hardware::hidl_vec<provider::V2_6::CameraIdAndStreamCombination>
-                        halCameraIdsAndStreamCombinations_2_6;
-                size_t numStreams = halCameraIdsAndStreamCombinations.size();
-                halCameraIdsAndStreamCombinations_2_6.resize(numStreams);
-                for (size_t i = 0; i < numStreams; i++) {
-                    using namespace camera3;
-                    auto const& combination = halCameraIdsAndStreamCombinations[i];
-                    halCameraIdsAndStreamCombinations_2_6[i].cameraId = combination.cameraId;
-                    bool success =
-                            SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
-                                    halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
-                                    combination.streamConfiguration);
-                    if (!success) {
-                        *isSupported = false;
-                        return OK;
-                    }
-                }
-                ret = interface_2_6->isConcurrentStreamCombinationSupported(
-                        halCameraIdsAndStreamCombinations_2_6, cb);
-            }
-        }
-
-        if (interface_2_7 != nullptr || interface_2_6 != nullptr) {
-            if (ret.isOk()) {
-                switch (callStatus) {
-                    case Status::OK:
-                        // Expected case, do nothing.
-                        res = OK;
-                        break;
-                    case Status::METHOD_NOT_SUPPORTED:
-                        res = INVALID_OPERATION;
-                        break;
-                    default:
-                        ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                  callStatus);
-                        res = UNKNOWN_ERROR;
-                }
-            } else {
-                ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
-                res = UNKNOWN_ERROR;
-            }
-            return res;
-        }
-    }
-    // unsupported operation
-    return INVALID_OPERATION;
-}
-
 void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
         sp<StatusListener> listener,
         std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
@@ -2162,290 +2275,16 @@
     }
 }
 
-template<class DeviceInfoT>
-std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
-    CameraProviderManager::ProviderInfo::initializeDeviceInfo(
-        const std::string &name, const metadata_vendor_id_t tagId,
-        const std::string &id, uint16_t minorVersion) {
-    Status status;
-
-    auto cameraInterface =
-            startDeviceInterface<typename DeviceInfoT::InterfaceT>(name);
-    if (cameraInterface == nullptr) return nullptr;
-
-    CameraResourceCost resourceCost;
-    cameraInterface->getResourceCost([&status, &resourceCost](
-        Status s, CameraResourceCost cost) {
-                status = s;
-                resourceCost = cost;
-            });
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to obtain resource costs for camera device %s: %s", __FUNCTION__,
-                name.c_str(), statusToString(status));
-        return nullptr;
-    }
-
-    for (auto& conflictName : resourceCost.conflictingDevices) {
-        uint16_t major, minor;
-        std::string type, id;
-        status_t res = parseDeviceName(conflictName, &major, &minor, &type, &id);
-        if (res != OK) {
-            ALOGE("%s: Failed to parse conflicting device %s", __FUNCTION__, conflictName.c_str());
-            return nullptr;
-        }
-        conflictName = id;
-    }
-
-    return std::unique_ptr<DeviceInfo>(
-        new DeviceInfoT(name, tagId, id, minorVersion, resourceCost, this,
-                mProviderPublicCameraIds, cameraInterface));
-}
-
-template<class InterfaceT>
-sp<InterfaceT>
-CameraProviderManager::ProviderInfo::startDeviceInterface(const std::string &name) {
-    ALOGE("%s: Device %s: Unknown HIDL device HAL major version %d:", __FUNCTION__,
-            name.c_str(), InterfaceT::version.get_major());
-    return nullptr;
-}
-
-template<>
-sp<device::V3_2::ICameraDevice>
-CameraProviderManager::ProviderInfo::startDeviceInterface
-        <device::V3_2::ICameraDevice>(const std::string &name) {
-    Status status;
-    sp<device::V3_2::ICameraDevice> cameraInterface;
-    hardware::Return<void> ret;
-    const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
-    if (interface == nullptr) {
-        return nullptr;
-    }
-    ret = interface->getCameraDeviceInterface_V3_x(name, [&status, &cameraInterface](
-        Status s, sp<device::V3_2::ICameraDevice> interface) {
-                status = s;
-                cameraInterface = interface;
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
-                __FUNCTION__, name.c_str(), ret.description().c_str());
-        return nullptr;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
-                name.c_str(), statusToString(status));
-        return nullptr;
-    }
-    return cameraInterface;
-}
-
-CameraProviderManager::ProviderInfo::DeviceInfo::~DeviceInfo() {}
-
-template<class InterfaceT>
-sp<InterfaceT> CameraProviderManager::ProviderInfo::DeviceInfo::startDeviceInterface() {
-    sp<InterfaceT> device;
-    ATRACE_CALL();
-    if (mSavedInterface == nullptr) {
-        sp<ProviderInfo> parentProvider = mParentProvider.promote();
-        if (parentProvider != nullptr) {
-            device = parentProvider->startDeviceInterface<InterfaceT>(mName);
-        }
-    } else {
-        device = (InterfaceT *) mSavedInterface.get();
-    }
-    return device;
-}
-
-template<class InterfaceT>
-status_t CameraProviderManager::ProviderInfo::DeviceInfo::setTorchMode(InterfaceT& interface,
-        bool enabled) {
-    Status s = interface->setTorchMode(enabled ? TorchMode::ON : TorchMode::OFF);
-    return mapToStatusT(s);
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
         const CameraResourceCost& resourceCost,
         sp<ProviderInfo> parentProvider,
-        const std::vector<std::string>& publicCameraIds,
-        sp<InterfaceT> interface) :
+        const std::vector<std::string>& publicCameraIds) :
         DeviceInfo(name, tagId, id, hardware::hidl_version{3, minorVersion},
-                   publicCameraIds, resourceCost, parentProvider) {
-    // Get camera characteristics and initialize flash unit availability
-    Status status;
-    hardware::Return<void> ret;
-    ret = interface->getCameraCharacteristics([&status, this](Status s,
-                    device::V3_2::CameraMetadata metadata) {
-                status = s;
-                if (s == Status::OK) {
-                    camera_metadata_t *buffer =
-                            reinterpret_cast<camera_metadata_t*>(metadata.data());
-                    size_t expectedSize = metadata.size();
-                    int res = validate_camera_metadata_structure(buffer, &expectedSize);
-                    if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
-                        set_camera_metadata_vendor_id(buffer, mProviderTagid);
-                        mCameraCharacteristics = buffer;
-                    } else {
-                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
-                        status = Status::INTERNAL_ERROR;
-                    }
-                }
-            });
-    if (!ret.isOk()) {
-        ALOGE("%s: Transaction error getting camera characteristics for device %s"
-                " to check for a flash unit: %s", __FUNCTION__, id.c_str(),
-                ret.description().c_str());
-        return;
-    }
-    if (status != Status::OK) {
-        ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
-                __FUNCTION__, id.c_str(), CameraProviderManager::statusToString(status), status);
-        return;
-    }
+                   publicCameraIds, resourceCost, parentProvider) { }
 
-    if (mCameraCharacteristics.exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
-        const auto &stateMap = mCameraCharacteristics.find(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
-        if ((stateMap.count > 0) && ((stateMap.count % 2) == 0)) {
-            for (size_t i = 0; i < stateMap.count; i += 2) {
-                mDeviceStateOrientationMap.emplace(stateMap.data.i64[i], stateMap.data.i64[i+1]);
-            }
-        } else {
-            ALOGW("%s: Invalid ANDROID_INFO_DEVICE_STATE_ORIENTATIONS map size: %zu", __FUNCTION__,
-                    stateMap.count);
-        }
-    }
-
-    mSystemCameraKind = getSystemCameraKind();
-
-    status_t res = fixupMonochromeTags();
-    if (OK != res) {
-        ALOGE("%s: Unable to fix up monochrome tags based for older HAL version: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return;
-    }
-    auto stat = addDynamicDepthTags();
-    if (OK != stat) {
-        ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
-                stat);
-    }
-    res = deriveHeicTags();
-    if (OK != res) {
-        ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-    }
-
-    if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
-        status_t status = addDynamicDepthTags(/*maxResolution*/true);
-        if (OK != status) {
-            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
-                    __FUNCTION__, strerror(-status), status);
-        }
-
-        status = deriveHeicTags(/*maxResolution*/true);
-        if (OK != status) {
-            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
-                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
-        }
-    }
-
-    res = addRotateCropTags();
-    if (OK != res) {
-        ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
-                strerror(-res), res);
-    }
-    res = addPreCorrectionActiveArraySize();
-    if (OK != res) {
-        ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
-                strerror(-res), res);
-    }
-    res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
-            &mCameraCharacteristics, &mSupportNativeZoomRatio);
-    if (OK != res) {
-        ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-    }
-
-    camera_metadata_entry flashAvailable =
-            mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
-    if (flashAvailable.count == 1 &&
-            flashAvailable.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
-        mHasFlashUnit = true;
-    } else {
-        mHasFlashUnit = false;
-    }
-
-    queryPhysicalCameraIds();
-
-    // Get physical camera characteristics if applicable
-    auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
-    if (!castResult.isOk()) {
-        ALOGV("%s: Unable to convert ICameraDevice instance to version 3.5", __FUNCTION__);
-        return;
-    }
-    sp<device::V3_5::ICameraDevice> interface_3_5 = castResult;
-    if (interface_3_5 == nullptr) {
-        ALOGE("%s: Converted ICameraDevice instance to nullptr", __FUNCTION__);
-        return;
-    }
-
-    if (mIsLogicalCamera) {
-        for (auto& id : mPhysicalIds) {
-            if (std::find(mPublicCameraIds.begin(), mPublicCameraIds.end(), id) !=
-                    mPublicCameraIds.end()) {
-                continue;
-            }
-
-            hardware::hidl_string hidlId(id);
-            ret = interface_3_5->getPhysicalCameraCharacteristics(hidlId,
-                    [&status, &id, this](Status s, device::V3_2::CameraMetadata metadata) {
-                status = s;
-                if (s == Status::OK) {
-                    camera_metadata_t *buffer =
-                            reinterpret_cast<camera_metadata_t*>(metadata.data());
-                    size_t expectedSize = metadata.size();
-                    int res = validate_camera_metadata_structure(buffer, &expectedSize);
-                    if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
-                        set_camera_metadata_vendor_id(buffer, mProviderTagid);
-                        mPhysicalCameraCharacteristics[id] = buffer;
-                    } else {
-                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
-                        status = Status::INTERNAL_ERROR;
-                    }
-                }
-            });
-
-            if (!ret.isOk()) {
-                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
-                        __FUNCTION__, id.c_str(), mId.c_str(), ret.description().c_str());
-                return;
-            }
-            if (status != Status::OK) {
-                ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
-                        __FUNCTION__, id.c_str(), mId.c_str(),
-                        CameraProviderManager::statusToString(status), status);
-                return;
-            }
-
-            res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
-                    &mPhysicalCameraCharacteristics[id], &mSupportNativeZoomRatio);
-            if (OK != res) {
-                ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
-                        __FUNCTION__, strerror(-res), res);
-            }
-        }
-    }
-
-    if (!kEnableLazyHal) {
-        // Save HAL reference indefinitely
-        mSavedInterface = interface;
-    }
-}
-
-CameraProviderManager::ProviderInfo::DeviceInfo3::~DeviceInfo3() {}
-
-void CameraProviderManager::ProviderInfo::DeviceInfo3::notifyDeviceStateChange(
-        hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState> newState) {
-
+void CameraProviderManager::ProviderInfo::DeviceInfo3::notifyDeviceStateChange(int64_t newState) {
     if (!mDeviceStateOrientationMap.empty() &&
             (mDeviceStateOrientationMap.find(newState) != mDeviceStateOrientationMap.end())) {
         mCameraCharacteristics.update(ANDROID_SENSOR_ORIENTATION,
@@ -2453,10 +2292,6 @@
     }
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::setTorchMode(bool enabled) {
-    return setTorchModeForDevice<InterfaceT>(enabled);
-}
-
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
@@ -2512,21 +2347,6 @@
     return isBackwardCompatible;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::dumpState(int fd) {
-    native_handle_t* handle = native_handle_create(1,0);
-    handle->data[0] = fd;
-    const sp<InterfaceT> interface = startDeviceInterface<InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    auto ret = interface->dumpState(handle);
-    native_handle_delete(handle);
-    if (!ret.isOk()) {
-        return INVALID_OPERATION;
-    }
-    return OK;
-}
-
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
         bool overrideForPerfClass, CameraMetadata *characteristics) const {
     if (characteristics == nullptr) return BAD_VALUE;
@@ -2552,63 +2372,6 @@
     return OK;
 }
 
-status_t CameraProviderManager::ProviderInfo::DeviceInfo3::isSessionConfigurationSupported(
-        const hardware::camera::device::V3_7::StreamConfiguration &configuration,
-        bool *status /*out*/) {
-
-    const sp<CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT> interface =
-            this->startDeviceInterface<CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT>();
-    if (interface == nullptr) {
-        return DEAD_OBJECT;
-    }
-    auto castResult_3_5 = device::V3_5::ICameraDevice::castFrom(interface);
-    sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult_3_5;
-    auto castResult_3_7 = device::V3_7::ICameraDevice::castFrom(interface);
-    sp<hardware::camera::device::V3_7::ICameraDevice> interface_3_7 = castResult_3_7;
-
-    status_t res;
-    Status callStatus;
-    ::android::hardware::Return<void> ret;
-    auto halCb =
-            [&callStatus, &status] (Status s, bool combStatus) {
-                callStatus = s;
-                *status = combStatus;
-            };
-    if (interface_3_7 != nullptr) {
-        ret = interface_3_7->isStreamCombinationSupported_3_7(configuration, halCb);
-    } else if (interface_3_5 != nullptr) {
-        hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
-        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
-                configuration_3_4, configuration);
-        if (!success) {
-            *status = false;
-            return OK;
-        }
-        ret = interface_3_5->isStreamCombinationSupported(configuration_3_4, halCb);
-    } else {
-        return INVALID_OPERATION;
-    }
-    if (ret.isOk()) {
-        switch (callStatus) {
-            case Status::OK:
-                // Expected case, do nothing.
-                res = OK;
-                break;
-            case Status::METHOD_NOT_SUPPORTED:
-                res = INVALID_OPERATION;
-                break;
-            default:
-                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, callStatus);
-                res = UNKNOWN_ERROR;
-        }
-    } else {
-        ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
-        res = UNKNOWN_ERROR;
-    }
-
-    return res;
-}
-
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::filterSmallJpegSizes() {
     int32_t thresholdW = SessionConfigurationUtils::PERF_CLASS_JPEG_THRESH_W;
     int32_t thresholdH = SessionConfigurationUtils::PERF_CLASS_JPEG_THRESH_H;
@@ -2860,8 +2623,6 @@
     return OK;
 }
 
-
-
 CameraProviderManager::ProviderInfo::~ProviderInfo() {
     if (mInitialStatusCallbackFuture.valid()) {
         mInitialStatusCallbackFuture.wait();
@@ -2870,167 +2631,6 @@
     // CameraProvider interface dies, so do not unregister callbacks.
 }
 
-status_t CameraProviderManager::mapToStatusT(const Status& s)  {
-    switch(s) {
-        case Status::OK:
-            return OK;
-        case Status::ILLEGAL_ARGUMENT:
-            return BAD_VALUE;
-        case Status::CAMERA_IN_USE:
-            return -EBUSY;
-        case Status::MAX_CAMERAS_IN_USE:
-            return -EUSERS;
-        case Status::METHOD_NOT_SUPPORTED:
-            return UNKNOWN_TRANSACTION;
-        case Status::OPERATION_NOT_SUPPORTED:
-            return INVALID_OPERATION;
-        case Status::CAMERA_DISCONNECTED:
-            return DEAD_OBJECT;
-        case Status::INTERNAL_ERROR:
-            return INVALID_OPERATION;
-    }
-    ALOGW("Unexpected HAL status code %d", s);
-    return INVALID_OPERATION;
-}
-
-const char* CameraProviderManager::statusToString(const Status& s) {
-    switch(s) {
-        case Status::OK:
-            return "OK";
-        case Status::ILLEGAL_ARGUMENT:
-            return "ILLEGAL_ARGUMENT";
-        case Status::CAMERA_IN_USE:
-            return "CAMERA_IN_USE";
-        case Status::MAX_CAMERAS_IN_USE:
-            return "MAX_CAMERAS_IN_USE";
-        case Status::METHOD_NOT_SUPPORTED:
-            return "METHOD_NOT_SUPPORTED";
-        case Status::OPERATION_NOT_SUPPORTED:
-            return "OPERATION_NOT_SUPPORTED";
-        case Status::CAMERA_DISCONNECTED:
-            return "CAMERA_DISCONNECTED";
-        case Status::INTERNAL_ERROR:
-            return "INTERNAL_ERROR";
-    }
-    ALOGW("Unexpected HAL status code %d", s);
-    return "UNKNOWN_ERROR";
-}
-
-const char* CameraProviderManager::deviceStatusToString(const CameraDeviceStatus& s) {
-    switch(s) {
-        case CameraDeviceStatus::NOT_PRESENT:
-            return "NOT_PRESENT";
-        case CameraDeviceStatus::PRESENT:
-            return "PRESENT";
-        case CameraDeviceStatus::ENUMERATING:
-            return "ENUMERATING";
-    }
-    ALOGW("Unexpected HAL device status code %d", s);
-    return "UNKNOWN_STATUS";
-}
-
-const char* CameraProviderManager::torchStatusToString(const TorchModeStatus& s) {
-    switch(s) {
-        case TorchModeStatus::NOT_AVAILABLE:
-            return "NOT_AVAILABLE";
-        case TorchModeStatus::AVAILABLE_OFF:
-            return "AVAILABLE_OFF";
-        case TorchModeStatus::AVAILABLE_ON:
-            return "AVAILABLE_ON";
-    }
-    ALOGW("Unexpected HAL torch mode status code %d", s);
-    return "UNKNOWN_STATUS";
-}
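
For reference, a minimal sketch of how the removed helpers above (statusToString() and
mapToStatusT()) were typically combined at HIDL call sites; the wrapper name logHalStatus
is hypothetical and used only for illustration:

    static status_t logHalStatus(const Status& s, const char* what) {
        if (s != Status::OK) {
            // statusToString() gives the readable name, mapToStatusT() the framework error code
            ALOGE("%s: HAL returned %s", what, CameraProviderManager::statusToString(s));
        }
        return CameraProviderManager::mapToStatusT(s);
    }
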
-
-
-status_t HidlVendorTagDescriptor::createDescriptorFromHidl(
-        const hardware::hidl_vec<common::V1_0::VendorTagSection>& vts,
-        /*out*/
-        sp<VendorTagDescriptor>& descriptor) {
-
-    int tagCount = 0;
-
-    for (size_t s = 0; s < vts.size(); s++) {
-        tagCount += vts[s].tags.size();
-    }
-
-    if (tagCount < 0 || tagCount > INT32_MAX) {
-        ALOGE("%s: tag count %d from vendor tag sections is invalid.", __FUNCTION__, tagCount);
-        return BAD_VALUE;
-    }
-
-    Vector<uint32_t> tagArray;
-    LOG_ALWAYS_FATAL_IF(tagArray.resize(tagCount) != tagCount,
-            "%s: too many (%u) vendor tags defined.", __FUNCTION__, tagCount);
-
-
-    sp<HidlVendorTagDescriptor> desc = new HidlVendorTagDescriptor();
-    desc->mTagCount = tagCount;
-
-    SortedVector<String8> sections;
-    KeyedVector<uint32_t, String8> tagToSectionMap;
-
-    int idx = 0;
-    for (size_t s = 0; s < vts.size(); s++) {
-        const common::V1_0::VendorTagSection& section = vts[s];
-        const char *sectionName = section.sectionName.c_str();
-        if (sectionName == NULL) {
-            ALOGE("%s: no section name defined for vendor tag section %zu.", __FUNCTION__, s);
-            return BAD_VALUE;
-        }
-        String8 sectionString(sectionName);
-        sections.add(sectionString);
-
-        for (size_t j = 0; j < section.tags.size(); j++) {
-            uint32_t tag = section.tags[j].tagId;
-            if (tag < CAMERA_METADATA_VENDOR_TAG_BOUNDARY) {
-                ALOGE("%s: vendor tag %d not in vendor tag section.", __FUNCTION__, tag);
-                return BAD_VALUE;
-            }
-
-            tagArray.editItemAt(idx++) = section.tags[j].tagId;
-
-            const char *tagName = section.tags[j].tagName.c_str();
-            if (tagName == NULL) {
-                ALOGE("%s: no tag name defined for vendor tag %d.", __FUNCTION__, tag);
-                return BAD_VALUE;
-            }
-            desc->mTagToNameMap.add(tag, String8(tagName));
-            tagToSectionMap.add(tag, sectionString);
-
-            int tagType = (int) section.tags[j].tagType;
-            if (tagType < 0 || tagType >= NUM_TYPES) {
-                ALOGE("%s: tag type %d from vendor ops does not exist.", __FUNCTION__, tagType);
-                return BAD_VALUE;
-            }
-            desc->mTagToTypeMap.add(tag, tagType);
-        }
-    }
-
-    desc->mSections = sections;
-
-    for (size_t i = 0; i < tagArray.size(); ++i) {
-        uint32_t tag = tagArray[i];
-        String8 sectionString = tagToSectionMap.valueFor(tag);
-
-        // Set up tag to section index map
-        ssize_t index = sections.indexOf(sectionString);
-        LOG_ALWAYS_FATAL_IF(index < 0, "index %zd must be non-negative", index);
-        desc->mTagToSectionMap.add(tag, static_cast<uint32_t>(index));
-
-        // Set up reverse mapping
-        ssize_t reverseIndex = -1;
-        if ((reverseIndex = desc->mReverseMapping.indexOfKey(sectionString)) < 0) {
-            KeyedVector<String8, uint32_t>* nameMapper = new KeyedVector<String8, uint32_t>();
-            reverseIndex = desc->mReverseMapping.add(sectionString, nameMapper);
-        }
-        desc->mReverseMapping[reverseIndex]->add(desc->mTagToNameMap.valueFor(tag), tag);
-    }
-
-    descriptor = std::move(desc);
-    return OK;
-}
-
 // Expects to have mInterfaceMutex locked
 std::vector<std::unordered_set<std::string>>
 CameraProviderManager::getConcurrentCameraIds() const {
@@ -3044,59 +2644,6 @@
     return deviceIdCombinations;
 }
 
-status_t CameraProviderManager::convertToHALStreamCombinationAndCameraIdsLocked(
-        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
-        const std::set<std::string>& perfClassPrimaryCameraIds,
-        int targetSdkVersion,
-        hardware::hidl_vec<CameraIdAndStreamCombination> *halCameraIdsAndStreamCombinations,
-        bool *earlyExit) {
-    binder::Status bStatus = binder::Status::ok();
-    std::vector<CameraIdAndStreamCombination> halCameraIdsAndStreamsV;
-    bool shouldExit = false;
-    status_t res = OK;
-    for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
-        const std::string& cameraId = cameraIdAndSessionConfig.mCameraId;
-        hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
-        CameraMetadata deviceInfo;
-        bool overrideForPerfClass =
-                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
-                        perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
-        if (res != OK) {
-            return res;
-        }
-        camera3::metadataGetter getMetadata =
-                [this](const String8 &id, bool overrideForPerfClass) {
-                    CameraMetadata physicalDeviceInfo;
-                    getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
-                    return physicalDeviceInfo;
-                };
-        std::vector<std::string> physicalCameraIds;
-        isLogicalCameraLocked(cameraId, &physicalCameraIds);
-        bStatus =
-            SessionConfigurationUtils::convertToHALStreamCombination(
-                    cameraIdAndSessionConfig.mSessionConfiguration,
-                    String8(cameraId.c_str()), deviceInfo, getMetadata,
-                    physicalCameraIds, streamConfiguration,
-                    overrideForPerfClass, &shouldExit);
-        if (!bStatus.isOk()) {
-            ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
-            return INVALID_OPERATION;
-        }
-        if (shouldExit) {
-            *earlyExit = true;
-            return OK;
-        }
-        CameraIdAndStreamCombination halCameraIdAndStream;
-        halCameraIdAndStream.cameraId = cameraId;
-        halCameraIdAndStream.streamConfiguration = streamConfiguration;
-        halCameraIdsAndStreamsV.push_back(halCameraIdAndStream);
-    }
-    *halCameraIdsAndStreamCombinations = halCameraIdsAndStreamsV;
-    return OK;
-}
-
 // Checks if the containing vector of sets has any set that contains all of the
 // camera ids in cameraIdsAndSessionConfigs.
 static bool checkIfSetContainsAll(
@@ -3131,27 +2678,9 @@
     for (auto &provider : mProviders) {
         if (checkIfSetContainsAll(cameraIdsAndSessionConfigs,
                 provider->getConcurrentCameraIdCombinations())) {
-            // For each camera device in cameraIdsAndSessionConfigs collect
-            // the streamConfigs and create the HAL
-            // CameraIdAndStreamCombination, exit early if needed
-            hardware::hidl_vec<CameraIdAndStreamCombination> halCameraIdsAndStreamCombinations;
-            bool knowUnsupported = false;
-            status_t res = convertToHALStreamCombinationAndCameraIdsLocked(
-                    cameraIdsAndSessionConfigs, perfClassPrimaryCameraIds,
-                    targetSdkVersion, &halCameraIdsAndStreamCombinations, &knowUnsupported);
-            if (res != OK) {
-                ALOGE("%s unable to convert session configurations provided to HAL stream"
-                      "combinations", __FUNCTION__);
-                return res;
-            }
-            if (knowUnsupported) {
-                // We got to know the streams aren't valid before doing the HAL
-                // call itself.
-                *isSupported = false;
-                return OK;
-            }
             return provider->isConcurrentSessionConfigurationSupported(
-                    halCameraIdsAndStreamCombinations, isSupported);
+                    cameraIdsAndSessionConfigs, perfClassPrimaryCameraIds, targetSdkVersion,
+                    isSupported);
         }
     }
     *isSupported = false;
@@ -3161,7 +2690,7 @@
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics) const {
-    auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {3,0}, /*maxVersion*/ {5,0});
+    auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
         return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
     }
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e3763a1..a66598d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -29,7 +29,12 @@
 #include <camera/CameraParameters2.h>
 #include <camera/CameraMetadata.h>
 #include <camera/CameraBase.h>
+#include <utils/Condition.h>
 #include <utils/Errors.h>
+#include <android/hardware/ICameraService.h>
+#include <utils/IPCTransport.h>
+#include <utils/SessionConfigurationUtils.h>
+#include <aidl/android/hardware/camera/provider/ICameraProvider.h>
 #include <android/hardware/camera/common/1.0/types.h>
 #include <android/hardware/camera/provider/2.5/ICameraProvider.h>
 #include <android/hardware/camera/provider/2.6/ICameraProviderCallback.h>
@@ -37,26 +42,28 @@
 #include <android/hardware/camera/provider/2.7/ICameraProvider.h>
 #include <android/hardware/camera/device/3.7/types.h>
 #include <android/hidl/manager/1.0/IServiceNotification.h>
+#include <binder/IServiceManager.h>
 #include <camera/VendorTagDescriptor.h>
 
 namespace android {
-/**
- * The vendor tag descriptor class that takes HIDL vendor tag information as
- * input. Not part of VendorTagDescriptor class because that class is used
- * in AIDL generated sources which don't have access to HIDL headers.
- */
-class HidlVendorTagDescriptor : public VendorTagDescriptor {
-public:
-    /**
-     * Create a VendorTagDescriptor object from the HIDL VendorTagSection
-     * vector.
-     *
-     * Returns OK on success, or a negative error code.
-     */
-    static status_t createDescriptorFromHidl(
-            const hardware::hidl_vec<hardware::camera::common::V1_0::VendorTagSection>& vts,
-            /*out*/
-            sp<VendorTagDescriptor>& descriptor);
+
+using hardware::camera2::utils::CameraIdAndSessionConfiguration;
+
+enum class CameraDeviceStatus : uint32_t {
+  NOT_PRESENT = 0,
+  PRESENT = 1,
+  ENUMERATING = 2
+};
+
+enum class TorchModeStatus : uint32_t {
+  NOT_AVAILABLE = 0,
+  AVAILABLE_OFF = 1,
+  AVAILABLE_ON = 2
+};
+
+struct CameraResourceCost {
+  uint32_t resourceCost;
+  std::vector<std::string> conflictingDevices;
 };
 
 enum SystemCameraKind {
@@ -90,6 +97,26 @@
 #define CAMERA_DEVICE_API_VERSION_3_7 HARDWARE_DEVICE_API_VERSION(3, 7)
 
 /**
+ * The vendor tag descriptor class that takes HIDL/AIDL vendor tag information as
+ * input. Not part of VendorTagDescriptor class because that class is used
+ * in AIDL generated sources which don't have access to AIDL / HIDL headers.
+ */
+class IdlVendorTagDescriptor : public VendorTagDescriptor {
+public:
+    /**
+     * Create a VendorTagDescriptor object from the HIDL/AIDL VendorTagSection
+     * vector.
+     *
+     * Returns OK on success, or a negative error code.
+     */
+    template <class VendorTagSectionVectorType, class VendorTagSectionType>
+    static status_t createDescriptorFromIdl(
+            const VendorTagSectionVectorType& vts,
+            /*out*/
+            sp<VendorTagDescriptor>& descriptor);
+};
+
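A hedged usage sketch for the templated factory declared above, shown for the HIDL
instantiation; vendorTagSections is an assumed hidl_vec of VendorTagSection values,
e.g. as reported through the provider's getVendorTags() callback:

    sp<VendorTagDescriptor> descriptor;
    status_t res = IdlVendorTagDescriptor::createDescriptorFromIdl<
            hardware::hidl_vec<hardware::camera::common::V1_0::VendorTagSection>,
            hardware::camera::common::V1_0::VendorTagSection>(vendorTagSections, descriptor);
    if (res != OK) {
        // vendor tags from this provider are skipped
    }
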
+/**
  * A manager for all camera providers available on an Android device.
  *
  * Responsible for enumerating providers and the individual camera devices
@@ -99,14 +126,18 @@
  * opening them for active use.
  *
  */
-class CameraProviderManager : virtual public hidl::manager::V1_0::IServiceNotification {
+class CameraProviderManager : virtual public hidl::manager::V1_0::IServiceNotification,
+        public virtual IServiceManager::LocalRegistrationCallback {
 public:
-
+    // needs to be made a friend struct since HidlProviderInfo needs to inherit
+    // from CameraProviderManager::ProviderInfo which isn't a public member.
+    friend struct HidlProviderInfo;
+    friend struct AidlProviderInfo;
     ~CameraProviderManager();
 
     // Tiny proxy for the static methods in a HIDL interface that communicate with the hardware
     // service manager, to be replaceable in unit tests with a fake.
-    struct ServiceInteractionProxy {
+    struct HidlServiceInteractionProxy {
         virtual bool registerForNotifications(
                 const std::string &serviceName,
                 const sp<hidl::manager::V1_0::IServiceNotification>
@@ -118,12 +149,12 @@
         virtual sp<hardware::camera::provider::V2_4::ICameraProvider> getService(
                 const std::string &serviceName) = 0;
         virtual hardware::hidl_vec<hardware::hidl_string> listServices() = 0;
-        virtual ~ServiceInteractionProxy() {}
+        virtual ~HidlServiceInteractionProxy() {}
     };
 
     // Standard use case - call into the normal generated static methods which invoke
     // the real hardware service manager
-    struct HardwareServiceInteractionProxy : public ServiceInteractionProxy {
+    struct HidlServiceInteractionProxyImpl : public HidlServiceInteractionProxy {
         virtual bool registerForNotifications(
                 const std::string &serviceName,
                 const sp<hidl::manager::V1_0::IServiceNotification>
@@ -150,12 +181,15 @@
         ~StatusListener() {}
 
         virtual void onDeviceStatusChanged(const String8 &cameraId,
-                hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
+                CameraDeviceStatus newStatus) = 0;
         virtual void onDeviceStatusChanged(const String8 &cameraId,
                 const String8 &physicalCameraId,
-                hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
+                CameraDeviceStatus newStatus) = 0;
         virtual void onTorchStatusChanged(const String8 &cameraId,
-                hardware::camera::common::V1_0::TorchModeStatus newStatus) = 0;
+                TorchModeStatus newStatus,
+                SystemCameraKind kind) = 0;
+        virtual void onTorchStatusChanged(const String8 &cameraId,
+                TorchModeStatus newStatus) = 0;
         virtual void onNewProviderRegistered() = 0;
     };
 
@@ -175,7 +209,10 @@
      * used for testing. The lifetime of the proxy must exceed the lifetime of the manager.
      */
     status_t initialize(wp<StatusListener> listener,
-            ServiceInteractionProxy *proxy = &sHardwareServiceInteractionProxy);
+            HidlServiceInteractionProxy *hidlProxy = &sHidlServiceInteractionProxy);
+
+    status_t getCameraIdIPCTransport(const std::string &id,
+            IPCTransport *providerTransport) const;
 
     /**
      * Retrieve the total number of available cameras.
@@ -194,11 +231,6 @@
     std::vector<std::string> getAPI1CompatibleCameraDeviceIds() const;
 
     /**
-     * Return true if a device with a given ID and major version exists
-     */
-    bool isValidDevice(const std::string &id, uint16_t majorVersion) const;
-
-    /**
      * Return true if a device with a given ID has a flash unit. Returns false
      * for devices that are unknown.
      */
@@ -213,7 +245,7 @@
      * Return the resource cost of this camera device
      */
     status_t getResourceCost(const std::string &id,
-            hardware::camera::common::V1_0::CameraResourceCost* cost) const;
+            CameraResourceCost* cost) const;
 
     /**
      * Return the old camera API camera info
@@ -239,14 +271,15 @@
      * Check for device support of specific stream combination.
      */
     status_t isSessionConfigurationSupported(const std::string& id,
-            const hardware::camera::device::V3_7::StreamConfiguration &configuration,
+            const SessionConfiguration &configuration,
+            bool overrideForPerfClass, camera3::metadataGetter getMetadata,
             bool *status /*out*/) const;
 
     /**
      * Return the highest supported device interface version for this ID
      */
     status_t getHighestSupportedVersion(const std::string &id,
-            hardware::hidl_version *v);
+            hardware::hidl_version *v, IPCTransport *transport);
 
     /**
      * Check if a given camera device support setTorchMode API.
@@ -254,6 +287,17 @@
     bool supportSetTorchMode(const std::string &id) const;
 
     /**
+     * Check if torch strength update should be skipped or not.
+     */
+    bool shouldSkipTorchStrengthUpdate(const std::string &id, int32_t torchStrength) const;
+
+    /**
+     * Return the default torch strength level if the torch strength control
+     * feature is supported.
+     */
+    int32_t getTorchDefaultStrengthLevel(const std::string &id) const;
+
+    /**
      * Turn on or off the flashlight on a given camera device.
      * May fail if the device does not support this API, is in active use, or if the device
      * doesn't exist, etc.
@@ -261,6 +305,24 @@
     status_t setTorchMode(const std::string &id, bool enabled);
 
     /**
+     * Change the brightness level of the flash unit associated with the cameraId and
+     * set it to the value in torchStrength.
+     * If the torch is OFF and torchStrength > 0, the torch will be turned ON with the
+     * specified strength level. If the torch is ON, only the brightness level will be
+     * changed.
+     *
+     * This operation will fail if the device does not have a flash unit, has a flash
+     * unit but does not support this API, if torchStrength is invalid, or if the
+     * device doesn't exist.
+     */
+    status_t turnOnTorchWithStrengthLevel(const std::string &id, int32_t torchStrength);
+
+    /**
+     * Return the torch strength level of this camera device.
+     */
+    status_t getTorchStrengthLevel(const std::string &id, int32_t* torchStrength);
+
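A hedged usage sketch for the torch strength methods above; manager is an assumed
pointer to a CameraProviderManager and cameraId a valid camera ID string:

    int32_t defaultLevel = manager->getTorchDefaultStrengthLevel(cameraId);
    if (!manager->shouldSkipTorchStrengthUpdate(cameraId, defaultLevel)) {
        if (manager->turnOnTorchWithStrengthLevel(cameraId, defaultLevel) == OK) {
            int32_t currentLevel = 0;
            // read back the level that is now applied
            manager->getTorchStrengthLevel(cameraId, &currentLevel);
        }
    }
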
+    /**
      * Setup vendor tags for all registered providers
      */
     status_t setUpVendorTags();
@@ -268,8 +330,19 @@
     /**
      * Inform registered providers about a device state change, such as folding or unfolding
      */
-    status_t notifyDeviceStateChange(
-        android::hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState> newState);
+    status_t notifyDeviceStateChange(int64_t newState);
+
+    status_t openAidlSession(const std::string &id,
+        const std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraDeviceCallback>& callback,
+        /*out*/
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraDeviceSession> *session);
+
+    status_t openAidlInjectionSession(const std::string &id,
+        const std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraDeviceCallback>& callback,
+        /*out*/
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraInjectionSession> *session);
 
     /**
      * Open an active session to a camera device.
@@ -277,7 +350,7 @@
      * This fully powers on the camera device hardware, and returns a handle to a
      * session to be used for hardware configuration and operation.
      */
-    status_t openSession(const std::string &id,
+    status_t openHidlSession(const std::string &id,
             const sp<hardware::camera::device::V3_2::ICameraDeviceCallback>& callback,
             /*out*/
             sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session);
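An illustrative sketch of opening a session through the renamed HIDL entry point;
manager, cameraId and callback (an assumed ICameraDeviceCallback implementation) are
placeholders, and the new openAidlSession / openAidlInjectionSession calls above follow
the same shape with std::shared_ptr types:

    sp<hardware::camera::device::V3_2::ICameraDeviceSession> session;
    status_t res = manager->openHidlSession(cameraId, callback, &session);
    if (res != OK || session == nullptr) {
        // the device could not be powered on or is already in use
    }
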
@@ -295,6 +368,9 @@
             const hardware::hidl_string& name,
             bool preexisting) override;
 
+    // LocalRegistrationCallback::onServiceRegistration
+    virtual void onServiceRegistration(const String16& name, const sp<IBinder> &binder) override;
+
     /**
      * Dump out information about available providers and devices
      */
@@ -309,9 +385,7 @@
     /*
      * Return provider type for a specific device.
      */
-    metadata_vendor_id_t getProviderTagIdLocked(const std::string& id,
-            hardware::hidl_version minVersion = hardware::hidl_version{0,0},
-            hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
+    metadata_vendor_id_t getProviderTagIdLocked(const std::string& id) const;
 
     /*
      * Check if a camera is a logical camera. And if yes, return
@@ -324,106 +398,91 @@
 
     status_t filterSmallJpegSizes(const std::string& cameraId);
 
+    status_t notifyUsbDeviceEvent(int32_t eventId, const std::string &usbDeviceId);
+
     static const float kDepthARTolerance;
 private:
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
     mutable std::mutex mInterfaceMutex;
 
-    // the status listener update callbacks will lock mStatusMutex
-    mutable std::mutex mStatusListenerMutex;
     wp<StatusListener> mListener;
-    ServiceInteractionProxy* mServiceProxy;
+    HidlServiceInteractionProxy* mHidlServiceProxy;
 
     // Current overall Android device physical status
-    android::hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState> mDeviceState;
+    int64_t mDeviceState;
 
     // mProviderLifecycleLock is locked during onRegistration and removeProvider
     mutable std::mutex mProviderLifecycleLock;
 
-    static HardwareServiceInteractionProxy sHardwareServiceInteractionProxy;
+    static HidlServiceInteractionProxyImpl sHidlServiceInteractionProxy;
+
+    struct HalCameraProvider {
+        // Empty parent struct for storing either aidl / hidl camera provider reference
+        HalCameraProvider(const char *descriptor) : mDescriptor(descriptor) { };
+        virtual ~HalCameraProvider() {};
+        std::string mDescriptor;
+    };
+
+    struct HidlHalCameraProvider : public HalCameraProvider {
+        HidlHalCameraProvider(
+                const sp<hardware::camera::provider::V2_4::ICameraProvider> &provider,
+                const char *descriptor) :
+                HalCameraProvider(descriptor), mCameraProvider(provider) { };
+     private:
+        sp<hardware::camera::provider::V2_4::ICameraProvider> mCameraProvider;
+    };
+
+    struct AidlHalCameraProvider : public HalCameraProvider {
+        AidlHalCameraProvider(
+                const std::shared_ptr<
+                        aidl::android::hardware::camera::provider::ICameraProvider> &provider,
+                const char *descriptor) :
+                HalCameraProvider(descriptor), mCameraProvider(provider) { };
+     private:
+        std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider> mCameraProvider;
+    };
+
 
     // Mapping from CameraDevice IDs to CameraProviders. This map is used to keep the
     // ICameraProvider alive while it is in use by the camera with the given ID for camera
     // capabilities
-    std::unordered_map<std::string, sp<hardware::camera::provider::V2_4::ICameraProvider>>
+    std::unordered_map<std::string, std::shared_ptr<HalCameraProvider>>
             mCameraProviderByCameraId;
 
     // Mapping from CameraDevice IDs to CameraProviders. This map is used to keep the
     // ICameraProvider alive while it is in use by the camera with the given ID for torch
     // capabilities
-    std::unordered_map<std::string, sp<hardware::camera::provider::V2_4::ICameraProvider>>
+    std::unordered_map<std::string, std::shared_ptr<HalCameraProvider>>
             mTorchProviderByCameraId;
 
     // Lock for accessing mCameraProviderByCameraId and mTorchProviderByCameraId
     std::mutex mProviderInterfaceMapLock;
-
-    struct ProviderInfo :
-            virtual public hardware::camera::provider::V2_6::ICameraProviderCallback,
-            virtual public hardware::hidl_death_recipient
-    {
+    struct ProviderInfo : public virtual RefBase {
+        friend struct HidlProviderInfo;
+        friend struct AidlProviderInfo;
         const std::string mProviderName;
         const std::string mProviderInstance;
         const metadata_vendor_id_t mProviderTagid;
-        int mMinorVersion;
+        int32_t mMinorVersion;
         sp<VendorTagDescriptor> mVendorTagDescriptor;
         bool mSetTorchModeSupported;
         bool mIsRemote;
 
-        // Current overall Android device physical status
-        hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState> mDeviceState;
-
-        // This pointer is used to keep a reference to the ICameraProvider that was last accessed.
-        wp<hardware::camera::provider::V2_4::ICameraProvider> mActiveInterface;
-
-        sp<hardware::camera::provider::V2_4::ICameraProvider> mSavedInterface;
-
         ProviderInfo(const std::string &providerName, const std::string &providerInstance,
                 CameraProviderManager *manager);
         ~ProviderInfo();
 
-        status_t initialize(sp<hardware::camera::provider::V2_4::ICameraProvider>& interface,
-                hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState>
-                    currentDeviceState);
-
-        const sp<hardware::camera::provider::V2_4::ICameraProvider> startProviderInterface();
+        virtual IPCTransport getIPCTransport() = 0;
 
         const std::string& getType() const;
 
-        status_t addDevice(const std::string& name,
-                hardware::camera::common::V1_0::CameraDeviceStatus initialStatus =
-                hardware::camera::common::V1_0::CameraDeviceStatus::PRESENT,
-                /*out*/ std::string *parsedId = nullptr);
-
         status_t dump(int fd, const Vector<String16>& args) const;
 
-        // ICameraProviderCallbacks interface - these lock the parent mInterfaceMutex
-        hardware::Return<void> cameraDeviceStatusChange(
-                const hardware::hidl_string& cameraDeviceName,
-                hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
-        hardware::Return<void> torchModeStatusChange(
-                const hardware::hidl_string& cameraDeviceName,
-                hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
-        hardware::Return<void> physicalCameraDeviceStatusChange(
-                const hardware::hidl_string& cameraDeviceName,
-                const hardware::hidl_string& physicalCameraDeviceName,
-                hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
-
-        status_t cameraDeviceStatusChangeLocked(
-                std::string* id, const hardware::hidl_string& cameraDeviceName,
-                hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
-        status_t physicalCameraDeviceStatusChangeLocked(
-                std::string* id, std::string* physicalId,
-                const hardware::hidl_string& cameraDeviceName,
-                const hardware::hidl_string& physicalCameraDeviceName,
-                hardware::camera::common::V1_0::CameraDeviceStatus newStatus);
-
-        // hidl_death_recipient interface - this locks the parent mInterfaceMutex
-        virtual void serviceDied(uint64_t cookie, const wp<hidl::base::V1_0::IBase>& who) override;
-
+        void initializeProviderInfoCommon(const std::vector<std::string> &devices);
         /**
          * Setup vendor tags for this provider
          */
-        status_t setUpVendorTags();
+        virtual status_t setUpVendorTags() = 0;
 
         /**
          * Notify provider about top-level device physical state changes
@@ -432,9 +491,11 @@
          * It is possible for camera providers to add/remove devices and try to
          * acquire it.
          */
-        status_t notifyDeviceStateChange(
-                hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState>
-                    newDeviceState);
+        virtual status_t notifyDeviceStateChange(int64_t newDeviceState) = 0;
+
+        virtual bool successfullyStartedProviderInterface() = 0;
+
+        virtual int64_t getDeviceState() = 0;
 
         std::vector<std::unordered_set<std::string>> getConcurrentCameraIdCombinations();
 
@@ -443,40 +504,62 @@
          *
          * Note that 'mInterfaceMutex' should be held when calling this method.
          */
-        void notifyDeviceInfoStateChangeLocked(
-               hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState>
-                   newDeviceState);
+        void notifyDeviceInfoStateChangeLocked(int64_t newDeviceState);
 
         /**
          * Query the camera provider for concurrent stream configuration support
          */
-        status_t isConcurrentSessionConfigurationSupported(
-                const hardware::hidl_vec<
-                        hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
-                                &halCameraIdsAndStreamCombinations,
-                bool *isSupported);
+        virtual status_t isConcurrentSessionConfigurationSupported(
+                    const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+                    const std::set<std::string>& perfClassPrimaryCameraIds,
+                    int targetSdkVersion, bool *isSupported) = 0;
+
+        /**
+         * Remove all devices associated with this provider and notify listeners
+         * with NOT_PRESENT state.
+         */
+        void removeAllDevices();
+
+        /**
+         * Provider is an external lazy HAL
+         */
+        bool isExternalLazyHAL() const;
 
         // Basic device information, common to all camera devices
         struct DeviceInfo {
             const std::string mName;  // Full instance name
             const std::string mId;    // ID section of full name
+            // Both hidl and aidl DeviceInfos. Aidl deviceInfos get {3, 8} to
+            // start off.
             const hardware::hidl_version mVersion;
             const metadata_vendor_id_t mProviderTagid;
             bool mIsLogicalCamera;
             std::vector<std::string> mPhysicalIds;
             hardware::CameraInfo mInfo;
-            sp<IBase> mSavedInterface;
             SystemCameraKind mSystemCameraKind = SystemCameraKind::PUBLIC;
 
-            const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
+            const CameraResourceCost mResourceCost;
 
-            hardware::camera::common::V1_0::CameraDeviceStatus mStatus;
+            CameraDeviceStatus mStatus;
 
             wp<ProviderInfo> mParentProvider;
+            // Torch strength default and maximum levels, if the torch strength control
+            // feature is supported.
+            int32_t mTorchStrengthLevel;
+            int32_t mTorchMaximumStrengthLevel;
+            int32_t mTorchDefaultStrengthLevel;
+
+            // Wait for lazy HALs to confirm device availability
+            static const nsecs_t kDeviceAvailableTimeout = 2000e6; // 2000 ms
+            Mutex     mDeviceAvailableLock;
+            Condition mDeviceAvailableSignal;
+            bool mIsDeviceAvailable = true;
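
A sketch of the availability wait these members support, presumably driven by the
transport-specific DeviceInfo code (illustrative only):

    Mutex::Autolock lock(mDeviceAvailableLock);
    while (!mIsDeviceAvailable) {
        // waitRelative() returns a non-OK status (TIMED_OUT) once kDeviceAvailableTimeout elapses
        if (mDeviceAvailableSignal.waitRelative(mDeviceAvailableLock,
                kDeviceAvailableTimeout) != OK) {
            break;
        }
    }
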
 
             bool hasFlashUnit() const { return mHasFlashUnit; }
             bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
             virtual status_t setTorchMode(bool enabled) = 0;
+            virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
+            virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
             virtual status_t dumpState(int fd) = 0;
@@ -494,44 +577,33 @@
             }
 
             virtual status_t isSessionConfigurationSupported(
-                    const hardware::camera::device::V3_7::StreamConfiguration &/*configuration*/,
+                    const SessionConfiguration &/*configuration*/,
+                    bool /*overrideForPerfClass*/,
+                    camera3::metadataGetter /*getMetadata*/,
                     bool * /*status*/) {
                 return INVALID_OPERATION;
             }
             virtual status_t filterSmallJpegSizes() = 0;
-            virtual void notifyDeviceStateChange(
-                    hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState>
-                        /*newState*/) {}
-
-            template<class InterfaceT>
-            sp<InterfaceT> startDeviceInterface();
+            virtual void notifyDeviceStateChange(int64_t /*newState*/) {}
 
             DeviceInfo(const std::string& name, const metadata_vendor_id_t tagId,
                     const std::string &id, const hardware::hidl_version& version,
                     const std::vector<std::string>& publicCameraIds,
-                    const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
+                    const CameraResourceCost& resourceCost,
                     sp<ProviderInfo> parentProvider) :
                     mName(name), mId(id), mVersion(version), mProviderTagid(tagId),
                     mIsLogicalCamera(false), mResourceCost(resourceCost),
-                    mStatus(hardware::camera::common::V1_0::CameraDeviceStatus::PRESENT),
-                    mParentProvider(parentProvider), mHasFlashUnit(false),
-                    mSupportNativeZoomRatio(false), mPublicCameraIds(publicCameraIds) {}
-            virtual ~DeviceInfo();
+                    mStatus(CameraDeviceStatus::PRESENT),
+                    mParentProvider(parentProvider), mTorchStrengthLevel(0),
+                    mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
+                    mHasFlashUnit(false), mSupportNativeZoomRatio(false),
+                    mPublicCameraIds(publicCameraIds) {}
+            virtual ~DeviceInfo() {}
         protected:
+
             bool mHasFlashUnit; // const after constructor
             bool mSupportNativeZoomRatio; // const after constructor
             const std::vector<std::string>& mPublicCameraIds;
-
-            template<class InterfaceT>
-            static status_t setTorchMode(InterfaceT& interface, bool enabled);
-
-            template<class InterfaceT>
-            status_t setTorchModeForDevice(bool enabled) {
-                // Don't save the ICameraProvider interface here because we assume that this was
-                // called from CameraProviderManager::setTorchMode(), which does save it.
-                const sp<InterfaceT> interface = startDeviceInterface<InterfaceT>();
-                return DeviceInfo::setTorchMode(interface, enabled);
-            }
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
@@ -546,43 +618,47 @@
 
         // HALv3-specific camera fields, including the actual device interface
         struct DeviceInfo3 : public DeviceInfo {
-            typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
 
-            virtual status_t setTorchMode(bool enabled) override;
+            virtual status_t setTorchMode(bool enabled) = 0;
+            virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
+            virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
             virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
-            virtual status_t dumpState(int fd) override;
+            virtual status_t dumpState(int fd) = 0;
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
                     CameraMetadata *characteristics) const override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t isSessionConfigurationSupported(
-                    const hardware::camera::device::V3_7::StreamConfiguration &configuration,
-                    bool *status /*out*/)
-                    override;
+                    const SessionConfiguration &configuration, bool /*overrideForPerfClass*/,
+                    camera3::metadataGetter /*getMetadata*/,
+                    bool *status /*out*/) = 0;
             virtual status_t filterSmallJpegSizes() override;
             virtual void notifyDeviceStateChange(
-                    hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState>
-                        newState) override;
+                        int64_t newState) override;
 
             DeviceInfo3(const std::string& name, const metadata_vendor_id_t tagId,
                     const std::string &id, uint16_t minorVersion,
-                    const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
+                    const CameraResourceCost& resourceCost,
                     sp<ProviderInfo> parentProvider,
-                    const std::vector<std::string>& publicCameraIds, sp<InterfaceT> interface);
-            virtual ~DeviceInfo3();
-        private:
+                    const std::vector<std::string>& publicCameraIds);
+            virtual ~DeviceInfo3() {};
+        protected:
+            // Modified by derived transport specific (hidl / aidl) class
             CameraMetadata mCameraCharacteristics;
             // Map device states to sensor orientations
             std::unordered_map<int64_t, int32_t> mDeviceStateOrientationMap;
             // A copy of mCameraCharacteristics without performance class
             // override
             std::unique_ptr<CameraMetadata> mCameraCharNoPCOverride;
+            // Only contains characteristics for hidden physical cameras,
+            // not for public physical cameras.
             std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
             void queryPhysicalCameraIds();
             SystemCameraKind getSystemCameraKind();
             status_t fixupMonochromeTags();
+            status_t fixupTorchStrengthTags();
             status_t addDynamicDepthTags(bool maxResolution = false);
             status_t deriveHeicTags(bool maxResolution = false);
             status_t addRotateCropTags();
@@ -612,8 +688,7 @@
                     const camera_metadata_entry& halStreamConfigs,
                     const camera_metadata_entry& halStreamDurations);
         };
-
-    private:
+    protected:
         std::string mType;
         uint32_t mId;
 
@@ -623,12 +698,12 @@
 
         struct CameraStatusInfoT {
             bool isPhysicalCameraStatus = false;
-            hardware::hidl_string cameraId;
-            hardware::hidl_string physicalCameraId;
-            hardware::camera::common::V1_0::CameraDeviceStatus status;
-            CameraStatusInfoT(bool isForPhysicalCamera, const hardware::hidl_string& id,
-                    const hardware::hidl_string& physicalId,
-                    hardware::camera::common::V1_0::CameraDeviceStatus s) :
+            std::string cameraId;
+            std::string physicalCameraId;
+            CameraDeviceStatus status;
+            CameraStatusInfoT(bool isForPhysicalCamera, const std::string& id,
+                    const std::string& physicalId,
+                    CameraDeviceStatus s) :
                     isPhysicalCameraStatus(isForPhysicalCamera), cameraId(id),
                     physicalCameraId(physicalId), status(s) {}
         };
@@ -640,22 +715,19 @@
         // End of scope for mInitLock
 
         std::future<void> mInitialStatusCallbackFuture;
+
+        virtual std::unique_ptr<ProviderInfo::DeviceInfo> initializeDeviceInfo(
+                const std::string &name, const metadata_vendor_id_t tagId,
+                const std::string &id, uint16_t minorVersion) = 0;
+
+        virtual status_t reCacheConcurrentStreamingCameraIdsLocked() = 0;
+
         void notifyInitialStatusChange(sp<StatusListener> listener,
                 std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
 
         std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
 
-        // Templated method to instantiate the right kind of DeviceInfo and call the
-        // right CameraProvider getCameraDeviceInterface_* method.
-        template<class DeviceInfoT>
-        std::unique_ptr<DeviceInfo> initializeDeviceInfo(const std::string &name,
-                const metadata_vendor_id_t tagId, const std::string &id,
-                uint16_t minorVersion);
-
-        // Helper for initializeDeviceInfo to use the right CameraProvider get method.
-        template<class InterfaceT>
-        sp<InterfaceT> startDeviceInterface(const std::string &name);
-
         // Parse provider instance name for type and id
         static status_t parseProviderName(const std::string& name,
                 std::string *type, uint32_t *id);
@@ -667,52 +739,89 @@
         // Generate vendor tag id
         static metadata_vendor_id_t generateVendorTagId(const std::string &name);
 
+        status_t addDevice(
+                const std::string& name, CameraDeviceStatus initialStatus,
+                /*out*/ std::string* parsedId);
+
+        void cameraDeviceStatusChangeInternal(const std::string& cameraDeviceName,
+                CameraDeviceStatus newStatus);
+
+        status_t cameraDeviceStatusChangeLocked(
+                std::string* id, const std::string& cameraDeviceName,
+                CameraDeviceStatus newStatus);
+
+        void physicalCameraDeviceStatusChangeInternal(const std::string& cameraDeviceName,
+                const std::string& physicalCameraDeviceName,
+                CameraDeviceStatus newStatus);
+
+        status_t physicalCameraDeviceStatusChangeLocked(
+                std::string* id, std::string* physicalId,
+                const std::string& cameraDeviceName,
+                const std::string& physicalCameraDeviceName,
+                CameraDeviceStatus newStatus);
+
+        void torchModeStatusChangeInternal(const std::string& cameraDeviceName,
+                TorchModeStatus newStatus);
+
         void removeDevice(std::string id);
 
-        // Expects to have mLock locked
-        status_t reCacheConcurrentStreamingCameraIdsLocked();
-        // Expects to have mLock locked
-        status_t getConcurrentCameraIdsInternalLocked(
-                sp<hardware::camera::provider::V2_6::ICameraProvider> &interface2_6);
     };
 
+    template <class ProviderInfoType, class HalCameraProviderType>
+    status_t setTorchModeT(sp<ProviderInfo> &parentProvider,
+            std::shared_ptr<HalCameraProvider> *halCameraProvider);
+
+    // Try to get hidl provider services declared. Expects mInterfaceMutex to be
+    // locked. Also registers for hidl provider service notifications.
+    status_t tryToInitAndAddHidlProvidersLocked(HidlServiceInteractionProxy *hidlProxy);
+
+    // Try to get aidl provider services declared. Expects mInterfaceMutex to be
+    // locked. Also registers for aidl provider service notifications.
+    status_t tryToAddAidlProvidersLocked();
+
     /**
      * Save the ICameraProvider while it is being used by a camera or torch client
      */
     void saveRef(DeviceMode usageType, const std::string &cameraId,
-            sp<hardware::camera::provider::V2_4::ICameraProvider> provider);
+            std::shared_ptr<HalCameraProvider> provider);
 
     // Utility to find a DeviceInfo by ID; pointer is only valid while mInterfaceMutex is held
     // and the calling code doesn't mutate the list of providers or their lists of devices.
-    // Finds the first device of the given ID that falls within the requested version range
-    //   minVersion <= deviceVersion < maxVersion
     // No guarantees on the order of traversal
-    ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id,
-            hardware::hidl_version minVersion = hardware::hidl_version{0,0},
-            hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
+    ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
 
-    status_t addProviderLocked(const std::string& newProvider, bool preexisting = false);
+    // Map external providers to USB devices in order to handle USB hotplug
+    // events for lazy HALs
+    std::pair<std::vector<std::string>, sp<ProviderInfo>>
+        mExternalUsbDevicesForProvider;
+    sp<ProviderInfo> startExternalLazyProvider() const;
 
-    status_t tryToInitializeProviderLocked(const std::string& providerName,
+    status_t addHidlProviderLocked(const std::string& newProvider, bool preexisting = false);
+
+    status_t addAidlProviderLocked(const std::string& newProvider);
+
+    status_t tryToInitializeHidlProviderLocked(const std::string& providerName,
+            const sp<ProviderInfo>& providerInfo);
+
+    status_t tryToInitializeAidlProviderLocked(const std::string& providerName,
             const sp<ProviderInfo>& providerInfo);
 
     bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
+    // No method that refers to this provider, and no member belonging to it,
+    // should be used after this method is called, since that leads to invalid
+    // memory access (especially since this is called by ProviderInfo methods
+    // on HAL service death).
     status_t removeProvider(const std::string& provider);
     sp<StatusListener> getStatusListener() const;
 
-    bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
+    bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion,
+            IPCTransport transport) const;
 
     size_t mProviderInstanceId = 0;
     std::vector<sp<ProviderInfo>> mProviders;
-
-    void addProviderToMap(
-            const std::string &cameraId,
-            sp<hardware::camera::provider::V2_4::ICameraProvider> provider,
-            bool isTorchUsage);
-    void removeCameraIdFromMap(
-        std::unordered_map<std::string, sp<hardware::camera::provider::V2_4::ICameraProvider>> &map,
-        const std::string &cameraId);
+    // Provider names of AIDL providers with retrieved binders.
+    std::set<std::string> mAidlProviderWithBinders;
 
     static const char* deviceStatusToString(
         const hardware::camera::common::V1_0::CameraDeviceStatus&);
@@ -724,20 +833,16 @@
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
-    std::pair<bool, ProviderInfo::DeviceInfo *> isHiddenPhysicalCameraInternal(const std::string& cameraId) const;
+    std::pair<bool, ProviderInfo::DeviceInfo *> isHiddenPhysicalCameraInternal(
+            const std::string& cameraId) const;
 
     void collectDeviceIdsLocked(const std::vector<std::string> deviceIds,
             std::vector<std::string>& normalDeviceIds,
             std::vector<std::string>& systemCameraDeviceIds) const;
 
-    status_t convertToHALStreamCombinationAndCameraIdsLocked(
-              const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
-                      &cameraIdsAndSessionConfigs,
-              const std::set<std::string>& perfClassPrimaryCameraIds,
-              int targetSdkVersion,
-              hardware::hidl_vec<hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
-                      *halCameraIdsAndStreamCombinations,
-              bool *earlyExit);
+    status_t usbDeviceDetached(const std::string &usbDeviceId);
+    ndk::ScopedAStatus onAidlRegistration(const std::string& in_name,
+            const ::ndk::SpAIBinder& in_binder);
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index c995670..719ff2c 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -42,6 +42,10 @@
 #include <xmpmeta/xmp_data.h>
 #include <xmpmeta/xmp_writer.h>
 
+#ifndef __unused
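+// Fallback for toolchains / libc headers that do not already define __unused.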
+#define __unused __attribute__((__unused__))
+#endif
+
 using dynamic_depth::Camera;
 using dynamic_depth::Cameras;
 using dynamic_depth::CameraParams;
diff --git a/services/camera/libcameraservice/common/HalConversionsTemplated.h b/services/camera/libcameraservice/common/HalConversionsTemplated.h
new file mode 100644
index 0000000..96a715c
--- /dev/null
+++ b/services/camera/libcameraservice/common/HalConversionsTemplated.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_HAL_CONVERSION_TEMPLATED_H
+#define ANDROID_SERVERS_CAMERA_HAL_CONVERSION_TEMPLATED_H
+
+#include "common/CameraProviderManager.h"
+
+#include <device3/Camera3StreamInterface.h>
+
+namespace android {
+
+template <class HalCameraDeviceStatus>
+HalCameraDeviceStatus mapFrameworkToHalCameraDeviceStatus(
+        const CameraDeviceStatus& s)  {
+    switch(s) {
+        case CameraDeviceStatus::PRESENT:
+            return HalCameraDeviceStatus::PRESENT;
+        case CameraDeviceStatus::NOT_PRESENT:
+            return HalCameraDeviceStatus::NOT_PRESENT;
+        case CameraDeviceStatus::ENUMERATING:
+            return HalCameraDeviceStatus::ENUMERATING;
+    }
+    ALOGW("Unexpectedcamera device status code %d", s);
+    return HalCameraDeviceStatus::NOT_PRESENT;
+}
+
+template <class HalCameraDeviceStatus>
+CameraDeviceStatus HalToFrameworkCameraDeviceStatus(
+        const HalCameraDeviceStatus& s)  {
+    switch(s) {
+        case HalCameraDeviceStatus::PRESENT:
+            return CameraDeviceStatus::PRESENT;
+        case HalCameraDeviceStatus::NOT_PRESENT:
+            return CameraDeviceStatus::NOT_PRESENT;
+        case HalCameraDeviceStatus::ENUMERATING:
+            return CameraDeviceStatus::ENUMERATING;
+    }
+    ALOGW("Unexpectedcamera device status code %d", s);
+    return CameraDeviceStatus::NOT_PRESENT;
+}
+
+template <class HalCameraResourceCost>
+CameraResourceCost HalToFrameworkResourceCost(
+        const HalCameraResourceCost& s)  {
+    CameraResourceCost internalResourceCost;
+    internalResourceCost.resourceCost = (uint32_t)s.resourceCost;
+    for (const auto& device : s.conflictingDevices) {
+        internalResourceCost.conflictingDevices.emplace_back(device.c_str());
+    }
+    return internalResourceCost;
+}
+
+template <class HalTorchModeStatus>
+TorchModeStatus HalToFrameworkTorchModeStatus(
+        const HalTorchModeStatus& s)  {
+    switch(s) {
+        case HalTorchModeStatus::NOT_AVAILABLE:
+            return TorchModeStatus::NOT_AVAILABLE;
+        case HalTorchModeStatus::AVAILABLE_OFF:
+            return TorchModeStatus::AVAILABLE_OFF;
+        case HalTorchModeStatus::AVAILABLE_ON:
+            return TorchModeStatus::AVAILABLE_ON;
+    }
+    ALOGW("Unexpectedcamera torch mode status code %d", s);
+    return TorchModeStatus::NOT_AVAILABLE;
+}
+
+template <class HalCameraDeviceStatus>
+const char* HalDeviceStatusToString(const HalCameraDeviceStatus& s) {
+    switch(s) {
+        case HalCameraDeviceStatus::NOT_PRESENT:
+            return "NOT_PRESENT";
+        case HalCameraDeviceStatus::PRESENT:
+            return "PRESENT";
+        case HalCameraDeviceStatus::ENUMERATING:
+            return "ENUMERATING";
+    }
+    ALOGW("Unexpected HAL device status code %d", s);
+    return "UNKNOWN_STATUS";
+}
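+
+// Usage sketch (illustration only; "halStatus" stands for whichever HIDL or AIDL
+// device-status value a caller passes in; the template parameter is deduced from it):
+//
+//     CameraDeviceStatus fwkStatus = HalToFrameworkCameraDeviceStatus(halStatus);
+//     ALOGV("HAL device status: %s", HalDeviceStatusToString(halStatus));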
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_HAL_CONVERSION_TEMPLATED_H
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
new file mode 100644
index 0000000..81b4779
--- /dev/null
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -0,0 +1,832 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "AidlProviderInfo.h"
+#include "common/HalConversionsTemplated.h"
+#include "common/CameraProviderInfoTemplated.h"
+
+#include <cutils/properties.h>
+
+#include <aidlcommonsupport/NativeHandle.h>
+#include <android/binder_manager.h>
+#include <android/hardware/ICameraService.h>
+#include <camera_metadata_hidden.h>
+
+#include "device3/ZoomRatioMapper.h"
+#include <utils/SessionConfigurationUtils.h>
+#include <utils/Trace.h>
+
+namespace {
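+// Read once at process start; when true, provider and device interfaces are only
+// held strongly while a camera or torch client is actively using them (lazy HALs).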
+const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
+} // anonymous namespace
+
+namespace android {
+
+namespace SessionConfigurationUtils = ::android::camera3::SessionConfigurationUtils;
+
+using namespace aidl::android::hardware;
+using namespace hardware::camera;
+using hardware::camera2::utils::CameraIdAndSessionConfiguration;
+using hardware::ICameraService;
+
+using HalDeviceStatusType = aidl::android::hardware::camera::common::CameraDeviceStatus;
+using ICameraProvider = aidl::android::hardware::camera::provider::ICameraProvider;
+using StatusListener = CameraProviderManager::StatusListener;
+
+static status_t mapExceptionCodeToStatusT(binder_exception_t binderException) {
+    switch (binderException) {
+        case EX_NONE:
+            return OK;
+        case EX_ILLEGAL_ARGUMENT:
+        case EX_NULL_POINTER:
+        case EX_BAD_PARCELABLE:
+        case EX_ILLEGAL_STATE:
+            return BAD_VALUE;
+        case EX_UNSUPPORTED_OPERATION:
+            return INVALID_OPERATION;
+        case EX_TRANSACTION_FAILED:
+            return DEAD_OBJECT;
+        default:
+            return UNKNOWN_ERROR;
+    }
+}
+
+status_t AidlProviderInfo::mapToStatusT(const ndk::ScopedAStatus& s) {
+    using Status = aidl::android::hardware::camera::common::Status;
+    auto exceptionCode = s.getExceptionCode();
+    if (exceptionCode != EX_SERVICE_SPECIFIC) {
+        return mapExceptionCodeToStatusT(exceptionCode);
+    }
+    Status st = static_cast<Status>(s.getServiceSpecificError());
+    switch (st) {
+        case Status::OK:
+            return OK;
+        case Status::ILLEGAL_ARGUMENT:
+            return BAD_VALUE;
+        case Status::CAMERA_IN_USE:
+            return -EBUSY;
+        case Status::MAX_CAMERAS_IN_USE:
+            return -EUSERS;
+        case Status::OPERATION_NOT_SUPPORTED:
+            return INVALID_OPERATION;
+        case Status::CAMERA_DISCONNECTED:
+            return DEAD_OBJECT;
+        case Status::INTERNAL_ERROR:
+            return INVALID_OPERATION;
+    }
+    ALOGW("Unexpected HAL status code %d", static_cast<int>(st));
+    return INVALID_OPERATION;
+}
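+
+// For example, a ScopedAStatus carrying the service-specific error
+// Status::CAMERA_IN_USE maps to -EBUSY, while a plain transaction failure
+// (EX_TRANSACTION_FAILED) maps to DEAD_OBJECT.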
+
+AidlProviderInfo::AidlProviderInfo(
+            const std::string &providerName,
+            const std::string &providerInstance,
+            CameraProviderManager *manager) :
+            CameraProviderManager::ProviderInfo(providerName, providerInstance, manager) {}
+
+status_t AidlProviderInfo::initializeAidlProvider(
+        std::shared_ptr<ICameraProvider>& interface, int64_t currentDeviceState) {
+
+    status_t res = parseProviderName(mProviderName, &mType, &mId);
+    if (res != OK) {
+        ALOGE("%s: Invalid provider name, ignoring", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    ALOGI("Connecting to new camera provider: %s, isRemote? %d",
+            mProviderName.c_str(), interface->isRemote());
+
+    // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
+    // before setCallback returns
+    mCallbacks =
+            ndk::SharedRefBase::make<AidlProviderCallbacks>(this);
+    ndk::ScopedAStatus status =
+            interface->setCallback(mCallbacks);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), status.getMessage());
+        return mapToStatusT(status);
+    }
+
+    mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(AIBinder_DeathRecipient_new(binderDied));
+    auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(), this);
+    if (link != STATUS_OK) {
+        ALOGW("%s: Unable to link to provider '%s' death notifications",
+                __FUNCTION__, mProviderName.c_str());
+        return DEAD_OBJECT;
+    }
+
+    if (!kEnableLazyHal) {
+        // Save HAL reference indefinitely
+        mSavedInterface = interface;
+    } else {
+        mActiveInterface = interface;
+    }
+
+    ALOGV("%s: Setting device state for %s: 0x%" PRIx64,
+            __FUNCTION__, mProviderName.c_str(), mDeviceState);
+    notifyDeviceStateChange(currentDeviceState);
+
+    res = setUpVendorTags();
+    if (res != OK) {
+        ALOGE("%s: Unable to set up vendor tags from provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return res;
+    }
+
+    // Get initial list of camera devices, if any
+    std::vector<std::string> devices;
+    std::vector<std::string> retDevices;
+    status = interface->getCameraIdList(&retDevices);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error in getting camera ID list from provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), status.getMessage());
+        return mapToStatusT(status);
+    }
+
+    for (auto& name : retDevices) {
+        uint16_t major, minor;
+        std::string type, id;
+        status_t res = parseDeviceName(name, &major, &minor, &type, &id);
+        if (res != OK) {
+            ALOGE("%s: Error parsing deviceName: %s: %d", __FUNCTION__, name.c_str(), res);
+            return res;
+        } else {
+            devices.push_back(name);
+            mProviderPublicCameraIds.push_back(id);
+        }
+    }
+
+    // Get list of concurrent streaming camera device combinations
+    res = getConcurrentCameraIdsInternalLocked(interface);
+    if (res != OK) {
+        return res;
+    }
+
+    mSetTorchModeSupported = true;
+
+    mIsRemote = interface->isRemote();
+
+    initializeProviderInfoCommon(devices);
+    return OK;
+}
+
+void AidlProviderInfo::binderDied(void *cookie) {
+    AidlProviderInfo *provider = reinterpret_cast<AidlProviderInfo *>(cookie);
+    ALOGI("Camera provider '%s' has died; removing it", provider->mProviderInstance.c_str());
+    provider->mManager->removeProvider(provider->mProviderInstance);
+}
+
+status_t AidlProviderInfo::setUpVendorTags() {
+    if (mVendorTagDescriptor != nullptr)
+        return OK;
+
+    std::vector<camera::common::VendorTagSection> vts;
+    ::ndk::ScopedAStatus status;
+    const std::shared_ptr<ICameraProvider> interface = startProviderInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    status = interface->getVendorTags(&vts);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error getting vendor tags from provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), status.getMessage());
+        return mapToStatusT(status);
+    }
+
+    // Read all vendor tag definitions into a descriptor
+    status_t res;
+    if ((res =
+            IdlVendorTagDescriptor::
+                    createDescriptorFromIdl<std::vector<camera::common::VendorTagSection>,
+                            camera::common::VendorTagSection>(vts, /*out*/mVendorTagDescriptor))
+            != OK) {
+        ALOGE("%s: Could not generate descriptor from vendor tag operations,"
+                "received error %s (%d). Camera clients will not be able to use"
+                "vendor tags", __FUNCTION__, strerror(res), res);
+        return res;
+    }
+
+    return OK;
+}
+
+status_t AidlProviderInfo::notifyDeviceStateChange(int64_t newDeviceState) {
+
+    mDeviceState = newDeviceState;
+    // Check if the provider is currently active - not going to start it up for this notification
+    auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.lock();
+    if (interface != nullptr) {
+        // Send current device state
+        interface->notifyDeviceStateChange(mDeviceState);
+    }
+    return OK;
+}
+
+bool AidlProviderInfo::successfullyStartedProviderInterface() {
+    return startProviderInterface() != nullptr;
+}
+
+std::shared_ptr<camera::device::ICameraDevice>
+AidlProviderInfo::startDeviceInterface(const std::string &name) {
+    ::ndk::ScopedAStatus status;
+    std::shared_ptr<camera::device::ICameraDevice> cameraInterface;
+    const std::shared_ptr<ICameraProvider> interface = startProviderInterface();
+    if (interface == nullptr) {
+        return nullptr;
+    }
+    status = interface->getCameraDeviceInterface(name, &cameraInterface);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
+                __FUNCTION__, name.c_str(), status.getMessage());
+        return nullptr;
+    }
+    return cameraInterface;
+}
+
+const std::shared_ptr<ICameraProvider> AidlProviderInfo::startProviderInterface() {
+    ATRACE_CALL();
+    ALOGV("Request to start camera provider: %s", mProviderName.c_str());
+    if (mSavedInterface != nullptr) {
+        return mSavedInterface;
+    }
+    if (!kEnableLazyHal) {
+        ALOGE("Bad provider state! Should not be here on a non-lazy HAL!");
+        return nullptr;
+    }
+
+    auto interface = mActiveInterface.lock();
+    if (interface == nullptr) {
+        // Try to get service without starting
+        interface = ICameraProvider::fromBinder(
+                ndk::SpAIBinder(AServiceManager_checkService(mProviderName.c_str())));
+        if (interface == nullptr) {
+            ALOGV("Camera provider actually needs restart, calling getService(%s)",
+                  mProviderName.c_str());
+            interface = ICameraProvider::fromBinder(
+                    ndk::SpAIBinder(AServiceManager_getService(mProviderName.c_str())));
+
+            // Set all devices as ENUMERATING, provider should update status
+            // to PRESENT after initializing.
+            // This avoids failing getCameraDeviceInterface_V3_x before devices
+            // are ready.
+            for (auto& device : mDevices) {
+                device->mIsDeviceAvailable = false;
+            }
+
+            interface->setCallback(mCallbacks);
+            auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(),
+                    this);
+            if (link != STATUS_OK) {
+                ALOGW("%s: Unable to link to provider '%s' death notifications",
+                        __FUNCTION__, mProviderName.c_str());
+                mManager->removeProvider(mProviderName);
+                return nullptr;
+            }
+
+            // Send current device state
+            interface->notifyDeviceStateChange(mDeviceState);
+        }
+        mActiveInterface = interface;
+    } else {
+        ALOGV("Camera provider (%s) already in use. Re-using instance.",
+              mProviderName.c_str());
+    }
+
+    return interface;
+}
+
+::ndk::ScopedAStatus AidlProviderInfo::AidlProviderCallbacks::cameraDeviceStatusChange(
+        const std::string& cameraDeviceName,
+        HalDeviceStatusType newStatus) {
+    sp<AidlProviderInfo> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: Parent provider not alive", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return parent->cameraDeviceStatusChange(cameraDeviceName, newStatus);
+}
+
+::ndk::ScopedAStatus AidlProviderInfo::AidlProviderCallbacks::torchModeStatusChange(
+            const std::string& cameraDeviceName,
+            aidl::android::hardware::camera::common::TorchModeStatus newStatus) {
+    sp<AidlProviderInfo> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: Parent provider not alive", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return parent->torchModeStatusChange(cameraDeviceName, newStatus);
+}
+
+::ndk::ScopedAStatus AidlProviderInfo::AidlProviderCallbacks::physicalCameraDeviceStatusChange(
+            const std::string& cameraDeviceName,
+            const std::string& physicalCameraDeviceName,
+            HalDeviceStatusType newStatus) {
+    sp<AidlProviderInfo> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: Parent provider not alive", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return parent->physicalCameraDeviceStatusChange(cameraDeviceName, physicalCameraDeviceName,
+            newStatus);
+}
+
+::ndk::ScopedAStatus AidlProviderInfo::cameraDeviceStatusChange(const std::string& cameraDeviceName,
+            HalDeviceStatusType newStatus) {
+    cameraDeviceStatusChangeInternal(cameraDeviceName, HalToFrameworkCameraDeviceStatus(newStatus));
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlProviderInfo::torchModeStatusChange(const std::string& cameraDeviceName,
+            aidl::android::hardware::camera::common::TorchModeStatus newStatus) {
+    torchModeStatusChangeInternal(cameraDeviceName, HalToFrameworkTorchModeStatus(newStatus));
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlProviderInfo::physicalCameraDeviceStatusChange(
+            const std::string& cameraDeviceName,
+            const std::string& physicalCameraDeviceName,
+            HalDeviceStatusType newStatus) {
+    physicalCameraDeviceStatusChangeInternal(cameraDeviceName, physicalCameraDeviceName,
+            HalToFrameworkCameraDeviceStatus(newStatus));
+    return ::ndk::ScopedAStatus::ok();
+}
+
+std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
+    AidlProviderInfo::initializeDeviceInfo(
+        const std::string &name, const metadata_vendor_id_t tagId,
+        const std::string &id, uint16_t minorVersion) {
+    ::ndk::ScopedAStatus status;
+
+    auto cameraInterface = startDeviceInterface(name);
+    if (cameraInterface == nullptr) return nullptr;
+
+    camera::common::CameraResourceCost resourceCost;
+    status = cameraInterface->getResourceCost(&resourceCost);
+    if (!status.isOk()) {
+        ALOGE("%s: Unable to obtain resource costs for camera device %s: %s", __FUNCTION__,
+                name.c_str(), status.getMessage());
+        return nullptr;
+    }
+
+    for (auto& conflictName : resourceCost.conflictingDevices) {
+        uint16_t major, minor;
+        std::string type, id;
+        status_t res = parseDeviceName(conflictName, &major, &minor, &type, &id);
+        if (res != OK) {
+            ALOGE("%s: Failed to parse conflicting device %s", __FUNCTION__, conflictName.c_str());
+            return nullptr;
+        }
+        conflictName = id;
+    }
+
+    return std::unique_ptr<DeviceInfo3>(
+        new AidlDeviceInfo3(name, tagId, id, minorVersion, HalToFrameworkResourceCost(resourceCost),
+                this, mProviderPublicCameraIds, cameraInterface));
+}
+
+status_t AidlProviderInfo::reCacheConcurrentStreamingCameraIdsLocked() {
+
+    // Check if the provider is currently active - not going to start it up for this notification
+    auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.lock();
+    if (interface == nullptr) {
+        ALOGE("%s: camera provider interface for %s is not valid", __FUNCTION__,
+                mProviderName.c_str());
+        return INVALID_OPERATION;
+    }
+
+    return getConcurrentCameraIdsInternalLocked(interface);
+}
+
+status_t AidlProviderInfo::getConcurrentCameraIdsInternalLocked(
+        std::shared_ptr<ICameraProvider> &interface) {
+    if (interface == nullptr) {
+        ALOGE("%s: null interface provided", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    std::vector<aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination> combs;
+    ::ndk::ScopedAStatus status = interface->getConcurrentCameraIds(&combs);
+
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error in getting concurrent camera ID list from provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return mapToStatusT(status);
+    }
+    mConcurrentCameraIdCombinations.clear();
+    for (const auto& combination : combs) {
+        std::unordered_set<std::string> deviceIds;
+        for (const auto &cameraDeviceId : combination.combination) {
+            deviceIds.insert(cameraDeviceId.c_str());
+        }
+        mConcurrentCameraIdCombinations.push_back(std::move(deviceIds));
+    }
+
+    return OK;
+}
+
+AidlProviderInfo::AidlDeviceInfo3::AidlDeviceInfo3(
+        const std::string& name,
+        const metadata_vendor_id_t tagId,
+        const std::string &id, uint16_t minorVersion,
+        const CameraResourceCost& resourceCost,
+        sp<CameraProviderManager::ProviderInfo> parentProvider,
+        const std::vector<std::string>& publicCameraIds,
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice> interface) :
+        DeviceInfo3(name, tagId, id, minorVersion, resourceCost, parentProvider, publicCameraIds) {
+
+    // Get camera characteristics and initialize flash unit availability
+    aidl::android::hardware::camera::device::CameraMetadata chars;
+    ::ndk::ScopedAStatus status = interface->getCameraCharacteristics(&chars);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error getting camera characteristics for device %s"
+                " to check for a flash unit: %s", __FUNCTION__, id.c_str(),
+                status.getMessage());
+        return;
+    }
+
+    std::vector<uint8_t> &metadata = chars.metadata;
+    camera_metadata_t *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
+    size_t expectedSize = metadata.size();
+    int resV = validate_camera_metadata_structure(buffer, &expectedSize);
+    if (resV == OK || resV == CAMERA_METADATA_VALIDATION_SHIFTED) {
+        set_camera_metadata_vendor_id(buffer, mProviderTagid);
+        mCameraCharacteristics = buffer;
+    } else {
+        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+        return;
+    }
+
+    if (mCameraCharacteristics.exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+        const auto &stateMap = mCameraCharacteristics.find(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
+        if ((stateMap.count > 0) && ((stateMap.count % 2) == 0)) {
+            for (size_t i = 0; i < stateMap.count; i += 2) {
+                mDeviceStateOrientationMap.emplace(stateMap.data.i64[i], stateMap.data.i64[i+1]);
+            }
+        } else {
+            ALOGW("%s: Invalid ANDROID_INFO_DEVICE_STATE_ORIENTATIONS map size: %zu", __FUNCTION__,
+                    stateMap.count);
+        }
+    }
+
+    mSystemCameraKind = getSystemCameraKind();
+
+    status_t res = fixupMonochromeTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to fix up monochrome tags based for older HAL version: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return;
+    }
+    auto stat = addDynamicDepthTags();
+    if (OK != stat) {
+        ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
+                stat);
+    }
+    res = deriveHeicTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
+    if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+        status_t status = addDynamicDepthTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
+                    __FUNCTION__, strerror(-status), status);
+        }
+
+        status = deriveHeicTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
+    }
+
+    res = addRotateCropTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
+    res = addPreCorrectionActiveArraySize();
+    if (OK != res) {
+        ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
+    res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
+            &mCameraCharacteristics, &mSupportNativeZoomRatio);
+    if (OK != res) {
+        ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
+    camera_metadata_entry flashAvailable =
+            mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
+    if (flashAvailable.count == 1 &&
+            flashAvailable.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
+        mHasFlashUnit = true;
+        // Fix up flash strength tags for devices without these keys.
+        res = fixupTorchStrengthTags();
+        if (OK != res) {
+            ALOGE("%s: Unable to add default ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL and"
+                    "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+        }
+    } else {
+        mHasFlashUnit = false;
+    }
+
+    camera_metadata_entry entry =
+            mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
+    if (entry.count == 1) {
+        mTorchDefaultStrengthLevel = entry.data.i32[0];
+    } else {
+        mTorchDefaultStrengthLevel = 0;
+    }
+    entry = mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL);
+    if (entry.count == 1) {
+        mTorchMaximumStrengthLevel = entry.data.i32[0];
+    } else {
+        mTorchMaximumStrengthLevel = 0;
+    }
+
+    mTorchStrengthLevel = 0;
+
+    queryPhysicalCameraIds();
+
+    // Get physical camera characteristics if applicable
+    if (mIsLogicalCamera) {
+        for (auto& id : mPhysicalIds) {
+            if (std::find(mPublicCameraIds.begin(), mPublicCameraIds.end(), id) !=
+                    mPublicCameraIds.end()) {
+                continue;
+            }
+
+            aidl::android::hardware::camera::device::CameraMetadata pChars;
+            status = interface->getPhysicalCameraCharacteristics(id, &pChars);
+            if (!status.isOk()) {
+                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
+                        __FUNCTION__, id.c_str(), mId.c_str(), status.getMessage());
+                return;
+            }
+            std::vector<uint8_t> &pMetadata = pChars.metadata;
+            camera_metadata_t *pBuffer =
+                    reinterpret_cast<camera_metadata_t*>(pMetadata.data());
+            size_t expectedSize = pMetadata.size();
+            int res = validate_camera_metadata_structure(pBuffer, &expectedSize);
+            if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
+                set_camera_metadata_vendor_id(pBuffer, mProviderTagid);
+                mPhysicalCameraCharacteristics[id] = pBuffer;
+            } else {
+                ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+                return;
+            }
+
+            res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
+                    &mPhysicalCameraCharacteristics[id], &mSupportNativeZoomRatio);
+            if (OK != res) {
+                ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            }
+        }
+    }
+
+    if (!kEnableLazyHal) {
+        // Save HAL reference indefinitely
+        mSavedInterface = interface;
+    }
+}
+
+status_t AidlProviderInfo::AidlDeviceInfo3::setTorchMode(bool enabled) {
+    const std::shared_ptr<camera::device::ICameraDevice> interface = startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    ::ndk::ScopedAStatus s = interface->setTorchMode(enabled);
+    if (!s.isOk()) {
+        ALOGE("%s Unable to set torch mode: %s", __FUNCTION__, s.getMessage());
+        return mapToStatusT(s);
+    }
+    return OK;
+}
+
+status_t AidlProviderInfo::AidlDeviceInfo3::turnOnTorchWithStrengthLevel(
+        int32_t torchStrength) {
+    const std::shared_ptr<camera::device::ICameraDevice> interface = startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+
+    ::ndk::ScopedAStatus s = interface->turnOnTorchWithStrengthLevel(torchStrength);
+    if (!s.isOk()) {
+        ALOGE("%s Unable to set torch mode strength %d : %s", __FUNCTION__, torchStrength,
+                s.getMessage());
+        return mapToStatusT(s);
+    }
+    mTorchStrengthLevel = torchStrength;
+    return OK;
+}
+
+status_t AidlProviderInfo::AidlDeviceInfo3::getTorchStrengthLevel(int32_t *torchStrength) {
+    if (torchStrength == nullptr) {
+        return BAD_VALUE;
+    }
+    const std::shared_ptr<camera::device::ICameraDevice> interface = startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+
+    ::ndk::ScopedAStatus status = interface->getTorchStrengthLevel(torchStrength);
+    if (!status.isOk()) {
+        ALOGE("%s: Couldn't get torch strength level: %s", __FUNCTION__, status.getMessage());
+        return mapToStatusT(status);
+    }
+    return OK;
+}
+
+std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
+AidlProviderInfo::AidlDeviceInfo3::startDeviceInterface() {
+    Mutex::Autolock l(mDeviceAvailableLock);
+    std::shared_ptr<camera::device::ICameraDevice> device;
+    ATRACE_CALL();
+    if (mSavedInterface == nullptr) {
+        sp<AidlProviderInfo> parentProvider =
+                static_cast<AidlProviderInfo *>(mParentProvider.promote().get());
+        if (parentProvider != nullptr) {
+            // Wait for lazy HALs to confirm device availability
+            if (parentProvider->isExternalLazyHAL() && !mIsDeviceAvailable) {
+                ALOGV("%s: Wait for external device to become available %s",
+                      __FUNCTION__,
+                      mId.c_str());
+
+                auto res = mDeviceAvailableSignal.waitRelative(mDeviceAvailableLock,
+                                                         kDeviceAvailableTimeout);
+                if (res != OK) {
+                    ALOGE("%s: Failed waiting for device to become available",
+                          __FUNCTION__);
+                    return nullptr;
+                }
+            }
+
+            device = parentProvider->startDeviceInterface(mName);
+        }
+    } else {
+        device = mSavedInterface;
+    }
+    return device;
+}
+
+status_t AidlProviderInfo::AidlDeviceInfo3::dumpState(int fd) {
+    const std::shared_ptr<camera::device::ICameraDevice> interface = startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    const char *args = nullptr;
+    auto ret = interface->dump(fd, &args, /*numArgs*/0);
+    if (ret != OK) {
+        return ret;
+    }
+    return OK;
+}
+
+status_t AidlProviderInfo::AidlDeviceInfo3::isSessionConfigurationSupported(
+        const SessionConfiguration &configuration, bool overrideForPerfClass,
+        camera3::metadataGetter getMetadata, bool *status) {
+
+    camera::device::StreamConfiguration streamConfiguration;
+    bool earlyExit = false;
+    auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
+            String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
+            streamConfiguration, overrideForPerfClass, &earlyExit);
+
+    if (!bRes.isOk()) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (earlyExit) {
+        *status = false;
+        return OK;
+    }
+
+    const std::shared_ptr<camera::device::ICameraDevice> interface =
+            startDeviceInterface();
+
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+
+    ::ndk::ScopedAStatus ret =
+        interface->isStreamCombinationSupported(streamConfiguration, status);
+    if (!ret.isOk()) {
+        *status = false;
+        ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.getMessage());
+        return mapToStatusT(ret);
+    }
+    return OK;
+
+}
+
+status_t AidlProviderInfo::convertToAidlHALStreamCombinationAndCameraIdsLocked(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion,
+        std::vector<camera::provider::CameraIdAndStreamCombination>
+                *halCameraIdsAndStreamCombinations,
+        bool *earlyExit) {
+    binder::Status bStatus = binder::Status::ok();
+    std::vector<camera::provider::CameraIdAndStreamCombination> halCameraIdsAndStreamsV;
+    bool shouldExit = false;
+    status_t res = OK;
+    for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
+        const std::string& cameraId = cameraIdAndSessionConfig.mCameraId;
+        camera::device::StreamConfiguration streamConfiguration;
+        CameraMetadata deviceInfo;
+        bool overrideForPerfClass =
+                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+                        perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        if (res != OK) {
+            return res;
+        }
+        camera3::metadataGetter getMetadata =
+                [this](const String8 &id, bool overrideForPerfClass) {
+                    CameraMetadata physicalDeviceInfo;
+                    mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
+                                                   &physicalDeviceInfo);
+                    return physicalDeviceInfo;
+                };
+        std::vector<std::string> physicalCameraIds;
+        mManager->isLogicalCameraLocked(cameraId, &physicalCameraIds);
+        bStatus =
+            SessionConfigurationUtils::convertToHALStreamCombination(
+                    cameraIdAndSessionConfig.mSessionConfiguration,
+                    String8(cameraId.c_str()), deviceInfo, getMetadata,
+                    physicalCameraIds, streamConfiguration,
+                    overrideForPerfClass, &shouldExit);
+        if (!bStatus.isOk()) {
+            ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+        if (shouldExit) {
+            *earlyExit = true;
+            return OK;
+        }
+        camera::provider::CameraIdAndStreamCombination halCameraIdAndStream;
+        halCameraIdAndStream.cameraId = cameraId;
+        halCameraIdAndStream.streamConfiguration = streamConfiguration;
+        halCameraIdsAndStreamsV.push_back(halCameraIdAndStream);
+    }
+    *halCameraIdsAndStreamCombinations = halCameraIdsAndStreamsV;
+    return OK;
+}
+
+status_t AidlProviderInfo::isConcurrentSessionConfigurationSupported(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion, bool *isSupported) {
+
+    std::vector<camera::provider::CameraIdAndStreamCombination> halCameraIdsAndStreamCombinations;
+    bool knowUnsupported = false;
+    status_t res = convertToAidlHALStreamCombinationAndCameraIdsLocked(
+            cameraIdsAndSessionConfigs, perfClassPrimaryCameraIds,
+            targetSdkVersion, &halCameraIdsAndStreamCombinations, &knowUnsupported);
+    if (res != OK) {
+        ALOGE("%s unable to convert session configurations provided to HAL stream "
+                "combinations", __FUNCTION__);
+        return res;
+    }
+    if (knowUnsupported) {
+        // We got to know the streams aren't valid before doing the HAL
+        // call itself.
+        *isSupported = false;
+        return OK;
+    }
+
+    // Check if the provider is currently active - don't start it up for this notification
+    auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.lock();
+    if (interface == nullptr) {
+        // TODO: This might be some other problem
+        return INVALID_OPERATION;
+    }
+    ::ndk::ScopedAStatus status = interface->isConcurrentStreamCombinationSupported(
+            halCameraIdsAndStreamCombinations, isSupported);
+    if (!status.isOk()) {
+        *isSupported = false;
+        ALOGE("%s: hal interface session configuration query failed", __FUNCTION__);
+        return mapToStatusT(status);
+    }
+
+    return OK;
+}
+
+} //namespace android
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
new file mode 100644
index 0000000..97a8fed
--- /dev/null
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_AIDLPROVIDERINFOH
+#define ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_AIDLPROVIDERINFOH
+
+#include "common/CameraProviderManager.h"
+
+#include <aidl/android/hardware/camera/common/Status.h>
+#include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h>
+#include <aidl/android/hardware/camera/device/ICameraDevice.h>
+
+namespace android {
+
+struct AidlProviderInfo : public CameraProviderManager::ProviderInfo {
+    // Current overall Android device physical status
+    int64_t mDeviceState;
+
+    // This pointer is used to keep a reference to the ICameraProvider that was last accessed.
+    std::weak_ptr<aidl::android::hardware::camera::provider::ICameraProvider> mActiveInterface;
+
+    std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider> mSavedInterface;
+
+    AidlProviderInfo(
+            const std::string &providerName,
+            const std::string &providerInstance,
+            CameraProviderManager *manager);
+
+    static status_t mapToStatusT(const ndk::ScopedAStatus& s);
+
+    // Start camera device interface, start the camera provider process for lazy
+    // hals, if needed
+    status_t initializeAidlProvider(
+        std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>& interface,
+        int64_t currentDeviceState);
+
+    static void binderDied(void *cookie);
+
+    virtual IPCTransport getIPCTransport() override {return IPCTransport::AIDL;}
+
+    const std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+    startProviderInterface();
+
+    virtual status_t setUpVendorTags() override;
+    virtual status_t notifyDeviceStateChange(int64_t newDeviceState) override;
+
+    virtual bool successfullyStartedProviderInterface() override;
+
+    virtual int64_t getDeviceState() override { return mDeviceState; };
+
+    /**
+     * Query the camera provider for concurrent stream configuration support
+     */
+    virtual status_t isConcurrentSessionConfigurationSupported(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion, bool *isSupported) override;
+
+    std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
+            startDeviceInterface(const std::string &deviceName);
+
+    // AIDL ICameraProviderCallback interface - these lock the parent
+    // mInterfaceMutex
+
+    ::ndk::ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
+            ::aidl::android::hardware::camera::common::CameraDeviceStatus newStatus);
+
+    ::ndk::ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
+            ::aidl::android::hardware::camera::common::TorchModeStatus newStatus);
+
+    ::ndk::ScopedAStatus physicalCameraDeviceStatusChange(
+            const std::string& cameraDeviceName,
+            const std::string& physicalCameraDeviceName,
+            ::aidl::android::hardware::camera::common::CameraDeviceStatus newStatus);
+
+    struct AidlProviderCallbacks :
+            public aidl::android::hardware::camera::provider::BnCameraProviderCallback {
+        AidlProviderCallbacks(wp<AidlProviderInfo> parent) : mParent(parent) { }
+        virtual ::ndk::ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName,
+                ::aidl::android::hardware::camera::common::CameraDeviceStatus newStatus) override;
+
+        virtual ::ndk::ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName,
+                ::aidl::android::hardware::camera::common::TorchModeStatus newStatus) override;
+
+        virtual ::ndk::ScopedAStatus physicalCameraDeviceStatusChange(
+                const std::string& cameraDeviceName,
+                const std::string& physicalCameraDeviceName,
+                ::aidl::android::hardware::camera::common::CameraDeviceStatus newStatus) override;
+
+       private:
+        wp<AidlProviderInfo> mParent = nullptr;
+
+    };
+
+    struct AidlDeviceInfo3 : public CameraProviderManager::ProviderInfo::DeviceInfo3 {
+
+        //TODO: fix init
+        const hardware::hidl_version mVersion = hardware::hidl_version{3, 2};
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
+                mSavedInterface = nullptr;
+
+        AidlDeviceInfo3(const std::string& , const metadata_vendor_id_t ,
+                const std::string &, uint16_t ,
+                const CameraResourceCost& ,
+                sp<ProviderInfo> ,
+                const std::vector<std::string>& ,
+                std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>);
+
+        ~AidlDeviceInfo3() {}
+
+        virtual status_t setTorchMode(bool enabled) override;
+        virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) override;
+        virtual status_t getTorchStrengthLevel(int32_t *torchStrength) override;
+
+        virtual status_t dumpState(int fd) override;
+
+        virtual status_t isSessionConfigurationSupported(
+                const SessionConfiguration &/*configuration*/,
+                bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
+                bool *status /*out*/) override;
+
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
+                startDeviceInterface();
+    };
+
+ private:
+
+    // Instantiates an AidlDeviceInfo3 for the given camera device via the AIDL provider.
+    virtual std::unique_ptr<DeviceInfo> initializeDeviceInfo(const std::string &,
+            const metadata_vendor_id_t , const std::string &,
+            uint16_t ) override;
+
+    virtual status_t reCacheConcurrentStreamingCameraIdsLocked() override;
+
+    // Expects mLock to be locked
+    status_t getConcurrentCameraIdsInternalLocked(
+        std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider> &interface);
+
+    // Expects mManager->mInterfaceMutex to be locked
+    status_t convertToAidlHALStreamCombinationAndCameraIdsLocked(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion,
+        std::vector<aidl::android::hardware::camera::provider::CameraIdAndStreamCombination>
+                *halCameraIdsAndStreamCombinations,
+        bool *earlyExit);
+    std::shared_ptr<AidlProviderCallbacks> mCallbacks = nullptr;
+    ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+};
+
+} // namespace android
+#endif
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
new file mode 100644
index 0000000..bded9aa
--- /dev/null
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -0,0 +1,1050 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "HidlProviderInfo.h"
+#include "common/HalConversionsTemplated.h"
+#include "common/CameraProviderInfoTemplated.h"
+
+#include <cutils/properties.h>
+
+#include <android/hardware/ICameraService.h>
+#include <camera_metadata_hidden.h>
+
+#include "device3/ZoomRatioMapper.h"
+#include <utils/SessionConfigurationUtilsHidl.h>
+#include <utils/Trace.h>
+
+#include <android/hardware/camera/device/3.7/ICameraDevice.h>
+
+namespace {
+const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
+} // anonymous namespace
+
+namespace android {
+
+using namespace android::camera3;
+using namespace hardware::camera;
+using hardware::camera::common::V1_0::VendorTagSection;
+using hardware::camera::common::V1_0::Status;
+using hardware::camera::provider::V2_7::CameraIdAndStreamCombination;
+using hardware::camera2::utils::CameraIdAndSessionConfiguration;
+
+
+using StatusListener = CameraProviderManager::StatusListener;
+using HalDeviceStatusType = android::hardware::camera::common::V1_0::CameraDeviceStatus;
+
+using hardware::camera::provider::V2_5::DeviceState;
+using hardware::ICameraService;
+
+status_t HidlProviderInfo::mapToStatusT(const Status& s)  {
+    switch(s) {
+        case Status::OK:
+            return OK;
+        case Status::ILLEGAL_ARGUMENT:
+            return BAD_VALUE;
+        case Status::CAMERA_IN_USE:
+            return -EBUSY;
+        case Status::MAX_CAMERAS_IN_USE:
+            return -EUSERS;
+        case Status::METHOD_NOT_SUPPORTED:
+            return UNKNOWN_TRANSACTION;
+        case Status::OPERATION_NOT_SUPPORTED:
+            return INVALID_OPERATION;
+        case Status::CAMERA_DISCONNECTED:
+            return DEAD_OBJECT;
+        case Status::INTERNAL_ERROR:
+            return INVALID_OPERATION;
+    }
+    ALOGW("Unexpected HAL status code %d", s);
+    return INVALID_OPERATION;
+}
+
+static hardware::hidl_bitfield<DeviceState> mapToHidlDeviceState(int64_t newState) {
+    hardware::hidl_bitfield<DeviceState> newDeviceState{};
+    if (newState & ICameraService::DEVICE_STATE_BACK_COVERED) {
+        newDeviceState |= DeviceState::BACK_COVERED;
+    }
+    if (newState & ICameraService::DEVICE_STATE_FRONT_COVERED) {
+        newDeviceState |= DeviceState::FRONT_COVERED;
+    }
+    if (newState & ICameraService::DEVICE_STATE_FOLDED) {
+        newDeviceState |= DeviceState::FOLDED;
+    }
+    // Only map vendor bits directly
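+    // The upper 32 bits of the framework device state are reserved for vendor-defined states
+    // and are forwarded to the HAL without translation.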
+    uint64_t vendorBits = static_cast<uint64_t>(newState) & 0xFFFFFFFF00000000l;
+    newDeviceState |= vendorBits;
+
+    ALOGV("%s: New device state 0x%" PRIx64, __FUNCTION__, newDeviceState);
+    return newDeviceState;
+}
+
+const char* statusToString(const Status& s) {
+    switch(s) {
+        case Status::OK:
+            return "OK";
+        case Status::ILLEGAL_ARGUMENT:
+            return "ILLEGAL_ARGUMENT";
+        case Status::CAMERA_IN_USE:
+            return "CAMERA_IN_USE";
+        case Status::MAX_CAMERAS_IN_USE:
+            return "MAX_CAMERAS_IN_USE";
+        case Status::METHOD_NOT_SUPPORTED:
+            return "METHOD_NOT_SUPPORTED";
+        case Status::OPERATION_NOT_SUPPORTED:
+            return "OPERATION_NOT_SUPPORTED";
+        case Status::CAMERA_DISCONNECTED:
+            return "CAMERA_DISCONNECTED";
+        case Status::INTERNAL_ERROR:
+            return "INTERNAL_ERROR";
+    }
+    ALOGW("Unexpected HAL status code %d", s);
+    return "UNKNOWN_ERROR";
+}
+
+status_t HidlProviderInfo::initializeHidlProvider(
+        sp<provider::V2_4::ICameraProvider>& interface,
+        int64_t currentDeviceState) {
+    status_t res = parseProviderName(mProviderName, &mType, &mId);
+    if (res != OK) {
+        ALOGE("%s: Invalid provider name, ignoring", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    ALOGI("Connecting to new camera provider: %s, isRemote? %d",
+            mProviderName.c_str(), interface->isRemote());
+
+    // Determine minor version
+    mMinorVersion = 4;
+    auto cast2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
+    sp<provider::V2_6::ICameraProvider> interface2_6 = nullptr;
+    if (cast2_6.isOk()) {
+        interface2_6 = cast2_6;
+        if (interface2_6 != nullptr) {
+            mMinorVersion = 6;
+        }
+    }
+    // We need to check again since cast2_6.isOk() succeeds even if the provider
+    // version isn't actually 2.6.
+    if (interface2_6 == nullptr){
+        auto cast2_5 =
+                provider::V2_5::ICameraProvider::castFrom(interface);
+        sp<provider::V2_5::ICameraProvider> interface2_5 = nullptr;
+        if (cast2_5.isOk()) {
+            interface2_5 = cast2_5;
+            if (interface2_5 != nullptr) {
+                mMinorVersion = 5;
+            }
+        }
+    } else {
+        auto cast2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+        if (cast2_7.isOk()) {
+            sp<provider::V2_7::ICameraProvider> interface2_7 = cast2_7;
+            if (interface2_7 != nullptr) {
+                mMinorVersion = 7;
+            }
+        }
+    }
+
+    // cameraDeviceStatusChange callbacks may be called (and causing new devices added)
+    // before setCallback returns
+    hardware::Return<Status> status = interface->setCallback(this);
+    if (!status.isOk()) {
+        ALOGE("%s: Transaction error setting up callbacks with camera provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), status.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to register callbacks with camera provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return mapToStatusT(status);
+    }
+
+    hardware::Return<bool> linked = interface->linkToDeath(this, /*cookie*/ mId);
+    if (!linked.isOk()) {
+        ALOGE("%s: Transaction error in linking to camera provider '%s' death: %s",
+                __FUNCTION__, mProviderName.c_str(), linked.description().c_str());
+        return DEAD_OBJECT;
+    } else if (!linked) {
+        ALOGW("%s: Unable to link to provider '%s' death notifications",
+                __FUNCTION__, mProviderName.c_str());
+    }
+
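+    // Without lazy HALs, the strong reference below pins the provider process for the lifetime
+    // of the camera service; with lazy HALs only a weak reference is kept so the provider can
+    // shut down when it is not in use.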
+    if (!kEnableLazyHal) {
+        // Save HAL reference indefinitely
+        mSavedInterface = interface;
+    } else {
+        mActiveInterface = interface;
+    }
+
+    ALOGV("%s: Setting device state for %s: 0x%" PRIx64,
+            __FUNCTION__, mProviderName.c_str(), mDeviceState);
+    notifyDeviceStateChange(currentDeviceState);
+
+    res = setUpVendorTags();
+    if (res != OK) {
+        ALOGE("%s: Unable to set up vendor tags from provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return res;
+    }
+
+    // Get initial list of camera devices, if any
+    std::vector<std::string> devices;
+    hardware::Return<void> ret = interface->getCameraIdList([&status, this, &devices](
+            Status idStatus,
+            const hardware::hidl_vec<hardware::hidl_string>& cameraDeviceNames) {
+        status = idStatus;
+        if (status == Status::OK) {
+            for (auto& name : cameraDeviceNames) {
+                uint16_t major, minor;
+                std::string type, id;
+                status_t res = parseDeviceName(name, &major, &minor, &type, &id);
+                if (res != OK) {
+                    ALOGE("%s: Error parsing deviceName: %s: %d", __FUNCTION__, name.c_str(), res);
+                    status = Status::INTERNAL_ERROR;
+                } else {
+                    devices.push_back(name);
+                    mProviderPublicCameraIds.push_back(id);
+                }
+            }
+        } });
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error in getting camera ID list from provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), ret.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to query for camera devices from provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return mapToStatusT(status);
+    }
+
+    // Get list of concurrent streaming camera device combinations
+    if (mMinorVersion >= 6) {
+        res = getConcurrentCameraIdsInternalLocked(interface2_6);
+        if (res != OK) {
+            return res;
+        }
+    }
+
+    ret = interface->isSetTorchModeSupported(
+        [this](auto status, bool supported) {
+            if (status == Status::OK) {
+                mSetTorchModeSupported = supported;
+            }
+        });
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error checking torch mode support '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), ret.description().c_str());
+        return DEAD_OBJECT;
+    }
+
+    mIsRemote = interface->isRemote();
+
+    initializeProviderInfoCommon(devices);
+
+    return OK;
+}
+
+status_t HidlProviderInfo::setUpVendorTags() {
+    if (mVendorTagDescriptor != nullptr)
+        return OK;
+
+    hardware::hidl_vec<VendorTagSection> vts;
+    Status status;
+    hardware::Return<void> ret;
+    const sp<hardware::camera::provider::V2_4::ICameraProvider> interface =
+            startProviderInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    ret = interface->getVendorTags(
+        [&](auto s, const auto& vendorTagSecs) {
+            status = s;
+            if (s == Status::OK) {
+                vts = vendorTagSecs;
+            }
+    });
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error getting vendor tags from provider '%s': %s",
+                __FUNCTION__, mProviderName.c_str(), ret.description().c_str());
+        return DEAD_OBJECT;
+    }
+    if (status != Status::OK) {
+        return mapToStatusT(status);
+    }
+
+    // Read all vendor tag definitions into a descriptor
+    status_t res;
+    if ((res = IdlVendorTagDescriptor::createDescriptorFromIdl<
+                hardware::hidl_vec<hardware::camera::common::V1_0::VendorTagSection>,
+                        hardware::camera::common::V1_0::VendorTagSection>(vts,
+                                /*out*/mVendorTagDescriptor))
+            != OK) {
+        ALOGE("%s: Could not generate descriptor from vendor tag operations,"
+                "received error %s (%d). Camera clients will not be able to use"
+                "vendor tags", __FUNCTION__, strerror(res), res);
+        return res;
+    }
+
+    return OK;
+}
+
+status_t HidlProviderInfo::notifyDeviceStateChange(int64_t newDeviceState) {
+    mDeviceState = mapToHidlDeviceState(newDeviceState);
+    if (mMinorVersion >= 5) {
+        // Check if the provider is currently active - not going to start it for this notification
+        auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.promote();
+        if (interface != nullptr) {
+            // Send current device state
+            auto castResult = provider::V2_5::ICameraProvider::castFrom(interface);
+            if (castResult.isOk()) {
+                sp<provider::V2_5::ICameraProvider> interface_2_5 = castResult;
+                if (interface_2_5 != nullptr) {
+                    interface_2_5->notifyDeviceStateChange(mDeviceState);
+                }
+            }
+        }
+    }
+    return OK;
+}
+
+sp<device::V3_2::ICameraDevice>
+HidlProviderInfo::startDeviceInterface(const std::string &name) {
+    Status status;
+    sp<device::V3_2::ICameraDevice> cameraInterface;
+    hardware::Return<void> ret;
+    const sp<provider::V2_4::ICameraProvider> interface = startProviderInterface();
+    if (interface == nullptr) {
+        return nullptr;
+    }
+    ret = interface->getCameraDeviceInterface_V3_x(name, [&status, &cameraInterface](
+        Status s, sp<device::V3_2::ICameraDevice> interface) {
+                status = s;
+                cameraInterface = interface;
+            });
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error trying to obtain interface for camera device %s: %s",
+                __FUNCTION__, name.c_str(), ret.description().c_str());
+        return nullptr;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
+                name.c_str(), statusToString(status));
+        return nullptr;
+    }
+    return cameraInterface;
+}
+
+bool HidlProviderInfo::successfullyStartedProviderInterface() {
+    return startProviderInterface() != nullptr;
+}
+
+const sp<provider::V2_4::ICameraProvider>
+HidlProviderInfo::startProviderInterface() {
+    ATRACE_CALL();
+    ALOGV("Request to start camera provider: %s", mProviderName.c_str());
+    if (mSavedInterface != nullptr) {
+        return mSavedInterface;
+    }
+    if (!kEnableLazyHal) {
+        ALOGE("Bad provider state! Should not be here on a non-lazy HAL!");
+        return nullptr;
+    }
+
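+    // Lazy HAL path: the provider process may have exited since it was last used, so first try
+    // to obtain a live instance without starting it, and only fall back to getService() (which
+    // launches the provider) when that fails.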
+    auto interface = mActiveInterface.promote();
+    if (interface == nullptr) {
+        // Try to get service without starting
+        interface = mManager->mHidlServiceProxy->tryGetService(mProviderName);
+        if (interface == nullptr) {
+            ALOGV("Camera provider actually needs restart, calling getService(%s)",
+                  mProviderName.c_str());
+            interface = mManager->mHidlServiceProxy->getService(mProviderName);
+
+            // Set all devices as ENUMERATING, provider should update status
+            // to PRESENT after initializing.
+            // This avoids failing getCameraDeviceInterface_V3_x before devices
+            // are ready.
+            for (auto& device : mDevices) {
+              device->mIsDeviceAvailable = false;
+            }
+
+            interface->setCallback(this);
+            hardware::Return<bool>
+                linked = interface->linkToDeath(this, /*cookie*/ mId);
+            if (!linked.isOk()) {
+              ALOGE(
+                  "%s: Transaction error in linking to camera provider '%s' death: %s",
+                  __FUNCTION__,
+                  mProviderName.c_str(),
+                  linked.description().c_str());
+              mManager->removeProvider(mProviderName);
+              return nullptr;
+            } else if (!linked) {
+              ALOGW("%s: Unable to link to provider '%s' death notifications",
+                    __FUNCTION__, mProviderName.c_str());
+            }
+            // Send current device state
+            if (mMinorVersion >= 5) {
+              auto castResult =
+                  provider::V2_5::ICameraProvider::castFrom(interface);
+              if (castResult.isOk()) {
+                sp<provider::V2_5::ICameraProvider> interface_2_5 = castResult;
+                if (interface_2_5 != nullptr) {
+                  ALOGV("%s: Initial device state for %s: 0x %" PRIx64,
+                        __FUNCTION__, mProviderName.c_str(), mDeviceState);
+                  interface_2_5->notifyDeviceStateChange(mDeviceState);
+                }
+              }
+            }
+        }
+        mActiveInterface = interface;
+    } else {
+        ALOGV("Camera provider (%s) already in use. Re-using instance.",
+              mProviderName.c_str());
+    }
+
+    return interface;
+}
+
+hardware::Return<void> HidlProviderInfo::cameraDeviceStatusChange(
+        const hardware::hidl_string& cameraDeviceName,
+        HalDeviceStatusType newStatus) {
+    cameraDeviceStatusChangeInternal(cameraDeviceName, HalToFrameworkCameraDeviceStatus(newStatus));
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlProviderInfo::physicalCameraDeviceStatusChange(
+        const hardware::hidl_string& cameraDeviceName,
+        const hardware::hidl_string& physicalCameraDeviceName,
+        HalDeviceStatusType newStatus) {
+    physicalCameraDeviceStatusChangeInternal(cameraDeviceName, physicalCameraDeviceName,
+            HalToFrameworkCameraDeviceStatus(newStatus));
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlProviderInfo::torchModeStatusChange(
+        const hardware::hidl_string& cameraDeviceName,
+        hardware::camera::common::V1_0::TorchModeStatus newStatus) {
+
+    torchModeStatusChangeInternal(cameraDeviceName, HalToFrameworkTorchModeStatus(newStatus));
+    return hardware::Void();
+}
+
+void HidlProviderInfo::serviceDied(uint64_t cookie,
+        const wp<hidl::base::V1_0::IBase>& who) {
+    (void) who;
+    ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
+    if (cookie != mId) {
+        ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
+                __FUNCTION__, cookie, mId);
+    }
+    mManager->removeProvider(mProviderInstance);
+}
+
+std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
+    HidlProviderInfo::initializeDeviceInfo(
+        const std::string &name, const metadata_vendor_id_t tagId,
+        const std::string &id, uint16_t minorVersion) {
+    Status status;
+
+    auto cameraInterface = startDeviceInterface(name);
+    if (cameraInterface == nullptr) return nullptr;
+
+    common::V1_0::CameraResourceCost resourceCost;
+    cameraInterface->getResourceCost([&status, &resourceCost](
+        Status s, common::V1_0::CameraResourceCost cost) {
+                status = s;
+                resourceCost = cost;
+            });
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to obtain resource costs for camera device %s: %s", __FUNCTION__,
+                name.c_str(), statusToString(status));
+        return nullptr;
+    }
+
+    for (auto& conflictName : resourceCost.conflictingDevices) {
+        uint16_t major, minor;
+        std::string type, id;
+        status_t res = parseDeviceName(conflictName, &major, &minor, &type, &id);
+        if (res != OK) {
+            ALOGE("%s: Failed to parse conflicting device %s", __FUNCTION__, conflictName.c_str());
+            return nullptr;
+        }
+        conflictName = id;
+    }
+
+    return std::unique_ptr<DeviceInfo3>(
+        new HidlDeviceInfo3(name, tagId, id, minorVersion, HalToFrameworkResourceCost(resourceCost),
+                this, mProviderPublicCameraIds, cameraInterface));
+}
+
+status_t HidlProviderInfo::reCacheConcurrentStreamingCameraIdsLocked() {
+    if (mMinorVersion < 6) {
+        // Unsupported operation, nothing to do here
+        return OK;
+    }
+    // Check if the provider is currently active - not going to start it up for this notification
+    auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.promote();
+    if (interface == nullptr) {
+        ALOGE("%s: camera provider interface for %s is not valid", __FUNCTION__,
+                mProviderName.c_str());
+        return INVALID_OPERATION;
+    }
+    auto castResult = provider::V2_6::ICameraProvider::castFrom(interface);
+
+    if (castResult.isOk()) {
+        sp<provider::V2_6::ICameraProvider> interface2_6 = castResult;
+        if (interface2_6 != nullptr) {
+            return getConcurrentCameraIdsInternalLocked(interface2_6);
+        } else {
+            // This should not happen since mMinorVersion >= 6
+            ALOGE("%s: mMinorVersion was >= 6, but interface2_6 was nullptr", __FUNCTION__);
+            return UNKNOWN_ERROR;
+        }
+    }
+    return OK;
+}
+
+status_t HidlProviderInfo::getConcurrentCameraIdsInternalLocked(
+        sp<provider::V2_6::ICameraProvider> &interface2_6) {
+    if (interface2_6 == nullptr) {
+        ALOGE("%s: null interface provided", __FUNCTION__);
+        return BAD_VALUE;
+    }
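+    // Each combination reported by the HAL is a set of camera ids that may stream concurrently;
+    // the cached list is rebuilt from scratch on every query.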
+    Status status = Status::OK;
+    hardware::Return<void> ret =
+            interface2_6->getConcurrentStreamingCameraIds([&status, this](
+            Status concurrentIdStatus, // TODO: Move all instances of hidl_string to 'using'
+            const hardware::hidl_vec<hardware::hidl_vec<hardware::hidl_string>>&
+                        cameraDeviceIdCombinations) {
+            status = concurrentIdStatus;
+            if (status == Status::OK) {
+                mConcurrentCameraIdCombinations.clear();
+                for (auto& combination : cameraDeviceIdCombinations) {
+                    std::unordered_set<std::string> deviceIds;
+                    for (auto &cameraDeviceId : combination) {
+                        deviceIds.insert(cameraDeviceId.c_str());
+                    }
+                    mConcurrentCameraIdCombinations.push_back(std::move(deviceIds));
+                }
+            } });
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error in getting concurrent camera ID list from provider '%s'",
+                __FUNCTION__, mProviderName.c_str());
+        return DEAD_OBJECT;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to query for camera devices from provider '%s'",
+                    __FUNCTION__, mProviderName.c_str());
+        return mapToStatusT(status);
+    }
+    return OK;
+}
+
+HidlProviderInfo::HidlDeviceInfo3::HidlDeviceInfo3(
+        const std::string& name,
+        const metadata_vendor_id_t tagId,
+        const std::string &id, uint16_t minorVersion,
+        const CameraResourceCost& resourceCost,
+        sp<CameraProviderManager::ProviderInfo> parentProvider,
+        const std::vector<std::string>& publicCameraIds,
+        sp<hardware::camera::device::V3_2::ICameraDevice> interface) :
+        DeviceInfo3(name, tagId, id, minorVersion, resourceCost, parentProvider, publicCameraIds) {
+
+    // Get camera characteristics and initialize flash unit availability
+    Status status;
+    hardware::Return<void> ret;
+    ret = interface->getCameraCharacteristics([&status, this](Status s,
+                    device::V3_2::CameraMetadata metadata) {
+                status = s;
+                if (s == Status::OK) {
+                    camera_metadata_t *buffer =
+                            reinterpret_cast<camera_metadata_t*>(metadata.data());
+                    size_t expectedSize = metadata.size();
+                    int res = validate_camera_metadata_structure(buffer, &expectedSize);
+                    if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
+                        set_camera_metadata_vendor_id(buffer, mProviderTagid);
+                        mCameraCharacteristics = buffer;
+                    } else {
+                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+                        status = Status::INTERNAL_ERROR;
+                    }
+                }
+            });
+    if (!ret.isOk()) {
+        ALOGE("%s: Transaction error getting camera characteristics for device %s"
+                " to check for a flash unit: %s", __FUNCTION__, id.c_str(),
+                ret.description().c_str());
+        return;
+    }
+    if (status != Status::OK) {
+        ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
+                __FUNCTION__, id.c_str(), statusToString(status), status);
+        return;
+    }
+
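+    // ANDROID_INFO_DEVICE_STATE_ORIENTATIONS is a flattened list of (device state, orientation)
+    // int64 pairs, so the entry count must be even.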
+    if (mCameraCharacteristics.exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+        const auto &stateMap = mCameraCharacteristics.find(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
+        if ((stateMap.count > 0) && ((stateMap.count % 2) == 0)) {
+            for (size_t i = 0; i < stateMap.count; i += 2) {
+                mDeviceStateOrientationMap.emplace(stateMap.data.i64[i], stateMap.data.i64[i+1]);
+            }
+        } else {
+            ALOGW("%s: Invalid ANDROID_INFO_DEVICE_STATE_ORIENTATIONS map size: %zu", __FUNCTION__,
+                    stateMap.count);
+        }
+    }
+
+    mSystemCameraKind = getSystemCameraKind();
+
+    status_t res = fixupMonochromeTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to fix up monochrome tags based for older HAL version: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return;
+    }
+    auto stat = addDynamicDepthTags();
+    if (OK != stat) {
+        ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
+                stat);
+    }
+    res = deriveHeicTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
+    if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+        status_t status = addDynamicDepthTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
+                    __FUNCTION__, strerror(-status), status);
+        }
+
+        status = deriveHeicTags(/*maxResolution*/true);
+        if (OK != status) {
+            ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
+                    "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+        }
+    }
+
+    res = addRotateCropTags();
+    if (OK != res) {
+        ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
+    res = addPreCorrectionActiveArraySize();
+    if (OK != res) {
+        ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+    }
+    res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
+            &mCameraCharacteristics, &mSupportNativeZoomRatio);
+    if (OK != res) {
+        ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
+    camera_metadata_entry flashAvailable =
+            mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
+    if (flashAvailable.count == 1 &&
+            flashAvailable.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
+        mHasFlashUnit = true;
+        // Fix up flash strength tags for devices without these keys.
+        res = fixupTorchStrengthTags();
+        if (OK != res) {
+            ALOGE("%s: Unable to add default ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL and"
+                    "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+        }
+    } else {
+        mHasFlashUnit = false;
+    }
+
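+    // The flash strength keys are optional; when a key is absent the corresponding level
+    // defaults to 0.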
+    camera_metadata_entry entry =
+            mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
+    if (entry.count == 1) {
+        mTorchDefaultStrengthLevel = entry.data.i32[0];
+    } else {
+        mTorchDefaultStrengthLevel = 0;
+    }
+    entry = mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL);
+    if (entry.count == 1) {
+        mTorchMaximumStrengthLevel = entry.data.i32[0];
+    } else {
+        mTorchMaximumStrengthLevel = 0;
+    }
+
+    mTorchStrengthLevel = 0;
+
+    queryPhysicalCameraIds();
+
+    // Get physical camera characteristics if applicable
+    auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
+    if (!castResult.isOk()) {
+        ALOGV("%s: Unable to convert ICameraDevice instance to version 3.5", __FUNCTION__);
+        return;
+    }
+    sp<device::V3_5::ICameraDevice> interface_3_5 = castResult;
+    if (interface_3_5 == nullptr) {
+        ALOGE("%s: Converted ICameraDevice instance to nullptr", __FUNCTION__);
+        return;
+    }
+
+    if (mIsLogicalCamera) {
+        for (auto& id : mPhysicalIds) {
+            if (std::find(mPublicCameraIds.begin(), mPublicCameraIds.end(), id) !=
+                    mPublicCameraIds.end()) {
+                continue;
+            }
+
+            hardware::hidl_string hidlId(id);
+            ret = interface_3_5->getPhysicalCameraCharacteristics(hidlId,
+                    [&status, &id, this](Status s, device::V3_2::CameraMetadata metadata) {
+                status = s;
+                if (s == Status::OK) {
+                    camera_metadata_t *buffer =
+                            reinterpret_cast<camera_metadata_t*>(metadata.data());
+                    size_t expectedSize = metadata.size();
+                    int res = validate_camera_metadata_structure(buffer, &expectedSize);
+                    if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
+                        set_camera_metadata_vendor_id(buffer, mProviderTagid);
+                        mPhysicalCameraCharacteristics[id] = buffer;
+                    } else {
+                        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+                        status = Status::INTERNAL_ERROR;
+                    }
+                }
+            });
+
+            if (!ret.isOk()) {
+                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
+                        __FUNCTION__, id.c_str(), mId.c_str(), ret.description().c_str());
+                return;
+            }
+            if (status != Status::OK) {
+                ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
+                        __FUNCTION__, id.c_str(), mId.c_str(),
+                        statusToString(status), status);
+                return;
+            }
+
+            res = camera3::ZoomRatioMapper::overrideZoomRatioTags(
+                    &mPhysicalCameraCharacteristics[id], &mSupportNativeZoomRatio);
+            if (OK != res) {
+                ALOGE("%s: Unable to override zoomRatio related tags: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            }
+        }
+    }
+
+    if (!kEnableLazyHal) {
+        // Save HAL reference indefinitely
+        mSavedInterface = interface;
+    }
+}
+
+status_t HidlProviderInfo::HidlDeviceInfo3::setTorchMode(bool enabled) {
+    using hardware::camera::common::V1_0::TorchMode;
+    const sp<hardware::camera::device::V3_2::ICameraDevice> interface = startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    Status s = interface->setTorchMode(enabled ? TorchMode::ON : TorchMode::OFF);
+    return mapToStatusT(s);
+}
+
+status_t HidlProviderInfo::HidlDeviceInfo3::turnOnTorchWithStrengthLevel(
+        int32_t /*torchStrengthLevel*/) {
+    ALOGE("%s HIDL does not support turning on torch with variable strength", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+status_t HidlProviderInfo::HidlDeviceInfo3::getTorchStrengthLevel(int32_t * /*torchStrength*/) {
+    ALOGE("%s HIDL does not support variable torch strength level", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+sp<hardware::camera::device::V3_2::ICameraDevice>
+HidlProviderInfo::HidlDeviceInfo3::startDeviceInterface() {
+    Mutex::Autolock l(mDeviceAvailableLock);
+    sp<hardware::camera::device::V3_2::ICameraDevice> device;
+    ATRACE_CALL();
+    if (mSavedInterface == nullptr) {
+        sp<HidlProviderInfo> parentProvider =
+                static_cast<HidlProviderInfo *>(mParentProvider.promote().get());
+        if (parentProvider != nullptr) {
+            // Wait for lazy HALs to confirm device availability
+            if (parentProvider->isExternalLazyHAL() && !mIsDeviceAvailable) {
+                ALOGV("%s: Wait for external device to become available %s",
+                      __FUNCTION__,
+                      mId.c_str());
+
+                auto res = mDeviceAvailableSignal.waitRelative(mDeviceAvailableLock,
+                                                         kDeviceAvailableTimeout);
+                if (res != OK) {
+                    ALOGE("%s: Failed waiting for device to become available",
+                          __FUNCTION__);
+                    return nullptr;
+                }
+            }
+
+            device = parentProvider->startDeviceInterface(mName);
+        }
+    } else {
+        device = (hardware::camera::device::V3_2::ICameraDevice *) mSavedInterface.get();
+    }
+    return device;
+}
+
+status_t HidlProviderInfo::HidlDeviceInfo3::dumpState(int fd) {
+    native_handle_t* handle = native_handle_create(1,0);
+    handle->data[0] = fd;
+    const sp<hardware::camera::device::V3_2::ICameraDevice> interface =
+            startDeviceInterface();
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+    auto ret = interface->dumpState(handle);
+    native_handle_delete(handle);
+    if (!ret.isOk()) {
+        return INVALID_OPERATION;
+    }
+    return OK;
+}
+
+status_t HidlProviderInfo::HidlDeviceInfo3::isSessionConfigurationSupported(
+        const SessionConfiguration &configuration, bool overrideForPerfClass,
+        metadataGetter getMetadata, bool *status) {
+
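+    // First map the framework session configuration to a HAL V3_7 stream combination; if the
+    // conversion signals an early exit the combination cannot be supported and the HAL query
+    // is skipped.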
+    hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
+    bool earlyExit = false;
+    auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
+            String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
+            configuration_3_7, overrideForPerfClass, &earlyExit);
+
+    if (!bRes.isOk()) {
+        return UNKNOWN_ERROR;
+    }
+
+    if (earlyExit) {
+        *status = false;
+        return OK;
+    }
+
+    const sp<hardware::camera::device::V3_2::ICameraDevice> interface =
+            startDeviceInterface();
+
+    if (interface == nullptr) {
+        return DEAD_OBJECT;
+    }
+
+    auto castResult_3_5 = device::V3_5::ICameraDevice::castFrom(interface);
+    sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult_3_5;
+    auto castResult_3_7 = device::V3_7::ICameraDevice::castFrom(interface);
+    sp<hardware::camera::device::V3_7::ICameraDevice> interface_3_7 = castResult_3_7;
+
+    status_t res;
+    Status callStatus;
+    ::android::hardware::Return<void> ret;
+    auto halCb =
+            [&callStatus, &status] (Status s, bool combStatus) {
+                callStatus = s;
+                *status = combStatus;
+            };
+    if (interface_3_7 != nullptr) {
+        ret = interface_3_7->isStreamCombinationSupported_3_7(configuration_3_7, halCb);
+    } else if (interface_3_5 != nullptr) {
+        hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
+        bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                configuration_3_4, configuration_3_7);
+        if (!success) {
+            *status = false;
+            return OK;
+        }
+        ret = interface_3_5->isStreamCombinationSupported(configuration_3_4, halCb);
+    } else {
+        return INVALID_OPERATION;
+    }
+    if (ret.isOk()) {
+        switch (callStatus) {
+            case Status::OK:
+                // Expected case, do nothing.
+                res = OK;
+                break;
+            case Status::METHOD_NOT_SUPPORTED:
+                res = INVALID_OPERATION;
+                break;
+            default:
+                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, callStatus);
+                res = UNKNOWN_ERROR;
+        }
+    } else {
+        ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
+        res = UNKNOWN_ERROR;
+    }
+
+    return res;
+}
+
+status_t HidlProviderInfo::convertToHALStreamCombinationAndCameraIdsLocked(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion,
+        hardware::hidl_vec<CameraIdAndStreamCombination> *halCameraIdsAndStreamCombinations,
+        bool *earlyExit) {
+    binder::Status bStatus = binder::Status::ok();
+    std::vector<CameraIdAndStreamCombination> halCameraIdsAndStreamsV;
+    bool shouldExit = false;
+    status_t res = OK;
+    for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
+        const std::string& cameraId = cameraIdAndSessionConfig.mCameraId;
+        hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
+        CameraMetadata deviceInfo;
+        bool overrideForPerfClass =
+                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+                        perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        if (res != OK) {
+            return res;
+        }
+        camera3::metadataGetter getMetadata =
+                [this](const String8 &id, bool overrideForPerfClass) {
+                    CameraMetadata physicalDeviceInfo;
+                    mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
+                                                   &physicalDeviceInfo);
+                    return physicalDeviceInfo;
+                };
+        std::vector<std::string> physicalCameraIds;
+        mManager->isLogicalCameraLocked(cameraId, &physicalCameraIds);
+        bStatus =
+            SessionConfigurationUtils::convertToHALStreamCombination(
+                    cameraIdAndSessionConfig.mSessionConfiguration,
+                    String8(cameraId.c_str()), deviceInfo, getMetadata,
+                    physicalCameraIds, streamConfiguration,
+                    overrideForPerfClass, &shouldExit);
+        if (!bStatus.isOk()) {
+            ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+        if (shouldExit) {
+            *earlyExit = true;
+            return OK;
+        }
+        CameraIdAndStreamCombination halCameraIdAndStream;
+        halCameraIdAndStream.cameraId = cameraId;
+        halCameraIdAndStream.streamConfiguration = streamConfiguration;
+        halCameraIdsAndStreamsV.push_back(halCameraIdAndStream);
+    }
+    *halCameraIdsAndStreamCombinations = halCameraIdsAndStreamsV;
+    return OK;
+}
+
+status_t HidlProviderInfo::isConcurrentSessionConfigurationSupported(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion, bool *isSupported) {
+
+    hardware::hidl_vec<CameraIdAndStreamCombination> halCameraIdsAndStreamCombinations;
+    bool knowUnsupported = false;
+    status_t res = convertToHALStreamCombinationAndCameraIdsLocked(
+            cameraIdsAndSessionConfigs, perfClassPrimaryCameraIds,
+            targetSdkVersion, &halCameraIdsAndStreamCombinations, &knowUnsupported);
+    if (res != OK) {
+        ALOGE("%s: Unable to convert session configurations provided to HAL stream"
+                " combinations", __FUNCTION__);
+        return res;
+    }
+    if (knowUnsupported) {
+        // The streams were determined to be invalid before making the HAL call itself.
+        *isSupported = false;
+        return OK;
+    }
+
+    if (mMinorVersion >= 6) {
+        // Check if the provider is currently active - not going to start it for this notification
+        auto interface = mSavedInterface != nullptr ? mSavedInterface : mActiveInterface.promote();
+        if (interface == nullptr) {
+            // TODO: This might be some other problem
+            return INVALID_OPERATION;
+        }
+        auto castResult2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
+        auto castResult2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+        Status callStatus;
+        auto cb =
+                [&isSupported, &callStatus](Status s, bool supported) {
+                      callStatus = s;
+                      *isSupported = supported; };
+
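+        // @2.7 providers accept the V3_7 stream configurations directly, while @2.6 providers
+        // need each combination downgraded to a V3_4 stream configuration first.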
+        ::android::hardware::Return<void> ret;
+        sp<provider::V2_7::ICameraProvider> interface_2_7;
+        sp<provider::V2_6::ICameraProvider> interface_2_6;
+        if (mMinorVersion >= 7 && castResult2_7.isOk()) {
+            interface_2_7 = castResult2_7;
+            if (interface_2_7 != nullptr) {
+                ret = interface_2_7->isConcurrentStreamCombinationSupported_2_7(
+                        halCameraIdsAndStreamCombinations, cb);
+            }
+        } else if (mMinorVersion == 6 && castResult2_6.isOk()) {
+            interface_2_6 = castResult2_6;
+            if (interface_2_6 != nullptr) {
+                hardware::hidl_vec<provider::V2_6::CameraIdAndStreamCombination>
+                        halCameraIdsAndStreamCombinations_2_6;
+                size_t numStreams = halCameraIdsAndStreamCombinations.size();
+                halCameraIdsAndStreamCombinations_2_6.resize(numStreams);
+                for (size_t i = 0; i < numStreams; i++) {
+                    using namespace camera3;
+                    auto const& combination = halCameraIdsAndStreamCombinations[i];
+                    halCameraIdsAndStreamCombinations_2_6[i].cameraId = combination.cameraId;
+                    bool success =
+                            SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+                                    halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
+                                    combination.streamConfiguration);
+                    if (!success) {
+                        *isSupported = false;
+                        return OK;
+                    }
+                }
+                ret = interface_2_6->isConcurrentStreamCombinationSupported(
+                        halCameraIdsAndStreamCombinations_2_6, cb);
+            }
+        }
+
+        if (interface_2_7 != nullptr || interface_2_6 != nullptr) {
+            if (ret.isOk()) {
+                switch (callStatus) {
+                    case Status::OK:
+                        // Expected case, do nothing.
+                        res = OK;
+                        break;
+                    case Status::METHOD_NOT_SUPPORTED:
+                        res = INVALID_OPERATION;
+                        break;
+                    default:
+                        ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
+                                  callStatus);
+                        res = UNKNOWN_ERROR;
+                }
+            } else {
+                ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
+                res = UNKNOWN_ERROR;
+            }
+            return res;
+        }
+    }
+    // unsupported operation
+    return INVALID_OPERATION;
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
new file mode 100644
index 0000000..e0f1646
--- /dev/null
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_HIDLPROVIDERINFOH
+#define ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_HIDLPROVIDERINFOH
+
+#include "common/CameraProviderManager.h"
+
+namespace android {
+
+struct HidlProviderInfo : public CameraProviderManager::ProviderInfo,
+            virtual public hardware::camera::provider::V2_6::ICameraProviderCallback,
+            virtual public hardware::hidl_death_recipient {
+    // Current overall Android device physical status
+    hardware::hidl_bitfield<hardware::camera::provider::V2_5::DeviceState> mDeviceState;
+
+    // This pointer is used to keep a reference to the ICameraProvider that was last accessed.
+    wp<hardware::camera::provider::V2_4::ICameraProvider> mActiveInterface;
+
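+    // Strong reference to the provider, held only when lazy HALs are disabled.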
+    sp<hardware::camera::provider::V2_4::ICameraProvider> mSavedInterface;
+    HidlProviderInfo(
+            const std::string &providerName,
+            const std::string &providerInstance,
+            CameraProviderManager *manager) :
+            CameraProviderManager::ProviderInfo(providerName, providerInstance, manager) {}
+
+    virtual ~HidlProviderInfo() {}
+
+    static status_t mapToStatusT(const hardware::camera::common::V1_0::Status &status);
+
+    status_t initializeHidlProvider(
+            sp<hardware::camera::provider::V2_4::ICameraProvider>& interface,
+            int64_t currentDeviceState);
+
+    IPCTransport getIPCTransport() override {return IPCTransport::HIDL;}
+
+    const sp<hardware::camera::provider::V2_4::ICameraProvider> startProviderInterface();
+
+    virtual bool successfullyStartedProviderInterface() override;
+
+    virtual int64_t getDeviceState() override {return mDeviceState;};
+
+    virtual status_t setUpVendorTags() override;
+    virtual status_t notifyDeviceStateChange(int64_t) override;
+
+    /**
+     * Query the camera provider for concurrent stream configuration support
+     */
+    virtual status_t isConcurrentSessionConfigurationSupported(
+        const std::vector<CameraIdAndSessionConfiguration> &cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion, bool *isSupported) override;
+
+    // Helper for initializeDeviceInfo to use the right CameraProvider get method.
+    sp<hardware::camera::device::V3_2::ICameraDevice>
+            startDeviceInterface(const std::string &deviceName);
+
+    // ICameraProviderCallbacks interface - these lock the parent mInterfaceMutex
+    hardware::Return<void> cameraDeviceStatusChange(
+            const hardware::hidl_string& ,
+            hardware::camera::common::V1_0::CameraDeviceStatus ) override;
+    hardware::Return<void> torchModeStatusChange(
+            const hardware::hidl_string& ,
+            hardware::camera::common::V1_0::TorchModeStatus ) override;
+    hardware::Return<void> physicalCameraDeviceStatusChange(
+            const hardware::hidl_string& ,
+            const hardware::hidl_string& ,
+            hardware::camera::common::V1_0::CameraDeviceStatus ) override;
+
+    // hidl_death_recipient interface - this locks the parent mInterfaceMutex
+    virtual void serviceDied(uint64_t , const wp<hidl::base::V1_0::IBase>& ) override;
+
+    struct HidlDeviceInfo3 : public CameraProviderManager::ProviderInfo::DeviceInfo3 {
+
+        const hardware::hidl_version mVersion = hardware::hidl_version{3, 2};
+        sp<IBase> mSavedInterface = nullptr;
+
+        HidlDeviceInfo3(const std::string& , const metadata_vendor_id_t ,
+                const std::string &, uint16_t ,
+                const CameraResourceCost& ,
+                sp<ProviderInfo> ,
+                const std::vector<std::string>& ,
+                sp<hardware::camera::device::V3_2::ICameraDevice>);
+
+        ~HidlDeviceInfo3() {}
+
+        virtual status_t setTorchMode(bool enabled) override;
+        virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) override;
+        virtual status_t getTorchStrengthLevel(int32_t *torchStrength) override;
+
+        virtual status_t dumpState(int fd) override;
+
+        virtual status_t isSessionConfigurationSupported(
+                const SessionConfiguration &/*configuration*/,
+                bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+                bool *status/*status*/);
+        sp<hardware::camera::device::V3_2::ICameraDevice> startDeviceInterface();
+    };
+
+ private:
+
+    virtual std::unique_ptr<DeviceInfo> initializeDeviceInfo(const std::string &,
+            const metadata_vendor_id_t , const std::string &,
+            uint16_t ) override;
+    virtual status_t reCacheConcurrentStreamingCameraIdsLocked() override;
+
+    // Expects to have mLock locked
+    status_t getConcurrentCameraIdsInternalLocked(
+            sp<hardware::camera::provider::V2_6::ICameraProvider> &);
+
+    // Expects to have mManager->mInterfaceMutex locked
+    status_t convertToHALStreamCombinationAndCameraIdsLocked(
+        const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>&
+                cameraIdsAndSessionConfigs,
+        const std::set<std::string>& perfClassPrimaryCameraIds,
+        int targetSdkVersion,
+        hardware::hidl_vec<hardware::camera::provider::V2_7::CameraIdAndStreamCombination>*
+                halCameraIdsAndStreamCombinations,
+        bool *earlyExit);
+}; // HidlProviderInfo
+
+} // namespace android
+#endif
diff --git a/services/camera/libcameraservice/device3/BufferUtils.cpp b/services/camera/libcameraservice/device3/BufferUtils.cpp
index f3adf20..c0d47d5 100644
--- a/services/camera/libcameraservice/device3/BufferUtils.cpp
+++ b/services/camera/libcameraservice/device3/BufferUtils.cpp
@@ -28,16 +28,6 @@
 namespace android {
 namespace camera3 {
 
-camera_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status) {
-    using hardware::camera::device::V3_2::BufferStatus;
-
-    switch (status) {
-        case BufferStatus::OK: return CAMERA_BUFFER_STATUS_OK;
-        case BufferStatus::ERROR: return CAMERA_BUFFER_STATUS_ERROR;
-    }
-    return CAMERA_BUFFER_STATUS_ERROR;
-}
-
 void BufferRecords::takeInflightBufferMap(BufferRecords& other) {
     std::lock_guard<std::mutex> oLock(other.mInflightLock);
     std::lock_guard<std::mutex> lock(mInflightLock);
diff --git a/services/camera/libcameraservice/device3/BufferUtils.h b/services/camera/libcameraservice/device3/BufferUtils.h
index 03112ec..96fc111 100644
--- a/services/camera/libcameraservice/device3/BufferUtils.h
+++ b/services/camera/libcameraservice/device3/BufferUtils.h
@@ -154,9 +154,6 @@
     }; // class BufferRecords
 
     static const uint64_t BUFFER_ID_NO_BUFFER = 0;
-
-    camera_buffer_status_t mapHidlBufferStatus(
-            hardware::camera::device::V3_2::BufferStatus status);
 } // namespace camera3
 
 } // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index de418da..7c2f34f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -70,8 +70,6 @@
 
 using namespace android::camera3;
 using namespace android::hardware::camera;
-using namespace android::hardware::camera::device::V3_2;
-using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
 
@@ -84,6 +82,7 @@
         mStatusWaiters(0),
         mUsePartialResult(false),
         mNumPartialResults(1),
+        mDeviceTimeBaseIsRealtime(false),
         mTimestampOffset(0),
         mNextResultFrameNumber(0),
         mNextReprocessResultFrameNumber(0),
@@ -112,166 +111,12 @@
     return mId;
 }
 
-status_t Camera3Device::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
-    ATRACE_CALL();
-    Mutex::Autolock il(mInterfaceLock);
-    Mutex::Autolock l(mLock);
-
-    ALOGV("%s: Initializing HIDL device for camera %s", __FUNCTION__, mId.string());
-    if (mStatus != STATUS_UNINITIALIZED) {
-        CLOGE("Already initialized!");
-        return INVALID_OPERATION;
-    }
-    if (manager == nullptr) return INVALID_OPERATION;
-
-    sp<ICameraDeviceSession> session;
-    ATRACE_BEGIN("CameraHal::openSession");
-    status_t res = manager->openSession(mId.string(), this,
-            /*out*/ &session);
-    ATRACE_END();
-    if (res != OK) {
-        SET_ERR_L("Could not open camera session: %s (%d)", strerror(-res), res);
-        return res;
-    }
-
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
-    if (res != OK) {
-        SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
-        session->close();
-        return res;
-    }
-    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
-
-    std::vector<std::string> physicalCameraIds;
-    bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
-    if (isLogical) {
-        for (auto& physicalId : physicalCameraIds) {
-            // Do not override characteristics for physical cameras
-            res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
-            if (res != OK) {
-                SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
-                        physicalId.c_str(), strerror(-res), res);
-                session->close();
-                return res;
-            }
-
-            bool usePrecorrectArray =
-                    DistortionMapper::isDistortionSupported(mPhysicalDeviceInfoMap[physicalId]);
-            if (usePrecorrectArray) {
-                res = mDistortionMappers[physicalId].setupStaticInfo(
-                        mPhysicalDeviceInfoMap[physicalId]);
-                if (res != OK) {
-                    SET_ERR_L("Unable to read camera %s's calibration fields for distortion "
-                            "correction", physicalId.c_str());
-                    session->close();
-                    return res;
-                }
-            }
-
-            mZoomRatioMappers[physicalId] = ZoomRatioMapper(
-                    &mPhysicalDeviceInfoMap[physicalId],
-                    mSupportNativeZoomRatio, usePrecorrectArray);
-
-            if (SessionConfigurationUtils::isUltraHighResolutionSensor(
-                    mPhysicalDeviceInfoMap[physicalId])) {
-                mUHRCropAndMeteringRegionMappers[physicalId] =
-                        UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
-                                usePrecorrectArray);
-            }
-        }
-    }
-
-    std::shared_ptr<RequestMetadataQueue> queue;
-    auto requestQueueRet = session->getCaptureRequestMetadataQueue(
-        [&queue](const auto& descriptor) {
-            queue = std::make_shared<RequestMetadataQueue>(descriptor);
-            if (!queue->isValid() || queue->availableToWrite() <= 0) {
-                ALOGE("HAL returns empty request metadata fmq, not use it");
-                queue = nullptr;
-                // don't use the queue onwards.
-            }
-        });
-    if (!requestQueueRet.isOk()) {
-        ALOGE("Transaction error when getting request metadata fmq: %s, not use it",
-                requestQueueRet.description().c_str());
-        return DEAD_OBJECT;
-    }
-
-    std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
-    auto resultQueueRet = session->getCaptureResultMetadataQueue(
-        [&resQueue](const auto& descriptor) {
-            resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
-            if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
-                ALOGE("HAL returns empty result metadata fmq, not use it");
-                resQueue = nullptr;
-                // Don't use the resQueue onwards.
-            }
-        });
-    if (!resultQueueRet.isOk()) {
-        ALOGE("Transaction error when getting result metadata queue from camera session: %s",
-                resultQueueRet.description().c_str());
-        return DEAD_OBJECT;
-    }
-    IF_ALOGV() {
-        session->interfaceChain([](
-            ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) {
-                ALOGV("Session interface chain:");
-                for (const auto& iface : interfaceChain) {
-                    ALOGV("  %s", iface.c_str());
-                }
-            });
-    }
-
-    camera_metadata_entry bufMgrMode =
-            mDeviceInfo.find(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION);
-    if (bufMgrMode.count > 0) {
-         mUseHalBufManager = (bufMgrMode.data.u8[0] ==
-            ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
-    }
-
-    camera_metadata_entry_t capabilities = mDeviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
-    for (size_t i = 0; i < capabilities.count; i++) {
-        uint8_t capability = capabilities.data.u8[i];
-        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING) {
-            mSupportOfflineProcessing = true;
-        }
-    }
-
-    mInterface = new HalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
-    std::string providerType;
-    mVendorTagId = manager->getProviderTagIdLocked(mId.string());
-    mTagMonitor.initialize(mVendorTagId);
-    if (!monitorTags.isEmpty()) {
-        mTagMonitor.parseTagsToMonitor(String8(monitorTags));
-    }
-
-    // Metadata tags needs fixup for monochrome camera device version less
-    // than 3.5.
-    hardware::hidl_version maxVersion{0,0};
-    res = manager->getHighestSupportedVersion(mId.string(), &maxVersion);
-    if (res != OK) {
-        ALOGE("%s: Error in getting camera device version id: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return res;
-    }
-    int deviceVersion = HARDWARE_DEVICE_API_VERSION(
-            maxVersion.get_major(), maxVersion.get_minor());
-
-    bool isMonochrome = false;
-    for (size_t i = 0; i < capabilities.count; i++) {
-        uint8_t capability = capabilities.data.u8[i];
-        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
-            isMonochrome = true;
-        }
-    }
-    mNeedFixupMonochromeTags = (isMonochrome && deviceVersion < CAMERA_DEVICE_API_VERSION_3_5);
-
-    return initializeCommonLocked();
-}
-
 status_t Camera3Device::initializeCommonLocked() {
 
+    /** Start watchdog thread */
+    mCameraServiceWatchdog = new CameraServiceWatchdog();
+    mCameraServiceWatchdog->run("CameraServiceWatchdog");
+
     /** Start up status tracker thread */
     mStatusTracker = new StatusTracker(this);
     status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
@@ -323,7 +168,7 @@
     }
 
     /** Start up request queue thread */
-    mRequestThread = new RequestThread(
+    mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
             mUseHalBufManager, mSupportCameraMute);
     res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
@@ -345,11 +190,12 @@
     mIsInputStreamMultiResolution = false;
 
     // Measure the clock domain offset between camera and video/hw_composer
+    mTimestampOffset = getMonoToBoottimeOffset();
     camera_metadata_entry timestampSource =
             mDeviceInfo.find(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE);
     if (timestampSource.count > 0 && timestampSource.data.u8[0] ==
             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
-        mTimestampOffset = getMonoToBoottimeOffset();
+        mDeviceTimeBaseIsRealtime = true;
     }
 
     // Will the HAL be sending in early partial result metadata?
@@ -381,7 +227,8 @@
         mRotateAndCropMappers.emplace(mId.c_str(), &mDeviceInfo);
     }
 
-    mInjectionMethods = new Camera3DeviceInjectionMethods(this);
+    // Hidl/AidlCamera3DeviceInjectionMethods
+    mInjectionMethods = createCamera3DeviceInjectionMethods(this);
 
     return OK;
 }
@@ -471,7 +318,7 @@
 
         // Call close without internal mutex held, as the HAL close may need to
         // wait on assorted callbacks,etc, to complete before it can return.
-        interface->close();
+        mCameraServiceWatchdog->WATCH(interface->close());
 
         flushInflightRequests();
 
@@ -494,6 +341,12 @@
         }
     }
     ALOGI("%s: X", __FUNCTION__);
+
+    if (mCameraServiceWatchdog != NULL) {
+        mCameraServiceWatchdog->requestExit();
+        mCameraServiceWatchdog.clear();
+    }
+
     return res;
 }
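
The CameraServiceWatchdog additions in these hunks start a monitoring thread during initialization, wrap the potentially blocking HAL close() call in WATCH(), and stop the thread again at the end of disconnect. As a rough illustration of the pattern only (not the real CameraServiceWatchdog API; the watchCall() helper, the fixed timeout, and the abort() recovery below are all assumptions), a deadline can be enforced around a status-returning call like this:

    // Sketch: run a blocking, status-returning call while a helper thread
    // enforces a deadline. Names and the abort() recovery are illustrative.
    #include <chrono>
    #include <condition_variable>
    #include <cstdlib>
    #include <mutex>
    #include <thread>

    template <typename Fn>
    auto watchCall(Fn&& fn, std::chrono::milliseconds timeout) {
        std::mutex m;
        std::condition_variable cv;
        bool done = false;

        std::thread watcher([&] {
            std::unique_lock<std::mutex> lk(m);
            if (!cv.wait_for(lk, timeout, [&] { return done; })) {
                std::abort();  // watched call is stuck; the real watchdog recovers differently
            }
        });

        auto result = fn();  // e.g. the blocking HAL transaction
        {
            std::lock_guard<std::mutex> lk(m);
            done = true;
        }
        cv.notify_one();
        watcher.join();
        return result;
    }

With such a helper, a blocking call would be invoked roughly as watchCall([&] { return doBlockingHalCall(); }, std::chrono::seconds(4)), where doBlockingHalCall and the four-second budget are placeholders.
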
 
@@ -531,77 +384,6 @@
     return measured;
 }
 
-hardware::graphics::common::V1_0::PixelFormat Camera3Device::mapToPixelFormat(
-        int frameworkFormat) {
-    return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
-}
-
-DataspaceFlags Camera3Device::mapToHidlDataspace(
-        android_dataspace dataSpace) {
-    return dataSpace;
-}
-
-BufferUsageFlags Camera3Device::mapToConsumerUsage(
-        uint64_t usage) {
-    return usage;
-}
-
-StreamRotation Camera3Device::mapToStreamRotation(camera_stream_rotation_t rotation) {
-    switch (rotation) {
-        case CAMERA_STREAM_ROTATION_0:
-            return StreamRotation::ROTATION_0;
-        case CAMERA_STREAM_ROTATION_90:
-            return StreamRotation::ROTATION_90;
-        case CAMERA_STREAM_ROTATION_180:
-            return StreamRotation::ROTATION_180;
-        case CAMERA_STREAM_ROTATION_270:
-            return StreamRotation::ROTATION_270;
-    }
-    ALOGE("%s: Unknown stream rotation %d", __FUNCTION__, rotation);
-    return StreamRotation::ROTATION_0;
-}
-
-status_t Camera3Device::mapToStreamConfigurationMode(
-        camera_stream_configuration_mode_t operationMode, StreamConfigurationMode *mode) {
-    if (mode == nullptr) return BAD_VALUE;
-    if (operationMode < CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START) {
-        switch(operationMode) {
-            case CAMERA_STREAM_CONFIGURATION_NORMAL_MODE:
-                *mode = StreamConfigurationMode::NORMAL_MODE;
-                break;
-            case CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
-                *mode = StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE;
-                break;
-            default:
-                ALOGE("%s: Unknown stream configuration mode %d", __FUNCTION__, operationMode);
-                return BAD_VALUE;
-        }
-    } else {
-        *mode = static_cast<StreamConfigurationMode>(operationMode);
-    }
-    return OK;
-}
-
-int Camera3Device::mapToFrameworkFormat(
-        hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
-    return static_cast<uint32_t>(pixelFormat);
-}
-
-android_dataspace Camera3Device::mapToFrameworkDataspace(
-        DataspaceFlags dataSpace) {
-    return static_cast<android_dataspace>(dataSpace);
-}
-
-uint64_t Camera3Device::mapConsumerToFrameworkUsage(
-        BufferUsageFlags usage) {
-    return usage;
-}
-
-uint64_t Camera3Device::mapProducerToFrameworkUsage(
-        BufferUsageFlags usage) {
-    return usage;
-}
-
 ssize_t Camera3Device::getJpegBufferSize(const CameraMetadata &info, uint32_t width,
         uint32_t height) const {
     // Get max jpeg size (area-wise) for default sensor pixel mode
@@ -754,10 +536,9 @@
     }
     lines.appendFormat("    Stream configuration:\n");
     const char *mode =
-            mOperatingMode == static_cast<int>(StreamConfigurationMode::NORMAL_MODE) ? "NORMAL" :
-            mOperatingMode == static_cast<int>(
-                StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE) ? "CONSTRAINED_HIGH_SPEED" :
-            "CUSTOM";
+            mOperatingMode == CAMERA_STREAM_CONFIGURATION_NORMAL_MODE ? "NORMAL" :
+            mOperatingMode == CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE ?
+                    "CONSTRAINED_HIGH_SPEED" : "CUSTOM";
     lines.appendFormat("    Operation mode: %s (%d) \n", mode, mOperatingMode);
 
     if (mInputStream != NULL) {
@@ -850,6 +631,21 @@
     return OK;
 }
 
+status_t Camera3Device::startWatchingTags(const String8 &tags) {
+    mTagMonitor.parseTagsToMonitor(tags);
+    return OK;
+}
+
+status_t Camera3Device::stopWatchingTags() {
+    mTagMonitor.disableMonitoring();
+    return OK;
+}
+
+status_t Camera3Device::dumpWatchedEventsToVector(std::vector<std::string> &out) {
+    mTagMonitor.getLatestMonitoredTagEvents(out);
+    return OK;
+}
+
 const CameraMetadata& Camera3Device::infoPhysical(const String8& physicalId) const {
     ALOGVV("%s: E", __FUNCTION__);
     if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED ||
@@ -1029,176 +825,6 @@
     return res;
 }
 
-hardware::Return<void> Camera3Device::requestStreamBuffers(
-        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
-        requestStreamBuffers_cb _hidl_cb) {
-    RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
-        *this, *mInterface, *this};
-    camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3Device::returnStreamBuffers(
-        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
-    ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, *mInterface};
-    camera3::returnStreamBuffers(states, buffers);
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3Device::processCaptureResult_3_4(
-        const hardware::hidl_vec<
-                hardware::camera::device::V3_4::CaptureResult>& results) {
-    // Ideally we should grab mLock, but that can lead to deadlock, and
-    // it's not super important to get up to date value of mStatus for this
-    // warning print, hence skipping the lock here
-    if (mStatus == STATUS_ERROR) {
-        // Per API contract, HAL should act as closed after device error
-        // But mStatus can be set to error by framework as well, so just log
-        // a warning here.
-        ALOGW("%s: received capture result in error state.", __FUNCTION__);
-    }
-
-    sp<NotificationListener> listener;
-    {
-        std::lock_guard<std::mutex> l(mOutputLock);
-        listener = mListener.promote();
-    }
-
-    if (mProcessCaptureResultLock.tryLock() != OK) {
-        // This should never happen; it indicates a wrong client implementation
-        // that doesn't follow the contract. But, we can be tolerant here.
-        ALOGE("%s: callback overlapped! waiting 1s...",
-                __FUNCTION__);
-        if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
-            ALOGE("%s: cannot acquire lock in 1s, dropping results",
-                    __FUNCTION__);
-            // really don't know what to do, so bail out.
-            return hardware::Void();
-        }
-    }
-    CaptureOutputStates states {
-        mId,
-        mInFlightLock, mLastCompletedRegularFrameNumber,
-        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-        mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
-        mNextShutterFrameNumber,
-        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-        mNextResultFrameNumber,
-        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
-        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
-        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient
-    };
-
-    for (const auto& result : results) {
-        processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
-    }
-    mProcessCaptureResultLock.unlock();
-    return hardware::Void();
-}
-
-// Only one processCaptureResult should be called at a time, so
-// the locks won't block. The locks are present here simply to enforce this.
-hardware::Return<void> Camera3Device::processCaptureResult(
-        const hardware::hidl_vec<
-                hardware::camera::device::V3_2::CaptureResult>& results) {
-    hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
-
-    // Ideally we should grab mLock, but that can lead to deadlock, and
-    // it's not super important to get up to date value of mStatus for this
-    // warning print, hence skipping the lock here
-    if (mStatus == STATUS_ERROR) {
-        // Per API contract, HAL should act as closed after device error
-        // But mStatus can be set to error by framework as well, so just log
-        // a warning here.
-        ALOGW("%s: received capture result in error state.", __FUNCTION__);
-    }
-
-    sp<NotificationListener> listener;
-    {
-        std::lock_guard<std::mutex> l(mOutputLock);
-        listener = mListener.promote();
-    }
-
-    if (mProcessCaptureResultLock.tryLock() != OK) {
-        // This should never happen; it indicates a wrong client implementation
-        // that doesn't follow the contract. But, we can be tolerant here.
-        ALOGE("%s: callback overlapped! waiting 1s...",
-                __FUNCTION__);
-        if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
-            ALOGE("%s: cannot acquire lock in 1s, dropping results",
-                    __FUNCTION__);
-            // really don't know what to do, so bail out.
-            return hardware::Void();
-        }
-    }
-
-    CaptureOutputStates states {
-        mId,
-        mInFlightLock, mLastCompletedRegularFrameNumber,
-        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-        mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
-        mNextShutterFrameNumber,
-        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-        mNextResultFrameNumber,
-        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
-        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
-        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient
-    };
-
-    for (const auto& result : results) {
-        processOneCaptureResultLocked(states, result, noPhysMetadata);
-    }
-    mProcessCaptureResultLock.unlock();
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3Device::notify(
-        const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
-    // Ideally we should grab mLock, but that can lead to deadlock, and
-    // it's not super important to get up to date value of mStatus for this
-    // warning print, hence skipping the lock here
-    if (mStatus == STATUS_ERROR) {
-        // Per API contract, HAL should act as closed after device error
-        // But mStatus can be set to error by framework as well, so just log
-        // a warning here.
-        ALOGW("%s: received notify message in error state.", __FUNCTION__);
-    }
-
-    sp<NotificationListener> listener;
-    {
-        std::lock_guard<std::mutex> l(mOutputLock);
-        listener = mListener.promote();
-    }
-
-    CaptureOutputStates states {
-        mId,
-        mInFlightLock, mLastCompletedRegularFrameNumber,
-        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-        mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
-        mNextShutterFrameNumber,
-        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-        mNextResultFrameNumber,
-        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
-        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
-        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient
-    };
-    for (const auto& msg : msgs) {
-        camera3::notify(states, msg);
-    }
-    return hardware::Void();
-}
-
 status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
                                     const std::list<const SurfaceMap> &surfaceMaps,
                                     int64_t *lastFrameNumber) {
@@ -1359,7 +985,8 @@
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
-            uint64_t consumerUsage) {
+            uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
+            int timestampBase, int mirrorMode) {
     ATRACE_CALL();
 
     if (consumer == nullptr) {
@@ -1372,7 +999,8 @@
 
     return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
             format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
-            streamSetId, isShared, isMultiResolution, consumerUsage);
+            streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
+            streamUseCase, timestampBase, mirrorMode);
 }
 
 static bool isRawFormat(int format) {
@@ -1392,16 +1020,20 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
         const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
-        uint64_t consumerUsage) {
+        uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
+        int timestampBase, int mirrorMode) {
     ATRACE_CALL();
 
     Mutex::Autolock il(mInterfaceLock);
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
-            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
+            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
+            " dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
+            " mirrorMode %d",
             mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
-            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
+            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
+            dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
 
     status_t res;
     bool wasActive = false;
@@ -1450,6 +1082,7 @@
         ALOGE("%s: RAW opaque stream cannot be used with > 1 sensor pixel modes", __FUNCTION__);
         return BAD_VALUE;
     }
+    IPCTransport transport = getTransportType();
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ssize_t blobBufferSize;
         if (dataSpace == HAL_DATASPACE_DEPTH) {
@@ -1469,8 +1102,9 @@
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, blobBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
+                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+                timestampBase, mirrorMode);
     } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
         bool maxResolution =
                 sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1483,23 +1117,27 @@
         }
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
+                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+                timestampBase, mirrorMode);
     } else if (isShared) {
         newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                mUseHalBufManager);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
+                mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+                timestampBase, mirrorMode);
     } else if (consumers.size() == 0 && hasDeferredConsumer) {
         newStream = new Camera3OutputStream(mNextStreamId,
                 width, height, format, consumerUsage, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
+                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+                timestampBase, mirrorMode);
     } else {
         newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                 width, height, format, dataSpace, rotation,
-                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
-                isMultiResolution);
+                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
+                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+                timestampBase, mirrorMode);
     }
 
     size_t consumerCount = consumers.size();
@@ -1586,6 +1224,7 @@
     streamInfo->originalFormat = stream->getOriginalFormat();
     streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
     streamInfo->originalDataSpace = stream->getOriginalDataSpace();
+    streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
     return OK;
 }
 
@@ -1617,7 +1256,7 @@
         CLOGE("Stream %d does not exist", id);
         return BAD_VALUE;
     }
-    return stream->setTransform(transform);
+    return stream->setTransform(transform, false /*mayChangeMirror*/);
 }
 
 status_t Camera3Device::deleteStream(int id) {
@@ -1918,13 +1557,22 @@
     }
 
     bool stateSeen = false;
+    nsecs_t startTime = systemTime();
     do {
         if (active == (mStatus == STATUS_ACTIVE)) {
             // Desired state is current
             break;
         }
 
-        res = mStatusChanged.waitRelative(mLock, timeout);
+        nsecs_t timeElapsed = systemTime() - startTime;
+        nsecs_t timeToWait = timeout - timeElapsed;
+        if (timeToWait <= 0) {
+            // Thread woke up spuriously but has timed out since.
+            // Force out of loop with TIMED_OUT result.
+            res = TIMED_OUT;
+            break;
+        }
+        res = mStatusChanged.waitRelative(mLock, timeToWait);
         if (res != OK) break;
 
         // This is impossible, but if not, could result in subtle deadlocks and invalid state
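
The timeout bookkeeping added in this hunk prevents spurious wakeups from stretching the overall wait: the time already spent is subtracted from the caller's budget before each waitRelative(), and the loop exits with TIMED_OUT once the budget is exhausted. A sketch of the same idea with std::condition_variable (names and types here are illustrative, not the framework's Condition class):

    // Sketch: wait for a predicate with an overall deadline that survives
    // spurious wakeups.
    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    template <typename Pred>
    bool waitWithDeadline(std::condition_variable& cv, std::unique_lock<std::mutex>& lk,
                          std::chrono::nanoseconds timeout, Pred reached) {
        const auto start = std::chrono::steady_clock::now();
        while (!reached()) {
            const auto elapsed = std::chrono::steady_clock::now() - start;
            if (elapsed >= timeout) {
                return false;  // timed out, possibly after several spurious wakeups
            }
            // Wait only for the remaining budget so repeated wakeups cannot
            // stretch the total wait past the caller's timeout.
            cv.wait_for(lk, timeout - elapsed);
        }
        return true;
    }
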
@@ -2088,7 +1736,12 @@
         mSessionStatsBuilder.stopCounter();
     }
 
-    return mRequestThread->flush();
+    // Calculate expected duration for flush with additional buffer time in ms for watchdog
+    uint64_t maxExpectedDuration = (getExpectedInFlightDuration() + kBaseGetBufferWait) / 1e6;
+    status_t res = mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(mRequestThread->flush(),
+            maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
+
+    return res;
 }
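
For the flush() change above, the watchdog budget is derived from the expected in-flight duration: nanoseconds plus a buffer-wait allowance are converted to milliseconds and then split into fixed-length cycles for WATCH_CUSTOM_TIMER. A worked example of that arithmetic; the constant values below are invented, not the real kCycleLengthMs or kBaseGetBufferWait:

    // Arithmetic sketch of the custom watchdog timer budget.
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint64_t kCycleLengthMs = 100;                      // assumed cycle length
        const uint64_t kBaseGetBufferWaitNs = 3'000'000'000ULL;   // assumed 3 s allowance
        uint64_t inFlightDurationNs = 500'000'000ULL;             // e.g. 500 ms of pending work

        uint64_t maxExpectedDurationMs = (inFlightDurationNs + kBaseGetBufferWaitNs) / 1'000'000;
        uint64_t cycles = maxExpectedDurationMs / kCycleLengthMs;

        std::printf("watchdog budget: %llu ms = %llu cycles of %llu ms\n",
                    (unsigned long long)maxExpectedDurationMs,
                    (unsigned long long)cycles,
                    (unsigned long long)kCycleLengthMs);
        return 0;
    }
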
 
 status_t Camera3Device::prepare(int streamId) {
@@ -2157,6 +1810,20 @@
     return OK;
 }
 
+float Camera3Device::getMaxPreviewFps(sp<camera3::Camera3OutputStreamInterface> stream) {
+    camera_metadata_entry minDurations =
+            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+    for (size_t i = 0; i < minDurations.count; i += 4) {
+        if (minDurations.data.i64[i] == stream->getFormat()
+                && minDurations.data.i64[i+1] == stream->getWidth()
+                && minDurations.data.i64[i+2] == stream->getHeight()) {
+            int64_t minFrameDuration = minDurations.data.i64[i+3];
+            return 1e9f / minFrameDuration;
+        }
+    }
+    return 0.0f;
+}
+
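
getMaxPreviewFps() above scans the flat ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS array, which stores (format, width, height, minFrameDurationNs) quadruplets, and converts the matching minimum duration to a frame rate. A standalone sketch of that lookup with invented table data:

    // Sketch of the min-frame-duration table lookup.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    float maxFpsFor(const std::vector<int64_t>& minDurations,
                    int64_t format, int64_t width, int64_t height) {
        for (size_t i = 0; i + 3 < minDurations.size(); i += 4) {
            if (minDurations[i] == format && minDurations[i + 1] == width &&
                    minDurations[i + 2] == height) {
                return 1e9f / minDurations[i + 3];  // ns per frame -> frames per second
            }
        }
        return 0.0f;  // stream size/format not found in the static metadata
    }

    int main() {
        // 0x22 (IMPLEMENTATION_DEFINED) and the durations below are example values.
        std::vector<int64_t> table = {0x22, 1920, 1080, 33333333,
                                      0x22, 3840, 2160, 50000000};
        std::printf("1080p max fps: %.1f\n", maxFpsFor(table, 0x22, 1920, 1080));  // ~30.0
        return 0;
    }
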
 /**
  * Methods called by subclasses
  */
@@ -2165,6 +1832,7 @@
     ATRACE_CALL();
     std::vector<int> streamIds;
     std::vector<hardware::CameraStreamStats> streamStats;
+    float sessionMaxPreviewFps = 0.0f;
 
     {
         // Need mLock to safely update state and synchronize to current
@@ -2184,21 +1852,28 @@
         // state changes
         if (mPauseStateNotify) return;
 
-        // Populate stream statistics in case of Idle
-        if (idle) {
-            for (size_t i = 0; i < mOutputStreams.size(); i++) {
-                auto stream = mOutputStreams[i];
-                if (stream.get() == nullptr) continue;
+        for (size_t i = 0; i < mOutputStreams.size(); i++) {
+            auto stream = mOutputStreams[i];
+            if (stream.get() == nullptr) continue;
+
+            float streamMaxPreviewFps = getMaxPreviewFps(stream);
+            sessionMaxPreviewFps = std::max(sessionMaxPreviewFps, streamMaxPreviewFps);
+
+            // Populate stream statistics in case of Idle
+            if (idle) {
                 streamIds.push_back(stream->getId());
                 Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
                 int64_t usage = 0LL;
+                int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
                 if (camera3Stream != nullptr) {
                     usage = camera3Stream->getUsage();
+                    streamUseCase = camera3Stream->getStreamUseCase();
                 }
                 streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
-                    stream->getFormat(), stream->getDataSpace(), usage,
+                    stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
                     stream->getMaxHalBuffers(),
-                    stream->getMaxTotalBuffers() - stream->getMaxHalBuffers());
+                    stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
+                    stream->getDynamicRangeProfile(), streamUseCase);
             }
         }
     }
@@ -2236,7 +1911,7 @@
             }
             listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
         } else {
-            res = listener->notifyActive();
+            res = listener->notifyActive(sessionMaxPreviewFps);
         }
     }
     if (res != OK) {
@@ -2608,8 +2283,7 @@
     }
 
     bool isConstrainedHighSpeed =
-            static_cast<int>(StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE) ==
-            operatingMode;
+            CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == operatingMode;
 
     if (mOperatingMode != operatingMode) {
         mNeedConfig = true;
@@ -2617,6 +2291,9 @@
         mOperatingMode = operatingMode;
     }
 
+    // Reset min expected duration when session is reconfigured.
+    mMinExpectedDuration = 0;
+
     // In case called from configureStreams, abort queued input buffers not belonging to
     // any pending requests.
     if (mInputStream != NULL && notifyRequestThread) {
@@ -2849,17 +2526,28 @@
         mRequestBufferSM.onStreamsConfigured();
     }
 
+    // Case 1: injectCamera() is called before configureStreamsLocked():
     // Since the streams configuration of the injection camera is based on the internal camera, we
-    // must wait until the internal camera configure streams before calling injectCamera() to
+    // must wait for the internal camera to configure streams before running the injection job to
     // configure the injection streams.
     if (mInjectionMethods->isInjecting()) {
-        ALOGV("%s: Injection camera %s: Start to configure streams.",
+        ALOGD("%s: Injection camera %s: Start to configure streams.",
               __FUNCTION__, mInjectionMethods->getInjectedCamId().string());
         res = mInjectionMethods->injectCamera(config, bufferSizes);
         if (res != OK) {
             ALOGE("Can't finish inject camera process!");
             return res;
         }
+    } else {
+        // Case 2: configureStreamsLocked() runs before injectCamera() is called:
+        // If the stream configuration has been completed and the camera device is active, but the
+        // injection camera has not been injected yet, we need to store the stream configuration of
+        // the internal camera (because the stream configuration of the injection camera is based
+        // on the internal camera). When injection occurs later, this configuration can be used by
+        // the injection camera.
+        ALOGV("%s: The stream configuration is complete and the camera device is active, but the"
+              " injection camera has not been injected yet.", __FUNCTION__);
+        mInjectionMethods->storeInjectionConfig(config, bufferSizes);
     }
 
     return OK;
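
The branch above covers both orderings: if injection is already active the injected camera is configured immediately, otherwise the internal camera's configuration is cached so a later injection can replay it. A schematic sketch of that state handling; the types and method names are simplified stand-ins, not the real Camera3DeviceInjectionMethods API:

    // Sketch: remember the last internal stream configuration so injection can
    // be started either before or after stream configuration.
    #include <optional>

    struct StreamConfig { /* stream list, session params, buffer sizes, ... */ };

    class InjectionState {
      public:
        bool isInjecting() const { return mInjecting; }

        // Called after the internal camera configures streams: either configure
        // the injected camera now, or stash the config for a later start.
        void onInternalStreamsConfigured(const StreamConfig& config) {
            if (mInjecting) {
                configureInjectedStreams(config);
            } else {
                mStoredConfig = config;
            }
        }

        void startInjection() {
            mInjecting = true;
            if (mStoredConfig) {
                configureInjectedStreams(*mStoredConfig);  // replay the cached config
            }
        }

      private:
        void configureInjectedStreams(const StreamConfig&) { /* talk to the injected HAL */ }
        bool mInjecting = false;
        std::optional<StreamConfig> mStoredConfig;
    };
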
@@ -2985,7 +2673,7 @@
 
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-        bool hasAppCallback, nsecs_t maxExpectedDuration,
+        bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
         const std::set<std::set<String8>>& physicalCameraIds,
         bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
         const std::set<std::string>& cameraIdsWithZoom,
@@ -2995,8 +2683,9 @@
 
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
-            hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
-            rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs, outputSurfaces));
+            hasAppCallback, minExpectedDuration, maxExpectedDuration, physicalCameraIds,
+            isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
+            outputSurfaces));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -3085,713 +2774,15 @@
 
 void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
         int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
-        const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
+        const std::unordered_map<std::string, CameraMetadata>& physicalMetadata,
+        const camera_stream_buffer_t *outputBuffers, uint32_t numOutputBuffers,
+        int32_t inputStreamId) {
 
     mTagMonitor.monitorMetadata(source, frameNumber, timestamp, metadata,
-            physicalMetadata);
+            physicalMetadata, outputBuffers, numOutputBuffers, inputStreamId);
 }
 
-/**
- * HalInterface inner class methods
- */
-
-Camera3Device::HalInterface::HalInterface(
-            sp<ICameraDeviceSession> &session,
-            std::shared_ptr<RequestMetadataQueue> queue,
-            bool useHalBufManager, bool supportOfflineProcessing) :
-        mHidlSession(session),
-        mRequestMetadataQueue(queue),
-        mUseHalBufManager(useHalBufManager),
-        mIsReconfigurationQuerySupported(true),
-        mSupportOfflineProcessing(supportOfflineProcessing) {
-    // Check with hardware service manager if we can downcast these interfaces
-    // Somewhat expensive, so cache the results at startup
-    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_7.isOk()) {
-        mHidlSession_3_7 = castResult_3_7;
-    }
-    auto castResult_3_6 = device::V3_6::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_6.isOk()) {
-        mHidlSession_3_6 = castResult_3_6;
-    }
-    auto castResult_3_5 = device::V3_5::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_5.isOk()) {
-        mHidlSession_3_5 = castResult_3_5;
-    }
-    auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_4.isOk()) {
-        mHidlSession_3_4 = castResult_3_4;
-    }
-    auto castResult_3_3 = device::V3_3::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_3.isOk()) {
-        mHidlSession_3_3 = castResult_3_3;
-    }
-}
-
-Camera3Device::HalInterface::HalInterface() :
-        mUseHalBufManager(false),
-        mSupportOfflineProcessing(false) {}
-
-Camera3Device::HalInterface::HalInterface(const HalInterface& other) :
-        mHidlSession(other.mHidlSession),
-        mRequestMetadataQueue(other.mRequestMetadataQueue),
-        mUseHalBufManager(other.mUseHalBufManager),
-        mSupportOfflineProcessing(other.mSupportOfflineProcessing) {}
-
-bool Camera3Device::HalInterface::valid() {
-    return (mHidlSession != nullptr);
-}
-
-void Camera3Device::HalInterface::clear() {
-    mHidlSession_3_7.clear();
-    mHidlSession_3_6.clear();
-    mHidlSession_3_5.clear();
-    mHidlSession_3_4.clear();
-    mHidlSession_3_3.clear();
-    mHidlSession.clear();
-}
-
-status_t Camera3Device::HalInterface::constructDefaultRequestSettings(
-        camera_request_template_t templateId,
-        /*out*/ camera_metadata_t **requestTemplate) {
-    ATRACE_NAME("CameraHal::constructDefaultRequestSettings");
-    if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
-
-    common::V1_0::Status status;
-
-    auto requestCallback = [&status, &requestTemplate]
-            (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
-            status = s;
-            if (status == common::V1_0::Status::OK) {
-                const camera_metadata *r =
-                        reinterpret_cast<const camera_metadata_t*>(request.data());
-                size_t expectedSize = request.size();
-                int ret = validate_camera_metadata_structure(r, &expectedSize);
-                if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
-                    *requestTemplate = clone_camera_metadata(r);
-                    if (*requestTemplate == nullptr) {
-                        ALOGE("%s: Unable to clone camera metadata received from HAL",
-                                __FUNCTION__);
-                        status = common::V1_0::Status::INTERNAL_ERROR;
-                    }
-                } else {
-                    ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
-                    status = common::V1_0::Status::INTERNAL_ERROR;
-                }
-            }
-        };
-    hardware::Return<void> err;
-    RequestTemplate id;
-    switch (templateId) {
-        case CAMERA_TEMPLATE_PREVIEW:
-            id = RequestTemplate::PREVIEW;
-            break;
-        case CAMERA_TEMPLATE_STILL_CAPTURE:
-            id = RequestTemplate::STILL_CAPTURE;
-            break;
-        case CAMERA_TEMPLATE_VIDEO_RECORD:
-            id = RequestTemplate::VIDEO_RECORD;
-            break;
-        case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
-            id = RequestTemplate::VIDEO_SNAPSHOT;
-            break;
-        case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
-            id = RequestTemplate::ZERO_SHUTTER_LAG;
-            break;
-        case CAMERA_TEMPLATE_MANUAL:
-            id = RequestTemplate::MANUAL;
-            break;
-        default:
-            // Unknown template ID, or this HAL is too old to support it
-            return BAD_VALUE;
-    }
-    err = mHidlSession->constructDefaultRequestSettings(id, requestCallback);
-
-    if (!err.isOk()) {
-        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-        res = DEAD_OBJECT;
-    } else {
-        res = CameraProviderManager::mapToStatusT(status);
-    }
-
-    return res;
-}
-
-bool Camera3Device::HalInterface::isReconfigurationRequired(CameraMetadata& oldSessionParams,
-        CameraMetadata& newSessionParams) {
-    // We do reconfiguration by default;
-    bool ret = true;
-    if ((mHidlSession_3_5 != nullptr) && mIsReconfigurationQuerySupported) {
-        android::hardware::hidl_vec<uint8_t> oldParams, newParams;
-        camera_metadata_t* oldSessioMeta = const_cast<camera_metadata_t*>(
-                oldSessionParams.getAndLock());
-        camera_metadata_t* newSessioMeta = const_cast<camera_metadata_t*>(
-                newSessionParams.getAndLock());
-        oldParams.setToExternal(reinterpret_cast<uint8_t*>(oldSessioMeta),
-                get_camera_metadata_size(oldSessioMeta));
-        newParams.setToExternal(reinterpret_cast<uint8_t*>(newSessioMeta),
-                get_camera_metadata_size(newSessioMeta));
-        hardware::camera::common::V1_0::Status callStatus;
-        bool required;
-        auto hidlCb = [&callStatus, &required] (hardware::camera::common::V1_0::Status s,
-                bool requiredFlag) {
-            callStatus = s;
-            required = requiredFlag;
-        };
-        auto err = mHidlSession_3_5->isReconfigurationRequired(oldParams, newParams, hidlCb);
-        oldSessionParams.unlock(oldSessioMeta);
-        newSessionParams.unlock(newSessioMeta);
-        if (err.isOk()) {
-            switch (callStatus) {
-                case hardware::camera::common::V1_0::Status::OK:
-                    ret = required;
-                    break;
-                case hardware::camera::common::V1_0::Status::METHOD_NOT_SUPPORTED:
-                    mIsReconfigurationQuerySupported = false;
-                    ret = true;
-                    break;
-                default:
-                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
-                    ret = true;
-            }
-        } else {
-            ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, err.description().c_str());
-            ret = true;
-        }
-    }
-
-    return ret;
-}
-
-status_t Camera3Device::HalInterface::configureStreams(const camera_metadata_t *sessionParams,
-        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
-    ATRACE_NAME("CameraHal::configureStreams");
-    if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
-
-    if (config->input_is_multi_resolution && mHidlSession_3_7 == nullptr) {
-        ALOGE("%s: Camera device doesn't support multi-resolution input stream", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    // Convert stream config to HIDL
-    std::set<int> activeStreams;
-    device::V3_2::StreamConfiguration requestedConfiguration3_2;
-    device::V3_4::StreamConfiguration requestedConfiguration3_4;
-    device::V3_7::StreamConfiguration requestedConfiguration3_7;
-    requestedConfiguration3_2.streams.resize(config->num_streams);
-    requestedConfiguration3_4.streams.resize(config->num_streams);
-    requestedConfiguration3_7.streams.resize(config->num_streams);
-    for (size_t i = 0; i < config->num_streams; i++) {
-        device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
-        device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
-        device::V3_7::Stream &dst3_7 = requestedConfiguration3_7.streams[i];
-        camera3::camera_stream_t *src = config->streams[i];
-
-        Camera3Stream* cam3stream = Camera3Stream::cast(src);
-        cam3stream->setBufferFreedListener(this);
-        int streamId = cam3stream->getId();
-        StreamType streamType;
-        switch (src->stream_type) {
-            case CAMERA_STREAM_OUTPUT:
-                streamType = StreamType::OUTPUT;
-                break;
-            case CAMERA_STREAM_INPUT:
-                streamType = StreamType::INPUT;
-                break;
-            default:
-                ALOGE("%s: Stream %d: Unsupported stream type %d",
-                        __FUNCTION__, streamId, config->streams[i]->stream_type);
-                return BAD_VALUE;
-        }
-        dst3_2.id = streamId;
-        dst3_2.streamType = streamType;
-        dst3_2.width = src->width;
-        dst3_2.height = src->height;
-        dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
-        dst3_2.rotation = mapToStreamRotation((camera_stream_rotation_t) src->rotation);
-        // For HidlSession version 3.5 or newer, the format and dataSpace sent
-        // to HAL are original, not the overridden ones.
-        if (mHidlSession_3_5 != nullptr) {
-            dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden() ?
-                    cam3stream->getOriginalFormat() : src->format);
-            dst3_2.dataSpace = mapToHidlDataspace(cam3stream->isDataSpaceOverridden() ?
-                    cam3stream->getOriginalDataSpace() : src->data_space);
-        } else {
-            dst3_2.format = mapToPixelFormat(src->format);
-            dst3_2.dataSpace = mapToHidlDataspace(src->data_space);
-        }
-        dst3_4.v3_2 = dst3_2;
-        dst3_4.bufferSize = bufferSizes[i];
-        if (src->physical_camera_id != nullptr) {
-            dst3_4.physicalCameraId = src->physical_camera_id;
-        }
-        dst3_7.v3_4 = dst3_4;
-        dst3_7.groupId = cam3stream->getHalStreamGroupId();
-        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
-        size_t j = 0;
-        for (int mode : src->sensor_pixel_modes_used) {
-            dst3_7.sensorPixelModesUsed[j++] =
-                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
-        }
-        activeStreams.insert(streamId);
-        // Create Buffer ID map if necessary
-        mBufferRecords.tryCreateBufferCache(streamId);
-    }
-    // remove BufferIdMap for deleted streams
-    mBufferRecords.removeInactiveBufferCaches(activeStreams);
-
-    StreamConfigurationMode operationMode;
-    res = mapToStreamConfigurationMode(
-            (camera_stream_configuration_mode_t) config->operation_mode,
-            /*out*/ &operationMode);
-    if (res != OK) {
-        return res;
-    }
-    requestedConfiguration3_2.operationMode = operationMode;
-    requestedConfiguration3_4.operationMode = operationMode;
-    requestedConfiguration3_7.operationMode = operationMode;
-    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
-    requestedConfiguration3_4.sessionParams.setToExternal(
-            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            sessionParamSize);
-    requestedConfiguration3_7.operationMode = operationMode;
-    requestedConfiguration3_7.sessionParams.setToExternal(
-            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            sessionParamSize);
-
-    // Invoke configureStreams
-    device::V3_3::HalStreamConfiguration finalConfiguration;
-    device::V3_4::HalStreamConfiguration finalConfiguration3_4;
-    device::V3_6::HalStreamConfiguration finalConfiguration3_6;
-    common::V1_0::Status status;
-
-    auto configStream34Cb = [&status, &finalConfiguration3_4]
-            (common::V1_0::Status s, const device::V3_4::HalStreamConfiguration& halConfiguration) {
-                finalConfiguration3_4 = halConfiguration;
-                status = s;
-            };
-
-    auto configStream36Cb = [&status, &finalConfiguration3_6]
-            (common::V1_0::Status s, const device::V3_6::HalStreamConfiguration& halConfiguration) {
-                finalConfiguration3_6 = halConfiguration;
-                status = s;
-            };
-
-    auto postprocConfigStream34 = [&finalConfiguration, &finalConfiguration3_4]
-            (hardware::Return<void>& err) -> status_t {
-                if (!err.isOk()) {
-                    ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-                    return DEAD_OBJECT;
-                }
-                finalConfiguration.streams.resize(finalConfiguration3_4.streams.size());
-                for (size_t i = 0; i < finalConfiguration3_4.streams.size(); i++) {
-                    finalConfiguration.streams[i] = finalConfiguration3_4.streams[i].v3_3;
-                }
-                return OK;
-            };
-
-    auto postprocConfigStream36 = [&finalConfiguration, &finalConfiguration3_6]
-            (hardware::Return<void>& err) -> status_t {
-                if (!err.isOk()) {
-                    ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-                    return DEAD_OBJECT;
-                }
-                finalConfiguration.streams.resize(finalConfiguration3_6.streams.size());
-                for (size_t i = 0; i < finalConfiguration3_6.streams.size(); i++) {
-                    finalConfiguration.streams[i] = finalConfiguration3_6.streams[i].v3_4.v3_3;
-                }
-                return OK;
-            };
-
-    // See which version of HAL we have
-    if (mHidlSession_3_7 != nullptr) {
-        ALOGV("%s: v3.7 device found", __FUNCTION__);
-        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
-        requestedConfiguration3_7.multiResolutionInputImage = config->input_is_multi_resolution;
-        auto err = mHidlSession_3_7->configureStreams_3_7(
-                requestedConfiguration3_7, configStream36Cb);
-        res = postprocConfigStream36(err);
-        if (res != OK) {
-            return res;
-        }
-    } else if (mHidlSession_3_6 != nullptr) {
-        ALOGV("%s: v3.6 device found", __FUNCTION__);
-        device::V3_5::StreamConfiguration requestedConfiguration3_5;
-        requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
-        requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
-        auto err = mHidlSession_3_6->configureStreams_3_6(
-                requestedConfiguration3_5, configStream36Cb);
-        res = postprocConfigStream36(err);
-        if (res != OK) {
-            return res;
-        }
-    } else if (mHidlSession_3_5 != nullptr) {
-        ALOGV("%s: v3.5 device found", __FUNCTION__);
-        device::V3_5::StreamConfiguration requestedConfiguration3_5;
-        requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
-        requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
-        auto err = mHidlSession_3_5->configureStreams_3_5(
-                requestedConfiguration3_5, configStream34Cb);
-        res = postprocConfigStream34(err);
-        if (res != OK) {
-            return res;
-        }
-    } else if (mHidlSession_3_4 != nullptr) {
-        // We do; use v3.4 for the call
-        ALOGV("%s: v3.4 device found", __FUNCTION__);
-        auto err = mHidlSession_3_4->configureStreams_3_4(
-                requestedConfiguration3_4, configStream34Cb);
-        res = postprocConfigStream34(err);
-        if (res != OK) {
-            return res;
-        }
-    } else if (mHidlSession_3_3 != nullptr) {
-        // We do; use v3.3 for the call
-        ALOGV("%s: v3.3 device found", __FUNCTION__);
-        auto err = mHidlSession_3_3->configureStreams_3_3(requestedConfiguration3_2,
-            [&status, &finalConfiguration]
-            (common::V1_0::Status s, const device::V3_3::HalStreamConfiguration& halConfiguration) {
-                finalConfiguration = halConfiguration;
-                status = s;
-            });
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-            return DEAD_OBJECT;
-        }
-    } else {
-        // We don't; use v3.2 call and construct a v3.3 HalStreamConfiguration
-        ALOGV("%s: v3.2 device found", __FUNCTION__);
-        HalStreamConfiguration finalConfiguration_3_2;
-        auto err = mHidlSession->configureStreams(requestedConfiguration3_2,
-                [&status, &finalConfiguration_3_2]
-                (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
-                    finalConfiguration_3_2 = halConfiguration;
-                    status = s;
-                });
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-            return DEAD_OBJECT;
-        }
-        finalConfiguration.streams.resize(finalConfiguration_3_2.streams.size());
-        for (size_t i = 0; i < finalConfiguration_3_2.streams.size(); i++) {
-            finalConfiguration.streams[i].v3_2 = finalConfiguration_3_2.streams[i];
-            finalConfiguration.streams[i].overrideDataSpace =
-                    requestedConfiguration3_2.streams[i].dataSpace;
-        }
-    }
-
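
The removed dispatch chain above always prefers the newest session interface that castFrom() produced at construction time, falling back version by version down to the base 3.2 call. A compact sketch of that precedence order; modeling the interfaces as plain function objects is an assumption for illustration only:

    // Sketch: pick the newest available handler, falling back in order.
    #include <cstdio>
    #include <functional>
    #include <utility>
    #include <vector>

    int configureWithNewestAvailable(
            const std::vector<std::pair<const char*, std::function<int()>>>& byVersion) {
        // byVersion is ordered newest-first, mirroring the 3.7 -> 3.2 chain above;
        // an empty function plays the role of a failed castFrom().
        for (const auto& [name, call] : byVersion) {
            if (call) {
                std::printf("using %s\n", name);
                return call();
            }
        }
        return -1;  // no usable session interface
    }
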
-    if (status != common::V1_0::Status::OK ) {
-        return CameraProviderManager::mapToStatusT(status);
-    }
-
-    // And convert output stream configuration from HIDL
-
-    for (size_t i = 0; i < config->num_streams; i++) {
-        camera3::camera_stream_t *dst = config->streams[i];
-        int streamId = Camera3Stream::cast(dst)->getId();
-
-        // Start scan at i, with the assumption that the stream order matches
-        size_t realIdx = i;
-        bool found = false;
-        size_t halStreamCount = finalConfiguration.streams.size();
-        for (size_t idx = 0; idx < halStreamCount; idx++) {
-            if (finalConfiguration.streams[realIdx].v3_2.id == streamId) {
-                found = true;
-                break;
-            }
-            realIdx = (realIdx >= halStreamCount - 1) ? 0 : realIdx + 1;
-        }
-        if (!found) {
-            ALOGE("%s: Stream %d not found in stream configuration response from HAL",
-                    __FUNCTION__, streamId);
-            return INVALID_OPERATION;
-        }
-        device::V3_3::HalStream &src = finalConfiguration.streams[realIdx];
-        device::V3_6::HalStream &src_36 = finalConfiguration3_6.streams[realIdx];
-
-        Camera3Stream* dstStream = Camera3Stream::cast(dst);
-        int overrideFormat = mapToFrameworkFormat(src.v3_2.overrideFormat);
-        android_dataspace overrideDataSpace = mapToFrameworkDataspace(src.overrideDataSpace);
-
-        if (mHidlSession_3_6 != nullptr) {
-            dstStream->setOfflineProcessingSupport(src_36.supportOffline);
-        }
-
-        if (dstStream->getOriginalFormat() != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-            dstStream->setFormatOverride(false);
-            dstStream->setDataSpaceOverride(false);
-            if (dst->format != overrideFormat) {
-                ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
-                        streamId, dst->format);
-            }
-            if (dst->data_space != overrideDataSpace) {
-                ALOGE("%s: Stream %d: DataSpace override not allowed for format 0x%x", __FUNCTION__,
-                        streamId, dst->format);
-            }
-        } else {
-            bool needFormatOverride =
-                    requestedConfiguration3_2.streams[i].format != src.v3_2.overrideFormat;
-            bool needDataspaceOverride =
-                    requestedConfiguration3_2.streams[i].dataSpace != src.overrideDataSpace;
-            // Override allowed with IMPLEMENTATION_DEFINED
-            dstStream->setFormatOverride(needFormatOverride);
-            dstStream->setDataSpaceOverride(needDataspaceOverride);
-            dst->format = overrideFormat;
-            dst->data_space = overrideDataSpace;
-        }
-
-        if (dst->stream_type == CAMERA_STREAM_INPUT) {
-            if (src.v3_2.producerUsage != 0) {
-                ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
-                        __FUNCTION__, streamId);
-                return INVALID_OPERATION;
-            }
-            dstStream->setUsage(
-                    mapConsumerToFrameworkUsage(src.v3_2.consumerUsage));
-        } else {
-            // OUTPUT
-            if (src.v3_2.consumerUsage != 0) {
-                ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
-                        __FUNCTION__, streamId);
-                return INVALID_OPERATION;
-            }
-            dstStream->setUsage(
-                    mapProducerToFrameworkUsage(src.v3_2.producerUsage));
-        }
-        dst->max_buffers = src.v3_2.maxBuffers;
-    }
-
-    return res;
-}
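
The stream-matching loop in the removed configureStreams() above starts its scan at the index where the entry is expected, on the assumption that the HAL's response usually preserves the request order, and wraps around once before reporting an error. A standalone sketch of that hinted circular search:

    // Sketch: find an id, trying the expected index first and wrapping around.
    #include <cstddef>
    #include <vector>

    long findWithHint(const std::vector<int>& ids, int wanted, size_t hint) {
        const size_t n = ids.size();
        if (n == 0) return -1;
        size_t idx = hint % n;
        for (size_t step = 0; step < n; step++) {
            if (ids[idx] == wanted) return static_cast<long>(idx);
            idx = (idx + 1 == n) ? 0 : idx + 1;  // wrap to the start of the list
        }
        return -1;  // not present in the response
    }
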
-
-status_t Camera3Device::HalInterface::configureInjectedStreams(
-        const camera_metadata_t* sessionParams, camera_stream_configuration* config,
-        const std::vector<uint32_t>& bufferSizes,
-        const CameraMetadata& cameraCharacteristics) {
-    ATRACE_NAME("InjectionCameraHal::configureStreams");
-    if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
-
-    if (config->input_is_multi_resolution) {
-        ALOGE("%s: Injection camera device doesn't support multi-resolution input "
-                "stream", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    // Convert stream config to HIDL
-    std::set<int> activeStreams;
-    device::V3_2::StreamConfiguration requestedConfiguration3_2;
-    device::V3_4::StreamConfiguration requestedConfiguration3_4;
-    device::V3_7::StreamConfiguration requestedConfiguration3_7;
-    requestedConfiguration3_2.streams.resize(config->num_streams);
-    requestedConfiguration3_4.streams.resize(config->num_streams);
-    requestedConfiguration3_7.streams.resize(config->num_streams);
-    for (size_t i = 0; i < config->num_streams; i++) {
-        device::V3_2::Stream& dst3_2 = requestedConfiguration3_2.streams[i];
-        device::V3_4::Stream& dst3_4 = requestedConfiguration3_4.streams[i];
-        device::V3_7::Stream& dst3_7 = requestedConfiguration3_7.streams[i];
-        camera3::camera_stream_t* src = config->streams[i];
-
-        Camera3Stream* cam3stream = Camera3Stream::cast(src);
-        cam3stream->setBufferFreedListener(this);
-        int streamId = cam3stream->getId();
-        StreamType streamType;
-        switch (src->stream_type) {
-            case CAMERA_STREAM_OUTPUT:
-                streamType = StreamType::OUTPUT;
-                break;
-            case CAMERA_STREAM_INPUT:
-                streamType = StreamType::INPUT;
-                break;
-            default:
-                ALOGE("%s: Stream %d: Unsupported stream type %d", __FUNCTION__,
-                        streamId, config->streams[i]->stream_type);
-            return BAD_VALUE;
-        }
-        dst3_2.id = streamId;
-        dst3_2.streamType = streamType;
-        dst3_2.width = src->width;
-        dst3_2.height = src->height;
-        dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
-        dst3_2.rotation =
-                mapToStreamRotation((camera_stream_rotation_t)src->rotation);
-        // For HidlSession version 3.5 or newer, the format and dataSpace sent
-        // to HAL are original, not the overridden ones.
-        if (mHidlSession_3_5 != nullptr) {
-            dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden()
-                                            ? cam3stream->getOriginalFormat()
-                                            : src->format);
-            dst3_2.dataSpace =
-                    mapToHidlDataspace(cam3stream->isDataSpaceOverridden()
-                                    ? cam3stream->getOriginalDataSpace()
-                                    : src->data_space);
-        } else {
-            dst3_2.format = mapToPixelFormat(src->format);
-            dst3_2.dataSpace = mapToHidlDataspace(src->data_space);
-        }
-        dst3_4.v3_2 = dst3_2;
-        dst3_4.bufferSize = bufferSizes[i];
-        if (src->physical_camera_id != nullptr) {
-            dst3_4.physicalCameraId = src->physical_camera_id;
-        }
-        dst3_7.v3_4 = dst3_4;
-        dst3_7.groupId = cam3stream->getHalStreamGroupId();
-        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
-        size_t j = 0;
-        for (int mode : src->sensor_pixel_modes_used) {
-            dst3_7.sensorPixelModesUsed[j++] =
-                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
-        }
-        activeStreams.insert(streamId);
-        // Create Buffer ID map if necessary
-        mBufferRecords.tryCreateBufferCache(streamId);
-    }
-    // remove BufferIdMap for deleted streams
-    mBufferRecords.removeInactiveBufferCaches(activeStreams);
-
-    StreamConfigurationMode operationMode;
-    res = mapToStreamConfigurationMode(
-            (camera_stream_configuration_mode_t)config->operation_mode,
-            /*out*/ &operationMode);
-    if (res != OK) {
-        return res;
-    }
-    requestedConfiguration3_7.operationMode = operationMode;
-    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
-    requestedConfiguration3_7.operationMode = operationMode;
-    requestedConfiguration3_7.sessionParams.setToExternal(
-            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
-            sessionParamSize);
-
-    // See which version of HAL we have
-    if (mHidlSession_3_7 != nullptr) {
-        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
-        requestedConfiguration3_7.multiResolutionInputImage =
-                config->input_is_multi_resolution;
-
-        const camera_metadata_t* rawMetadata = cameraCharacteristics.getAndLock();
-        ::android::hardware::camera::device::V3_2::CameraMetadata hidlChars = {};
-        hidlChars.setToExternal(
-                reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(rawMetadata)),
-                get_camera_metadata_size(rawMetadata));
-        cameraCharacteristics.unlock(rawMetadata);
-
-        sp<hardware::camera::device::V3_7::ICameraInjectionSession>
-                hidlInjectionSession_3_7;
-        auto castInjectionResult_3_7 =
-                device::V3_7::ICameraInjectionSession::castFrom(mHidlSession_3_7);
-        if (castInjectionResult_3_7.isOk()) {
-            hidlInjectionSession_3_7 = castInjectionResult_3_7;
-        } else {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__,
-                    castInjectionResult_3_7.description().c_str());
-            return DEAD_OBJECT;
-        }
-
-        auto err = hidlInjectionSession_3_7->configureInjectionStreams(
-                requestedConfiguration3_7, hidlChars);
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error: %s", __FUNCTION__,
-                    err.description().c_str());
-            return DEAD_OBJECT;
-        }
-    } else {
-        ALOGE("%s: mHidlSession_3_7 does not exist, the lowest version of injection "
-                "session is 3.7", __FUNCTION__);
-        return DEAD_OBJECT;
-    }
-
-    return res;
-}
-
-status_t Camera3Device::HalInterface::wrapAsHidlRequest(camera_capture_request_t* request,
-        /*out*/device::V3_2::CaptureRequest* captureRequest,
-        /*out*/std::vector<native_handle_t*>* handlesCreated,
-        /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers) {
-    ATRACE_CALL();
-    if (captureRequest == nullptr || handlesCreated == nullptr || inflightBuffers == nullptr) {
-        ALOGE("%s: captureRequest (%p), handlesCreated (%p), and inflightBuffers(%p) "
-                "must not be null", __FUNCTION__, captureRequest, handlesCreated, inflightBuffers);
-        return BAD_VALUE;
-    }
-
-    captureRequest->frameNumber = request->frame_number;
-
-    captureRequest->fmqSettingsSize = 0;
-
-    {
-        if (request->input_buffer != nullptr) {
-            int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
-            buffer_handle_t buf = *(request->input_buffer->buffer);
-            auto pair = getBufferId(buf, streamId);
-            bool isNewBuffer = pair.first;
-            uint64_t bufferId = pair.second;
-            captureRequest->inputBuffer.streamId = streamId;
-            captureRequest->inputBuffer.bufferId = bufferId;
-            captureRequest->inputBuffer.buffer = (isNewBuffer) ? buf : nullptr;
-            captureRequest->inputBuffer.status = BufferStatus::OK;
-            native_handle_t *acquireFence = nullptr;
-            if (request->input_buffer->acquire_fence != -1) {
-                acquireFence = native_handle_create(1,0);
-                acquireFence->data[0] = request->input_buffer->acquire_fence;
-                handlesCreated->push_back(acquireFence);
-            }
-            captureRequest->inputBuffer.acquireFence = acquireFence;
-            captureRequest->inputBuffer.releaseFence = nullptr;
-
-            mBufferRecords.pushInflightBuffer(captureRequest->frameNumber, streamId,
-                    request->input_buffer->buffer);
-            inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
-        } else {
-            captureRequest->inputBuffer.streamId = -1;
-            captureRequest->inputBuffer.bufferId = BUFFER_ID_NO_BUFFER;
-        }
-
-        captureRequest->outputBuffers.resize(request->num_output_buffers);
-        for (size_t i = 0; i < request->num_output_buffers; i++) {
-            const camera_stream_buffer_t *src = request->output_buffers + i;
-            StreamBuffer &dst = captureRequest->outputBuffers[i];
-            int32_t streamId = Camera3Stream::cast(src->stream)->getId();
-            if (src->buffer != nullptr) {
-                buffer_handle_t buf = *(src->buffer);
-                auto pair = getBufferId(buf, streamId);
-                bool isNewBuffer = pair.first;
-                dst.bufferId = pair.second;
-                dst.buffer = isNewBuffer ? buf : nullptr;
-                native_handle_t *acquireFence = nullptr;
-                if (src->acquire_fence != -1) {
-                    acquireFence = native_handle_create(1,0);
-                    acquireFence->data[0] = src->acquire_fence;
-                    handlesCreated->push_back(acquireFence);
-                }
-                dst.acquireFence = acquireFence;
-            } else if (mUseHalBufManager) {
-                // HAL buffer management path
-                dst.bufferId = BUFFER_ID_NO_BUFFER;
-                dst.buffer = nullptr;
-                dst.acquireFence = nullptr;
-            } else {
-                ALOGE("%s: cannot send a null buffer in capture request!", __FUNCTION__);
-                return BAD_VALUE;
-            }
-            dst.streamId = streamId;
-            dst.status = BufferStatus::OK;
-            dst.releaseFence = nullptr;
-
-            // Output buffers are empty when using HAL buffer manager
-            if (!mUseHalBufManager) {
-                mBufferRecords.pushInflightBuffer(
-                        captureRequest->frameNumber, streamId, src->buffer);
-                inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
-            }
-        }
-    }
-    return OK;
-}
-
-void Camera3Device::HalInterface::cleanupNativeHandles(
+void Camera3Device::cleanupNativeHandles(
         std::vector<native_handle_t*> *handles, bool closeFd) {
     if (handles == nullptr) {
         return;
@@ -3808,301 +2799,9 @@
     return;
 }
 
-status_t Camera3Device::HalInterface::processBatchCaptureRequests(
-        std::vector<camera_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
-    ATRACE_NAME("CameraHal::processBatchCaptureRequests");
-    if (!valid()) return INVALID_OPERATION;
-
-    sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
-    sp<device::V3_7::ICameraDeviceSession> hidlSession_3_7;
-    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_7.isOk()) {
-        hidlSession_3_7 = castResult_3_7;
-    }
-    auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult_3_4.isOk()) {
-        hidlSession_3_4 = castResult_3_4;
-    }
-
-    hardware::hidl_vec<device::V3_2::CaptureRequest> captureRequests;
-    hardware::hidl_vec<device::V3_4::CaptureRequest> captureRequests_3_4;
-    hardware::hidl_vec<device::V3_7::CaptureRequest> captureRequests_3_7;
-    size_t batchSize = requests.size();
-    if (hidlSession_3_7 != nullptr) {
-        captureRequests_3_7.resize(batchSize);
-    } else if (hidlSession_3_4 != nullptr) {
-        captureRequests_3_4.resize(batchSize);
-    } else {
-        captureRequests.resize(batchSize);
-    }
-    std::vector<native_handle_t*> handlesCreated;
-    std::vector<std::pair<int32_t, int32_t>> inflightBuffers;
-
-    status_t res = OK;
-    for (size_t i = 0; i < batchSize; i++) {
-        if (hidlSession_3_7 != nullptr) {
-            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_7[i].v3_4.v3_2,
-                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
-        } else if (hidlSession_3_4 != nullptr) {
-            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
-                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
-        } else {
-            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests[i],
-                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
-        }
-        if (res != OK) {
-            mBufferRecords.popInflightBuffers(inflightBuffers);
-            cleanupNativeHandles(&handlesCreated);
-            return res;
-        }
-    }
-
-    std::vector<device::V3_2::BufferCache> cachesToRemove;
-    {
-        std::lock_guard<std::mutex> lock(mFreedBuffersLock);
-        for (auto& pair : mFreedBuffers) {
-            // The stream might have been removed since onBufferFreed
-            if (mBufferRecords.isStreamCached(pair.first)) {
-                cachesToRemove.push_back({pair.first, pair.second});
-            }
-        }
-        mFreedBuffers.clear();
-    }
-
-    common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
-    *numRequestProcessed = 0;
-
-    // Write metadata to FMQ.
-    for (size_t i = 0; i < batchSize; i++) {
-        camera_capture_request_t* request = requests[i];
-        device::V3_2::CaptureRequest* captureRequest;
-        if (hidlSession_3_7 != nullptr) {
-            captureRequest = &captureRequests_3_7[i].v3_4.v3_2;
-        } else if (hidlSession_3_4 != nullptr) {
-            captureRequest = &captureRequests_3_4[i].v3_2;
-        } else {
-            captureRequest = &captureRequests[i];
-        }
-
-        if (request->settings != nullptr) {
-            size_t settingsSize = get_camera_metadata_size(request->settings);
-            if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
-                    reinterpret_cast<const uint8_t*>(request->settings), settingsSize)) {
-                captureRequest->settings.resize(0);
-                captureRequest->fmqSettingsSize = settingsSize;
-            } else {
-                if (mRequestMetadataQueue != nullptr) {
-                    ALOGW("%s: couldn't utilize fmq, fallback to hwbinder", __FUNCTION__);
-                }
-                captureRequest->settings.setToExternal(
-                        reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(request->settings)),
-                        get_camera_metadata_size(request->settings));
-                captureRequest->fmqSettingsSize = 0u;
-            }
-        } else {
-            // A null request settings maps to a size-0 CameraMetadata
-            captureRequest->settings.resize(0);
-            captureRequest->fmqSettingsSize = 0u;
-        }
-
-        // hidl session 3.7 specific handling.
-        if (hidlSession_3_7 != nullptr) {
-            captureRequests_3_7[i].inputWidth = request->input_width;
-            captureRequests_3_7[i].inputHeight = request->input_height;
-        }
-
-        // hidl session 3.7 and 3.4 specific handling.
-        if (hidlSession_3_7 != nullptr || hidlSession_3_4 != nullptr) {
-            hardware::hidl_vec<device::V3_4::PhysicalCameraSetting>& physicalCameraSettings =
-                    (hidlSession_3_7 != nullptr) ?
-                    captureRequests_3_7[i].v3_4.physicalCameraSettings :
-                    captureRequests_3_4[i].physicalCameraSettings;
-            physicalCameraSettings.resize(request->num_physcam_settings);
-            for (size_t j = 0; j < request->num_physcam_settings; j++) {
-                if (request->physcam_settings != nullptr) {
-                    size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
-                    if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
-                                reinterpret_cast<const uint8_t*>(request->physcam_settings[j]),
-                                settingsSize)) {
-                        physicalCameraSettings[j].settings.resize(0);
-                        physicalCameraSettings[j].fmqSettingsSize = settingsSize;
-                    } else {
-                        if (mRequestMetadataQueue != nullptr) {
-                            ALOGW("%s: couldn't utilize fmq, fallback to hwbinder", __FUNCTION__);
-                        }
-                        physicalCameraSettings[j].settings.setToExternal(
-                                reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
-                                        request->physcam_settings[j])),
-                                get_camera_metadata_size(request->physcam_settings[j]));
-                        physicalCameraSettings[j].fmqSettingsSize = 0u;
-                    }
-                } else {
-                    physicalCameraSettings[j].fmqSettingsSize = 0u;
-                    physicalCameraSettings[j].settings.resize(0);
-                }
-                physicalCameraSettings[j].physicalCameraId = request->physcam_id[j];
-            }
-        }
-    }
-
-    hardware::details::return_status err;
-    auto resultCallback =
-        [&status, &numRequestProcessed] (auto s, uint32_t n) {
-                status = s;
-                *numRequestProcessed = n;
-        };
-    if (hidlSession_3_7 != nullptr) {
-        err = hidlSession_3_7->processCaptureRequest_3_7(captureRequests_3_7, cachesToRemove,
-                                                         resultCallback);
-    } else if (hidlSession_3_4 != nullptr) {
-        err = hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
-                                                         resultCallback);
-    } else {
-        err = mHidlSession->processCaptureRequest(captureRequests, cachesToRemove,
-                                                  resultCallback);
-    }
-    if (!err.isOk()) {
-        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-        status = common::V1_0::Status::CAMERA_DISCONNECTED;
-    }
-
-    if (status == common::V1_0::Status::OK && *numRequestProcessed != batchSize) {
-        ALOGE("%s: processCaptureRequest returns OK but processed %d/%zu requests",
-                __FUNCTION__, *numRequestProcessed, batchSize);
-        status = common::V1_0::Status::INTERNAL_ERROR;
-    }
-
-    res = CameraProviderManager::mapToStatusT(status);
-    if (res == OK) {
-        if (mHidlSession->isRemote()) {
-            // Only close acquire fence FDs when the HIDL transaction succeeds (so the FDs have been
-            // sent to camera HAL processes)
-            cleanupNativeHandles(&handlesCreated, /*closeFd*/true);
-        } else {
-            // In passthrough mode the FDs are now owned by HAL
-            cleanupNativeHandles(&handlesCreated);
-        }
-    } else {
-        mBufferRecords.popInflightBuffers(inflightBuffers);
-        cleanupNativeHandles(&handlesCreated);
-    }
-    return res;
-}
-
-status_t Camera3Device::HalInterface::flush() {
-    ATRACE_NAME("CameraHal::flush");
-    if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
-
-    auto err = mHidlSession->flush();
-    if (!err.isOk()) {
-        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-        res = DEAD_OBJECT;
-    } else {
-        res = CameraProviderManager::mapToStatusT(err);
-    }
-
-    return res;
-}
-
-status_t Camera3Device::HalInterface::dump(int /*fd*/) {
-    ATRACE_NAME("CameraHal::dump");
-    if (!valid()) return INVALID_OPERATION;
-
-    // Handled by CameraProviderManager::dump
-
-    return OK;
-}
-
-status_t Camera3Device::HalInterface::close() {
-    ATRACE_NAME("CameraHal::close()");
-    if (!valid()) return INVALID_OPERATION;
-    status_t res = OK;
-
-    auto err = mHidlSession->close();
-    // Interface will be dead shortly anyway, so don't log errors
-    if (!err.isOk()) {
-        res = DEAD_OBJECT;
-    }
-
-    return res;
-}
-
-void Camera3Device::HalInterface::signalPipelineDrain(const std::vector<int>& streamIds) {
-    ATRACE_NAME("CameraHal::signalPipelineDrain");
-    if (!valid() || mHidlSession_3_5 == nullptr) {
-        ALOGE("%s called on invalid camera!", __FUNCTION__);
-        return;
-    }
-
-    auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter - 1);
-    if (!err.isOk()) {
-        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-        return;
-    }
-}
-
-status_t Camera3Device::HalInterface::switchToOffline(
-        const std::vector<int32_t>& streamsToKeep,
-        /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
-        /*out*/camera3::BufferRecords* bufferRecords) {
-    ATRACE_NAME("CameraHal::switchToOffline");
-    if (!valid() || mHidlSession_3_6 == nullptr) {
-        ALOGE("%s called on invalid camera!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    if (offlineSessionInfo == nullptr || offlineSession == nullptr || bufferRecords == nullptr) {
-        ALOGE("%s: output arguments must not be null!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
-    auto resultCallback =
-        [&status, &offlineSessionInfo, &offlineSession] (auto s, auto info, auto session) {
-                status = s;
-                *offlineSessionInfo = info;
-                *offlineSession = session;
-        };
-    auto err = mHidlSession_3_6->switchToOffline(streamsToKeep, resultCallback);
-
-    if (!err.isOk()) {
-        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
-        return DEAD_OBJECT;
-    }
-
-    status_t ret = CameraProviderManager::mapToStatusT(status);
-    if (ret != OK) {
-        return ret;
-    }
-
-    // TODO: assert no ongoing requestBuffer/returnBuffer call here
-    // TODO: update RequestBufferStateMachine to block requestBuffer/returnBuffer once HAL
-    //       returns from switchToOffline.
-
-
-    // Validate buffer caches
-    std::vector<int32_t> streams;
-    streams.reserve(offlineSessionInfo->offlineStreams.size());
-    for (auto offlineStream : offlineSessionInfo->offlineStreams) {
-        int32_t id = offlineStream.id;
-        streams.push_back(id);
-        // Verify buffer caches
-        std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
-                offlineStream.circulatingBufferIds.end());
-        if (!verifyBufferIds(id, bufIds)) {
-            ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    // Move buffer records
-    bufferRecords->takeBufferCaches(mBufferRecords, streams);
-    bufferRecords->takeInflightBufferMap(mBufferRecords);
-    bufferRecords->takeRequestedBufferMap(mBufferRecords);
-    return ret;
-}
+/**
+ * HalInterface inner class methods
+ */
 
 void Camera3Device::HalInterface::getInflightBufferKeys(
         std::vector<std::pair<int32_t, int32_t>>* out) {
@@ -4182,6 +2881,7 @@
         mInterface(interface),
         mListener(nullptr),
         mId(getId(parent)),
+        mRequestClearing(false),
         mFirstRepeating(false),
         mReconfigured(false),
         mDoPause(false),
@@ -4348,10 +3048,20 @@
 }
 
 status_t Camera3Device::RequestThread::clearRepeatingRequestsLocked(/*out*/int64_t *lastFrameNumber) {
+    std::vector<int32_t> streamIds;
+    for (const auto& request : mRepeatingRequests) {
+        for (const auto& stream : request->mOutputStreams) {
+            streamIds.push_back(stream->getId());
+        }
+    }
+
     mRepeatingRequests.clear();
     if (lastFrameNumber != NULL) {
         *lastFrameNumber = mRepeatingLastFrameNumber;
     }
+
+    mInterface->repeatingRequestEnd(mRepeatingLastFrameNumber, streamIds);
+
     mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
     return OK;
 }
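// Illustrative sketch, not part of this change: repeatingRequestEnd() is the new pure-virtual
// HalInterface hook (declared in the Camera3Device.h hunk further below) that receives the last
// repeating frame number and the output streams the repeating burst was targeting. The actual
// transport-specific implementations are outside this diff; the hypothetical override here only
// shows the shape of the contract for a transport with no dedicated "repeating request ended" call.
#include <cstdint>
#include <vector>
#include <utils/Errors.h>  // android::status_t, android::OK

class NoopHalInterfaceExample {
  public:
    android::status_t repeatingRequestEnd(uint32_t /*frameNumber*/,
            const std::vector<int32_t>& /*streamIds*/) {
        return android::OK;  // nothing to forward for this hypothetical transport
    }
};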
@@ -4405,6 +3115,7 @@
         *lastFrameNumber = mRepeatingLastFrameNumber;
     }
     mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
+    mRequestClearing = true;
     mRequestSignal.signal();
     return OK;
 }
@@ -4532,13 +3243,16 @@
     return true;
 }
 
-nsecs_t Camera3Device::RequestThread::calculateMaxExpectedDuration(const camera_metadata_t *request) {
-    nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
+std::pair<nsecs_t, nsecs_t> Camera3Device::RequestThread::calculateExpectedDurationRange(
+        const camera_metadata_t *request) {
+    std::pair<nsecs_t, nsecs_t> expectedRange(
+            InFlightRequest::kDefaultMinExpectedDuration,
+            InFlightRequest::kDefaultMaxExpectedDuration);
     camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
     find_camera_metadata_ro_entry(request,
             ANDROID_CONTROL_AE_MODE,
             &e);
-    if (e.count == 0) return maxExpectedDuration;
+    if (e.count == 0) return expectedRange;
 
     switch (e.data.u8[0]) {
         case ANDROID_CONTROL_AE_MODE_OFF:
@@ -4546,13 +3260,15 @@
                     ANDROID_SENSOR_EXPOSURE_TIME,
                     &e);
             if (e.count > 0) {
-                maxExpectedDuration = e.data.i64[0];
+                expectedRange.first = e.data.i64[0];
+                expectedRange.second = expectedRange.first;
             }
             find_camera_metadata_ro_entry(request,
                     ANDROID_SENSOR_FRAME_DURATION,
                     &e);
             if (e.count > 0) {
-                maxExpectedDuration = std::max(e.data.i64[0], maxExpectedDuration);
+                expectedRange.first = std::max(e.data.i64[0], expectedRange.first);
+                expectedRange.second = expectedRange.first;
             }
             break;
         default:
@@ -4560,12 +3276,13 @@
                     ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                     &e);
             if (e.count > 1) {
-                maxExpectedDuration = 1e9 / e.data.u8[0];
+                expectedRange.first = 1e9 / e.data.i32[1];
+                expectedRange.second = 1e9 / e.data.i32[0];
             }
             break;
     }
 
-    return maxExpectedDuration;
+    return expectedRange;
 }
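// Illustrative sketch, not part of this change: in the default branch above (AE mode not OFF), the
// expected duration range is derived from ANDROID_CONTROL_AE_TARGET_FPS_RANGE as
// [1e9 / fpsMax, 1e9 / fpsMin] nanoseconds. Standalone arithmetic only; names below are
// placeholders, not framework code.
#include <cstdint>
#include <cstdio>
#include <utility>

std::pair<int64_t, int64_t> durationRangeFromFpsRange(int32_t fpsMin, int32_t fpsMax) {
    // Higher fps means shorter frame duration, so the upper fps bound yields the minimum duration.
    return { static_cast<int64_t>(1e9 / fpsMax), static_cast<int64_t>(1e9 / fpsMin) };
}

int main() {
    auto range = durationRangeFromFpsRange(15, 30);
    // For a [15, 30] fps target range this prints roughly 33333333 ns (min) and 66666666 ns (max).
    std::printf("min=%lld ns max=%lld ns\n",
            static_cast<long long>(range.first), static_cast<long long>(range.second));
    return 0;
}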
 
 bool Camera3Device::RequestThread::skipHFRTargetFPSUpdate(int32_t tag,
@@ -4597,9 +3314,15 @@
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent != NULL) {
+            int32_t inputStreamId = -1;
+            if (halRequest.input_buffer != nullptr) {
+              inputStreamId = Camera3Stream::cast(halRequest.input_buffer->stream)->getId();
+            }
+
             parent->monitorMetadata(TagMonitor::REQUEST,
                     halRequest.frame_number,
-                    0, mLatestRequest, mLatestPhysicalRequest);
+                    0, mLatestRequest, mLatestPhysicalRequest, halRequest.output_buffers,
+                    halRequest.num_output_buffers, inputStreamId);
         }
     }
 
@@ -5174,11 +3897,13 @@
                 isZslCapture = true;
             }
         }
+        auto expectedDurationRange = calculateExpectedDurationRange(settings);
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
                 /*hasInput*/halRequest->input_buffer != NULL,
                 hasCallback,
-                calculateMaxExpectedDuration(settings),
+                /*min*/expectedDurationRange.first,
+                /*max*/expectedDurationRange.second,
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
                 captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
                 (mUseHalBufManager) ? uniqueSurfaceIdMap :
@@ -5310,38 +4035,6 @@
     mPrevRequest.clear();
 }
 
-status_t Camera3Device::RequestThread::switchToOffline(
-        const std::vector<int32_t>& streamsToKeep,
-        /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
-        /*out*/camera3::BufferRecords* bufferRecords) {
-    Mutex::Autolock l(mRequestLock);
-    clearRepeatingRequestsLocked(/*lastFrameNumber*/nullptr);
-
-    // Wait until request thread is fully stopped
-    // TBD: check if request thread is being paused by other APIs (shouldn't be)
-
-    // We could also check for mRepeatingRequests.empty(), but the API interface
-    // is serialized by Camera3Device::mInterfaceLock so no one should be able to submit any
-    // new requests during the call; hence skip that check.
-    bool queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
-    while (!queueEmpty) {
-        status_t res = mRequestSubmittedSignal.waitRelative(mRequestLock, kRequestSubmitTimeout);
-        if (res == TIMED_OUT) {
-            ALOGE("%s: request thread failed to submit one request within timeout!", __FUNCTION__);
-            return res;
-        } else if (res != OK) {
-            ALOGE("%s: request thread failed to submit a request: %s (%d)!",
-                    __FUNCTION__, strerror(-res), res);
-            return res;
-        }
-        queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
-    }
-
-    return mInterface->switchToOffline(
-            streamsToKeep, offlineSessionInfo, offlineSession, bufferRecords);
-}
-
 status_t Camera3Device::RequestThread::setRotateAndCropAutoBehavior(
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) {
     ATRACE_CALL();
@@ -5438,7 +4131,8 @@
                     outputBuffers->editItemAt(i).acquire_fence = -1;
                 }
                 outputBuffers->editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
-                captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i], 0,
+                captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i],
+                        /*timestamp*/0, /*readoutTimestamp*/0,
                         /*timestampIncreasing*/true, std::vector<size_t> (),
                         captureRequest->mResultExtras.frameNumber);
             }
@@ -5550,7 +4244,9 @@
             break;
         }
 
-        res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
+        if (!mRequestClearing) {
+            res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
+        }
 
         if ((mRequestQueue.empty() && mRepeatingRequests.empty()) ||
                 exitPending()) {
@@ -5573,6 +4269,7 @@
                     parent->mRequestBufferSM.onRequestThreadPaused();
                 }
             }
+            mRequestClearing = false;
             // Stop waiting for now and let thread management happen
             return NULL;
         }
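// Illustrative sketch, not part of this change: the mRequestClearing flag added above lets the
// request-clearing path wake the request thread and have it skip the next timed wait on
// mRequestSignal, so the cleared queues are noticed without waiting out kRequestTimeout; the flag
// is reset once the thread observes the empty queues and pauses. A loosely analogous standalone
// version of that flag-plus-condition-variable pattern (all names are placeholders):
#include <chrono>
#include <condition_variable>
#include <mutex>

struct RequestWaiter {
    std::mutex lock;
    std::condition_variable signal;
    bool clearing = false;

    // Worker-thread side: block only when no clear is pending.
    void waitForWork() {
        std::unique_lock<std::mutex> l(lock);
        if (!clearing) {
            signal.wait_for(l, std::chrono::milliseconds(50));
        }
        clearing = false;  // cleared state has been observed; stop skipping the wait
    }

    // Caller side: mark the queues as cleared and wake the worker.
    void notifyCleared() {
        std::lock_guard<std::mutex> l(lock);
        clearing = true;
        signal.notify_one();
    }
};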
@@ -6371,220 +5068,6 @@
     return ret;
 }
 
-status_t Camera3Device::switchToOffline(
-        const std::vector<int32_t>& streamsToKeep,
-        /*out*/ sp<CameraOfflineSessionBase>* session) {
-    ATRACE_CALL();
-    if (session == nullptr) {
-        ALOGE("%s: session must not be null", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock il(mInterfaceLock);
-
-    bool hasInputStream = mInputStream != nullptr;
-    int32_t inputStreamId = hasInputStream ? mInputStream->getId() : -1;
-    bool inputStreamSupportsOffline = hasInputStream ?
-            mInputStream->getOfflineProcessingSupport() : false;
-    auto outputStreamIds = mOutputStreams.getStreamIds();
-    auto streamIds = outputStreamIds;
-    if (hasInputStream) {
-        streamIds.push_back(mInputStream->getId());
-    }
-
-    // Check all streams in streamsToKeep supports offline mode
-    for (auto id : streamsToKeep) {
-        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
-            ALOGE("%s: Unknown stream ID %d", __FUNCTION__, id);
-            return BAD_VALUE;
-        } else if (id == inputStreamId) {
-            if (!inputStreamSupportsOffline) {
-                ALOGE("%s: input stream %d cannot be switched to offline",
-                        __FUNCTION__, id);
-                return BAD_VALUE;
-            }
-        } else {
-            sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
-            if (!stream->getOfflineProcessingSupport()) {
-                ALOGE("%s: output stream %d cannot be switched to offline",
-                        __FUNCTION__, id);
-                return BAD_VALUE;
-            }
-        }
-    }
-
-    // TODO: block surface sharing and surface group streams until we can support them
-
-    // Stop repeating request, wait until all remaining requests are submitted, then call into
-    // HAL switchToOffline
-    hardware::camera::device::V3_6::CameraOfflineSessionInfo offlineSessionInfo;
-    sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession;
-    camera3::BufferRecords bufferRecords;
-    status_t ret = mRequestThread->switchToOffline(
-            streamsToKeep, &offlineSessionInfo, &offlineSession, &bufferRecords);
-
-    if (ret != OK) {
-        SET_ERR("Switch to offline failed: %s (%d)", strerror(-ret), ret);
-        return ret;
-    }
-
-    bool succ = mRequestBufferSM.onSwitchToOfflineSuccess();
-    if (!succ) {
-        SET_ERR("HAL must not be calling requestStreamBuffers call");
-        // TODO: block ALL callbacks from HAL till app configured new streams?
-        return UNKNOWN_ERROR;
-    }
-
-    // Verify offlineSessionInfo
-    std::vector<int32_t> offlineStreamIds;
-    offlineStreamIds.reserve(offlineSessionInfo.offlineStreams.size());
-    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
-        // verify stream IDs
-        int32_t id = offlineStream.id;
-        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
-            SET_ERR("stream ID %d not found!", id);
-            return UNKNOWN_ERROR;
-        }
-
-        // When not using HAL buf manager, only allow streams requested by app to be preserved
-        if (!mUseHalBufManager) {
-            if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
-                SET_ERR("stream ID %d must not be switched to offline!", id);
-                return UNKNOWN_ERROR;
-            }
-        }
-
-        offlineStreamIds.push_back(id);
-        sp<Camera3StreamInterface> stream = (id == inputStreamId) ?
-                static_cast<sp<Camera3StreamInterface>>(mInputStream) :
-                static_cast<sp<Camera3StreamInterface>>(mOutputStreams.get(id));
-        // Verify number of outstanding buffers
-        if (stream->getOutstandingBuffersCount() != offlineStream.numOutstandingBuffers) {
-            SET_ERR("Offline stream %d # of remaining buffer mismatch: (%zu,%d) (service/HAL)",
-                    id, stream->getOutstandingBuffersCount(), offlineStream.numOutstandingBuffers);
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    // Verify all streams to be deleted don't have any outstanding buffers
-    if (hasInputStream && std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
-                inputStreamId) == offlineStreamIds.end()) {
-        if (mInputStream->hasOutstandingBuffers()) {
-            SET_ERR("Input stream %d still has %zu outstanding buffer!",
-                    inputStreamId, mInputStream->getOutstandingBuffersCount());
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    for (const auto& outStreamId : outputStreamIds) {
-        if (std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
-                outStreamId) == offlineStreamIds.end()) {
-            auto outStream = mOutputStreams.get(outStreamId);
-            if (outStream->hasOutstandingBuffers()) {
-                SET_ERR("Output stream %d still has %zu outstanding buffer!",
-                        outStreamId, outStream->getOutstandingBuffersCount());
-                return UNKNOWN_ERROR;
-            }
-        }
-    }
-
-    InFlightRequestMap offlineReqs;
-    // Verify inflight requests and their pending buffers
-    {
-        std::lock_guard<std::mutex> l(mInFlightLock);
-        for (auto offlineReq : offlineSessionInfo.offlineRequests) {
-            int idx = mInFlightMap.indexOfKey(offlineReq.frameNumber);
-            if (idx == NAME_NOT_FOUND) {
-                SET_ERR("Offline request frame number %d not found!", offlineReq.frameNumber);
-                return UNKNOWN_ERROR;
-            }
-
-            const auto& inflightReq = mInFlightMap.valueAt(idx);
-            // TODO: check specific stream IDs
-            size_t numBuffersLeft = static_cast<size_t>(inflightReq.numBuffersLeft);
-            if (numBuffersLeft != offlineReq.pendingStreams.size()) {
-                SET_ERR("Offline request # of remaining buffer mismatch: (%d,%d) (service/HAL)",
-                        inflightReq.numBuffersLeft, offlineReq.pendingStreams.size());
-                return UNKNOWN_ERROR;
-            }
-            offlineReqs.add(offlineReq.frameNumber, inflightReq);
-        }
-    }
-
-    // Create Camera3OfflineSession and transfer object ownership
-    //   (streams, inflight requests, buffer caches)
-    camera3::StreamSet offlineStreamSet;
-    sp<camera3::Camera3Stream> inputStream;
-    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
-        int32_t id = offlineStream.id;
-        if (mInputStream != nullptr && id == mInputStream->getId()) {
-            inputStream = mInputStream;
-        } else {
-            offlineStreamSet.add(id, mOutputStreams.get(id));
-        }
-    }
-
-    // TODO: check if we need to lock before copying states
-    //       though technically no other thread should be talking to Camera3Device at this point
-    Camera3OfflineStates offlineStates(
-            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
-            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
-            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
-            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
-            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
-            mZoomRatioMappers, mRotateAndCropMappers);
-
-    *session = new Camera3OfflineSession(mId, inputStream, offlineStreamSet,
-            std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
-
-    // Delete all streams that has been transferred to offline session
-    Mutex::Autolock l(mLock);
-    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
-        int32_t id = offlineStream.id;
-        if (mInputStream != nullptr && id == mInputStream->getId()) {
-            mInputStream.clear();
-        } else {
-            mOutputStreams.remove(id);
-        }
-    }
-
-    // disconnect all other streams and switch to UNCONFIGURED state
-    if (mInputStream != nullptr) {
-        ret = mInputStream->disconnect();
-        if (ret != OK) {
-            SET_ERR_L("disconnect input stream failed!");
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    for (auto streamId : mOutputStreams.getStreamIds()) {
-        sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
-        ret = stream->disconnect();
-        if (ret != OK) {
-            SET_ERR_L("disconnect output stream %d failed!", streamId);
-            return UNKNOWN_ERROR;
-        }
-    }
-
-    mInputStream.clear();
-    mOutputStreams.clear();
-    mNeedConfig = true;
-    internalUpdateStatusLocked(STATUS_UNCONFIGURED);
-    mOperatingMode = NO_MODE;
-    mIsConstrainedHighSpeedConfiguration = false;
-    mRequestThread->clearPreviousRequest();
-
-    return OK;
-    // TO be done by CameraDeviceClient/Camera3OfflineSession
-    // register the offline client to camera service
-    // Setup result passthing threads etc
-    // Initialize offline session so HAL can start sending callback to it (result Fmq)
-    // TODO: check how many onIdle callback will be sent
-    // Java side to make sure the CameraCaptureSession is properly closed
-}
-
 void Camera3Device::getOfflineStreamIds(std::vector<int> *offlineStreamIds) {
     ATRACE_CALL();
 
@@ -6649,6 +5132,13 @@
     ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.string());
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
+    // When the camera device is active, injectCamera() and stopInjection() call
+    // internalPauseAndWaitLocked() and internalResumeLocked(), which in turn call
+    // mStatusChanged.waitRelative(mLock, timeout) inside waitUntilStateThenRelock().
+    // waitRelative() requires its mutex ("mLock") to already be held by the caller, so take
+    // "mLock" here with "Mutex::Autolock l(mLock)" before waitUntilStateThenRelock() can be
+    // reached.
+    Mutex::Autolock l(mLock);
 
     status_t res = NO_ERROR;
     if (mInjectionMethods->isInjecting()) {
@@ -6664,23 +5154,32 @@
         }
     }
 
-    res = mInjectionMethods->injectionInitialize(injectedCamId, manager, this);
+    res = injectionCameraInitialize(injectedCamId, manager);
     if (res != OK) {
         ALOGE("%s: Failed to initialize the injection camera! ret != NO_ERROR: %d",
                 __FUNCTION__, res);
         return res;
     }
 
-    camera3::camera_stream_configuration injectionConfig;
-    std::vector<uint32_t> injectionBufferSizes;
-    mInjectionMethods->getInjectionConfig(&injectionConfig, &injectionBufferSizes);
     // When the second display of android is cast to the remote device, and the opened camera is
     // also cast to the second display, in this case, because the camera has configured the streams
     // at this time, we can directly call injectCamera() to replace the internal camera with
     // injection camera.
-    if (mOperatingMode >= 0 && injectionConfig.num_streams > 0
-                && injectionBufferSizes.size() > 0) {
-        ALOGV("%s: The opened camera is directly cast to the remote device.", __FUNCTION__);
+    if (mInjectionMethods->isStreamConfigCompleteButNotInjected()) {
+        ALOGD("%s: The opened camera is directly cast to the remote device.", __FUNCTION__);
+
+        camera3::camera_stream_configuration injectionConfig;
+        std::vector<uint32_t> injectionBufferSizes;
+        mInjectionMethods->getInjectionConfig(&injectionConfig, &injectionBufferSizes);
+        if (mOperatingMode < 0 || injectionConfig.num_streams <= 0
+                    || injectionBufferSizes.size() <= 0) {
+            ALOGE("Failed to inject camera due to abandoned configuration! "
+                    "mOperatingMode: %d injectionConfig.num_streams: %d "
+                    "injectionBufferSizes.size(): %zu", mOperatingMode,
+                    injectionConfig.num_streams, injectionBufferSizes.size());
+            return DEAD_OBJECT;
+        }
+
         res = mInjectionMethods->injectCamera(
                 injectionConfig, injectionBufferSizes);
         if (res != OK) {
@@ -6695,6 +5194,7 @@
 status_t Camera3Device::stopInjection() {
     ALOGI("%s: Injection camera: stopInjection", __FUNCTION__);
     Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
     return mInjectionMethods->stopInjection();
 }
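// Illustrative sketch, not part of this change: the locking comment added in injectCamera() above
// describes the usual condition-variable contract -- the mutex handed to waitRelative() must
// already be held by the caller, which is why both injectCamera() and stopInjection() now take
// mLock before any path that reaches waitUntilStateThenRelock(). The same contract in standard
// C++ (placeholder names, not framework code):
#include <chrono>
#include <condition_variable>
#include <mutex>

void waitForStateChange(std::mutex& stateLock, std::condition_variable& stateChanged) {
    // unique_lock acquires stateLock first; wait_for() then atomically releases it while sleeping
    // and re-acquires it before returning, just as waitRelative(mLock, ...) expects mLock to be
    // held on entry.
    std::unique_lock<std::mutex> l(stateLock);
    stateChanged.wait_for(l, std::chrono::milliseconds(100));
}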
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index e8a6a08..f927b4d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -29,20 +29,11 @@
 #include <utils/KeyedVector.h>
 #include <utils/Timers.h>
 
-#include <android/hardware/camera/device/3.2/ICameraDevice.h>
-#include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.3/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.6/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
-#include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
-#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
-#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
-#include <fmq/MessageQueue.h>
-
 #include <camera/CaptureResult.h>
 
+#include "CameraServiceWatchdog.h"
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+
 #include "common/CameraDeviceBase.h"
 #include "device3/BufferUtils.h"
 #include "device3/StatusTracker.h"
@@ -56,11 +47,11 @@
 #include "device3/Camera3OfflineSession.h"
 #include "device3/Camera3StreamInterface.h"
 #include "utils/TagMonitor.h"
+#include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
 #include <camera_metadata_hidden.h>
 
 using android::camera3::camera_capture_request_t;
-using android::camera3::camera_jpeg_blob_t;
 using android::camera3::camera_request_template;
 using android::camera3::camera_stream_buffer_t;
 using android::camera3::camera_stream_configuration_t;
@@ -83,16 +74,24 @@
  */
 class Camera3Device :
             public CameraDeviceBase,
-            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
             public camera3::SetErrorInterface,
             public camera3::InflightRequestUpdateInterface,
             public camera3::RequestBufferInterface,
             public camera3::FlushBufferInterface {
+  friend class HidlCamera3Device;
+  friend class AidlCamera3Device;
   public:
 
     explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool legacyClient = false);
 
     virtual ~Camera3Device();
+    // Delete and optionally close native handles and clear the input vector afterward
+    static void cleanupNativeHandles(
+            std::vector<native_handle_t*> *handles, bool closeFd = false);
+
+    virtual IPCTransport getTransportType() const override {
+        return mInterface->getTransportType();
+    }
 
     /**
      * CameraDeviceBase interface
@@ -102,10 +101,18 @@
 
     metadata_vendor_id_t getVendorTagId() const override { return mVendorTagId; }
 
+    // Watchdog thread
+    sp<CameraServiceWatchdog> mCameraServiceWatchdog;
+
     // Transitions to idle state on success.
-    status_t initialize(sp<CameraProviderManager> manager, const String8& monitorTags) override;
+    virtual status_t initialize(sp<CameraProviderManager> /*manager*/,
+            const String8& /*monitorTags*/) = 0;
+
     status_t disconnect() override;
     status_t dump(int fd, const Vector<String16> &args) override;
+    status_t startWatchingTags(const String8 &tags) override;
+    status_t stopWatchingTags() override;
+    status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
     const CameraMetadata& info() const override;
     const CameraMetadata& infoPhysical(const String8& physicalId) const override;
 
@@ -137,7 +144,12 @@
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
-            uint64_t consumerUsage = 0) override;
+            uint64_t consumerUsage = 0,
+            int64_t dynamicRangeProfile =
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
 
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -147,7 +159,12 @@
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
             bool isShared = false, bool isMultiResolution = false,
-            uint64_t consumerUsage = 0) override;
+            uint64_t consumerUsage = 0,
+            int64_t dynamicRangeProfile =
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
 
     status_t createInputStream(
             uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -225,8 +242,10 @@
 
     nsecs_t getExpectedInFlightDuration() override;
 
-    status_t switchToOffline(const std::vector<int32_t>& streamsToKeep,
-            /*out*/ sp<CameraOfflineSessionBase>* session) override;
+    virtual status_t switchToOffline(const std::vector<int32_t>& /*streamsToKeep*/,
+            /*out*/ sp<CameraOfflineSessionBase>* /*session*/) override {
+        return INVALID_OPERATION;
+    }
 
     // RequestBufferInterface
     bool startRequestBuffer() override;
@@ -277,32 +296,11 @@
      */
     status_t stopInjection();
 
-    /**
-     * Helper functions to map between framework and HIDL values
-     */
-    static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
-    static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
-            android_dataspace dataSpace);
-    static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint64_t usage);
-    static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
-            camera_stream_rotation_t rotation);
-    // Returns a negative error code if the passed-in operation mode is not valid.
-    static status_t mapToStreamConfigurationMode(camera_stream_configuration_mode_t operationMode,
-            /*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
-    static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
-    static android_dataspace mapToFrameworkDataspace(
-            hardware::camera::device::V3_2::DataspaceFlags);
-    static uint64_t mapConsumerToFrameworkUsage(
-            hardware::camera::device::V3_2::BufferUsageFlags usage);
-    static uint64_t mapProducerToFrameworkUsage(
-            hardware::camera::device::V3_2::BufferUsageFlags usage);
-
-  private:
+  protected:
     status_t disconnectImpl();
     static status_t removeFwkOnlyRegionKeys(CameraMetadata *request);
 
-    // internal typedefs
-    using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
+    float getMaxPreviewFps(sp<camera3::Camera3OutputStreamInterface> stream);
 
     static const size_t        kDumpLockAttempts  = 10;
     static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
@@ -310,7 +308,6 @@
     static const nsecs_t       kMinWarnInflightDuration = 5000000000; // 5 s
     static const size_t        kInFlightWarnLimit = 30;
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
-    static const nsecs_t       kDefaultExpectedDuration = 100000000; // 100 ms
     static const nsecs_t       kMinInflightDuration = 5000000000; // 5 s
     static const nsecs_t       kBaseGetBufferWait = 3000000000; // 3 sec.
     // SCHED_FIFO priority for request submission thread in HFR mode
@@ -318,7 +315,8 @@
 
     struct                     RequestTrigger;
     // minimal jpeg buffer size: 256KB + blob header
-    static const ssize_t       kMinJpegBufferSize = 256 * 1024 + sizeof(camera_jpeg_blob_t);
+    static const ssize_t       kMinJpegBufferSize =
+            256 * 1024 + sizeof(aidl::android::hardware::camera::device::CameraBlob);
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
 
@@ -349,68 +347,65 @@
     // Flag indicating is the current active stream configuration is constrained high speed.
     bool                       mIsConstrainedHighSpeedConfiguration;
 
-    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
-    std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
-
     /**** Scope for mLock ****/
 
-    /**
-     * Adapter for legacy HAL / HIDL HAL interface calls; calls either into legacy HALv3 or the
-     * HIDL HALv3 interfaces.
-     */
     class HalInterface : public camera3::Camera3StreamBufferFreedListener,
             public camera3::BufferRecordsInterface {
       public:
-        HalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session,
-                     std::shared_ptr<RequestMetadataQueue> queue,
-                     bool useHalBufManager, bool supportOfflineProcessing);
+        HalInterface(bool useHalBufManager, bool supportOfflineProcessing) :
+                mUseHalBufManager(useHalBufManager),
+                mIsReconfigurationQuerySupported(true),
+                mSupportOfflineProcessing(supportOfflineProcessing) {}
         HalInterface(const HalInterface &other);
         HalInterface();
 
+        virtual IPCTransport getTransportType() const = 0;
+
         // Returns true if constructed with a valid device or session, and not yet cleared
-        bool valid();
+        virtual bool valid() = 0;
 
         // Reset this HalInterface object (does not call close())
-        void clear();
+        virtual void clear() = 0;
 
         // Calls into the HAL interface
 
         // Caller takes ownership of requestTemplate
-        status_t constructDefaultRequestSettings(camera_request_template templateId,
-                /*out*/ camera_metadata_t **requestTemplate);
-        status_t configureStreams(const camera_metadata_t *sessionParams,
-                /*inout*/ camera_stream_configuration_t *config,
-                const std::vector<uint32_t>& bufferSizes);
+        virtual status_t constructDefaultRequestSettings(camera_request_template templateId,
+                /*out*/ camera_metadata_t **requestTemplate) = 0;
+
+        virtual status_t configureStreams(const camera_metadata_t * sessionParams,
+                /*inout*/ camera_stream_configuration_t * config,
+                const std::vector<uint32_t>& bufferSizes) = 0;
 
         // The injection camera configures the streams to hal.
-        status_t configureInjectedStreams(
+        virtual status_t configureInjectedStreams(
                 const camera_metadata_t* sessionParams,
                 /*inout*/ camera_stream_configuration_t* config,
                 const std::vector<uint32_t>& bufferSizes,
-                const CameraMetadata& cameraCharacteristics);
+                const CameraMetadata& cameraCharacteristics) = 0;
 
         // When the call succeeds, the ownership of acquire fences in requests is transferred to
         // HalInterface. More specifically, the current implementation will send the fence to
         // HAL process and close the FD in cameraserver process. When the call fails, the ownership
         // of the acquire fence still belongs to the caller.
-        status_t processBatchCaptureRequests(
+        virtual status_t processBatchCaptureRequests(
                 std::vector<camera_capture_request_t*>& requests,
-                /*out*/uint32_t* numRequestProcessed);
-        status_t flush();
-        status_t dump(int fd);
-        status_t close();
+                /*out*/uint32_t* numRequestProcessed) = 0;
 
-        void signalPipelineDrain(const std::vector<int>& streamIds);
-        bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
-                CameraMetadata& newSessionParams);
+        virtual status_t flush() = 0;
 
-        // Upon successful return, HalInterface will return buffer maps needed for offline
-        // processing, and clear all its internal buffer maps.
-        status_t switchToOffline(
-                const std::vector<int32_t>& streamsToKeep,
-                /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
-                /*out*/camera3::BufferRecords* bufferRecords);
+        virtual status_t dump(int fd) = 0;
+
+        virtual status_t close() = 0;
+
+        virtual void signalPipelineDrain(const std::vector<int>& streamIds) = 0;
+
+        virtual bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
+                CameraMetadata& newSessionParams) = 0;
+
+        virtual status_t repeatingRequestEnd(uint32_t frameNumber,
+                const std::vector<int32_t> &streamIds) = 0;
 
         /////////////////////////////////////////////////////////////////////
         // Implements BufferRecordsInterface
@@ -441,41 +436,35 @@
 
         void onStreamReConfigured(int streamId);
 
-      private:
-        // Always valid
-        sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
-        // Valid if ICameraDeviceSession is @3.3 or newer
-        sp<hardware::camera::device::V3_3::ICameraDeviceSession> mHidlSession_3_3;
-        // Valid if ICameraDeviceSession is @3.4 or newer
-        sp<hardware::camera::device::V3_4::ICameraDeviceSession> mHidlSession_3_4;
-        // Valid if ICameraDeviceSession is @3.5 or newer
-        sp<hardware::camera::device::V3_5::ICameraDeviceSession> mHidlSession_3_5;
-        // Valid if ICameraDeviceSession is @3.6 or newer
-        sp<hardware::camera::device::V3_6::ICameraDeviceSession> mHidlSession_3_6;
-        // Valid if ICameraDeviceSession is @3.7 or newer
-        sp<hardware::camera::device::V3_7::ICameraDeviceSession> mHidlSession_3_7;
-
-        std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
-
-        // The output HIDL request still depends on input camera_capture_request_t
-        // Do not free input camera_capture_request_t before output HIDL request
-        status_t wrapAsHidlRequest(camera_capture_request_t* in,
-                /*out*/hardware::camera::device::V3_2::CaptureRequest* out,
-                /*out*/std::vector<native_handle_t*>* handlesCreated,
-                /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers);
-
-        status_t pushInflightBufferLocked(int32_t frameNumber, int32_t streamId,
-                buffer_handle_t *buffer);
-
-        // Pop inflight buffers based on pairs of (frameNumber,streamId)
-        void popInflightBuffers(const std::vector<std::pair<int32_t, int32_t>>& buffers);
+      protected:
 
         // Return true if the input caches match what we have; otherwise false
         bool verifyBufferIds(int32_t streamId, std::vector<uint64_t>& inBufIds);
 
-        // Delete and optionally close native handles and clear the input vector afterward
-        static void cleanupNativeHandles(
-                std::vector<native_handle_t*> *handles, bool closeFd = false);
+        template <typename OfflineSessionInfoT>
+        status_t verifyBufferCaches(
+            const OfflineSessionInfoT *offlineSessionInfo, camera3::BufferRecords *bufferRecords) {
+            // Validate buffer caches
+            std::vector<int32_t> streams;
+            streams.reserve(offlineSessionInfo->offlineStreams.size());
+            for (auto offlineStream : offlineSessionInfo->offlineStreams) {
+                int32_t id = offlineStream.id;
+                streams.push_back(id);
+                // Verify buffer caches
+                std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
+                        offlineStream.circulatingBufferIds.end());
+                if (!verifyBufferIds(id, bufIds)) {
+                    ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
+                    return UNKNOWN_ERROR;
+                }
+            }
+
+            // Move buffer records
+            bufferRecords->takeBufferCaches(mBufferRecords, streams);
+            bufferRecords->takeInflightBufferMap(mBufferRecords);
+            bufferRecords->takeRequestedBufferMap(mBufferRecords);
+            return OK;
+        }
 
         virtual void onBufferFreed(int streamId, const native_handle_t* handle) override;
 
@@ -491,7 +480,7 @@
         bool mIsReconfigurationQuerySupported;
 
         const bool mSupportOfflineProcessing;
-    };
+    }; // class HalInterface
 
     sp<HalInterface> mInterface;
 
@@ -551,6 +540,7 @@
 
     /**** End scope for mLock ****/
 
+    bool                       mDeviceTimeBaseIsRealtime;
     // The offset converting from clock domain of other subsystem
     // (video/hardware composer) to that of camera. Assumption is that this
     // offset won't change during the life cycle of the camera device. In other
@@ -617,33 +607,6 @@
                                   bool repeating,
                                   int64_t *lastFrameNumber = NULL);
 
-
-    /**
-     * Implementation of android::hardware::camera::device::V3_5::ICameraDeviceCallback
-     */
-
-    hardware::Return<void> processCaptureResult_3_4(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_4::CaptureResult>& results) override;
-    hardware::Return<void> processCaptureResult(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_2::CaptureResult>& results) override;
-    hardware::Return<void> notify(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_2::NotifyMsg>& msgs) override;
-
-    hardware::Return<void> requestStreamBuffers(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_5::BufferRequest>& bufReqs,
-            requestStreamBuffers_cb _hidl_cb) override;
-
-    hardware::Return<void> returnStreamBuffers(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
-
-    // Handle one notify message
-    void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
-
     // lock to ensure only one processCaptureResult is called at a time.
     Mutex mProcessCaptureResultLock;
 
@@ -661,6 +624,9 @@
      */
     virtual CameraMetadata getLatestRequestLocked();
 
+    virtual status_t injectionCameraInitialize(const String8 &injectCamId,
+            sp<CameraProviderManager> manager) = 0;
+
     /**
      * Update the current device status and wake all waiting threads.
      *
@@ -909,12 +875,6 @@
         void signalPipelineDrain(const std::vector<int>& streamIds);
         void resetPipelineDrain();
 
-        status_t switchToOffline(
-                const std::vector<int32_t>& streamsToKeep,
-                /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
-                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
-                /*out*/camera3::BufferRecords* bufferRecords);
-
         void clearPreviousRequest();
 
         status_t setRotateAndCropAutoBehavior(
@@ -929,7 +889,6 @@
 
         virtual bool threadLoop();
 
-      private:
         static const String8& getId(const wp<Camera3Device> &device);
 
         status_t           queueTriggerLocked(RequestTrigger trigger);
@@ -1003,8 +962,9 @@
         // Send requests in mNextRequests to HAL in a batch. Return true = success
         bool sendRequestsBatch();
 
-        // Calculate the expected maximum duration for a request
-        nsecs_t calculateMaxExpectedDuration(const camera_metadata_t *request);
+        // Calculate the expected (minimum, maximum) duration range for a request
+        std::pair<nsecs_t, nsecs_t> calculateExpectedDurationRange(
+                const camera_metadata_t *request);
 
         // Check and update latest session parameters based on the current request settings.
         bool updateSessionParameters(const CameraMetadata& settings);
@@ -1029,6 +989,8 @@
 
         Mutex              mRequestLock;
         Condition          mRequestSignal;
+        bool               mRequestClearing;
+
         Condition          mRequestSubmittedSignal;
         RequestList        mRequestQueue;
         RequestList        mRepeatingRequests;
@@ -1094,6 +1056,14 @@
         const bool         mUseHalBufManager;
         const bool         mSupportCameraMute;
     };
+
+    virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
+                sp<camera3::StatusTracker> /*statusTracker*/,
+                sp<HalInterface> /*interface*/,
+                const Vector<int32_t>& /*sessionParamKeys*/,
+                bool /*useHalBufManager*/,
+                bool /*supportCameraMute*/) = 0;
+
     sp<RequestThread> mRequestThread;
 
     /**
@@ -1111,7 +1081,7 @@
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-            bool callback, nsecs_t maxExpectedDuration,
+            bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
             const std::set<std::set<String8>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
@@ -1254,7 +1224,9 @@
 
     void monitorMetadata(TagMonitor::eventSource source, int64_t frameNumber,
             nsecs_t timestamp, const CameraMetadata& metadata,
-            const std::unordered_map<std::string, CameraMetadata>& physicalMetadata);
+            const std::unordered_map<std::string, CameraMetadata>& physicalMetadata,
+            const camera_stream_buffer_t *outputBuffers, uint32_t numOutputBuffers,
+            int32_t inputStreamId);
 
     metadata_vendor_id_t mVendorTagId;
 
@@ -1360,6 +1332,9 @@
     // performance class.
     bool mOverrideForPerfClass;
 
+    // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
+    nsecs_t mMinExpectedDuration = 0;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
@@ -1367,44 +1342,46 @@
 
         ~Camera3DeviceInjectionMethods();
 
-        // Initialize the injection camera and generate an hal interface.
-        status_t injectionInitialize(
-                const String8& injectedCamId, sp<CameraProviderManager> manager,
-                const sp<
-                    android::hardware::camera::device::V3_2 ::ICameraDeviceCallback>&
-                    callback);
-
         // Injection camera will replace the internal camera and configure streams
         // when device is IDLE and request thread is paused.
         status_t injectCamera(
                 camera3::camera_stream_configuration& injectionConfig,
-                std::vector<uint32_t>& injectionBufferSizes);
+                const std::vector<uint32_t>& injectionBufferSizes);
 
         // Stop the injection camera and switch back to backup hal interface.
         status_t stopInjection();
 
         bool isInjecting();
 
+        bool isStreamConfigCompleteButNotInjected();
+
         const String8& getInjectedCamId() const;
 
         void getInjectionConfig(/*out*/ camera3::camera_stream_configuration* injectionConfig,
                 /*out*/ std::vector<uint32_t>* injectionBufferSizes);
 
-      private:
+        // When the stream configuration is complete and the camera device is active, but the
+        // injection camera has not yet been injected, store the internal camera's stream
+        // configuration first.
+        void storeInjectionConfig(
+                const camera3::camera_stream_configuration& injectionConfig,
+                const std::vector<uint32_t>& injectionBufferSizes);
+
+      protected:
         // Configure the streams of the injection camera. This must wait until the
         // output streams are created and configured on the original camera before
         // proceeding.
         status_t injectionConfigureStreams(
                 camera3::camera_stream_configuration& injectionConfig,
-                std::vector<uint32_t>& injectionBufferSizes);
+                const std::vector<uint32_t>& injectionBufferSizes);
 
         // Disconnect the injection camera and delete the hal interface.
         void injectionDisconnectImpl();
 
         // Use injection camera hal interface to replace and backup original
         // camera hal interface.
-        status_t replaceHalInterface(sp<HalInterface> newHalInterface,
-                bool keepBackup);
+        virtual status_t replaceHalInterface(sp<HalInterface> /*newHalInterface*/,
+                bool /*keepBackup*/) = 0;
 
         wp<Camera3Device> mParent;
 
@@ -1414,9 +1391,16 @@
         // Generated injection camera hal interface.
         sp<HalInterface> mInjectedCamHalInterface;
 
+        // The flag indicates that the stream configuration is complete, the camera device is
+        // active, but the injection camera has not yet been injected.
+        bool mIsStreamConfigCompleteButNotInjected = false;
+
         // Copy the configuration of the internal camera.
         camera3::camera_stream_configuration mInjectionConfig;
 
+        // Copy the streams of the internal camera.
+        Vector<camera3::camera_stream_t*> mInjectionStreams;
+
         // Copy the bufferSizes of the output streams of the internal camera.
         std::vector<uint32_t> mInjectionBufferSizes;
 
@@ -1427,6 +1411,10 @@
         // The injection camera ID.
         String8 mInjectedCamId;
     };
+
+    virtual sp<Camera3DeviceInjectionMethods>
+            createCamera3DeviceInjectionMethods(wp<Camera3Device>) = 0;
+
     sp<Camera3DeviceInjectionMethods> mInjectionMethods;
 
 }; // class Camera3Device
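
The header changes above replace the single maximum expected duration with a (minimum, maximum) range (calculateExpectedDurationRange, the extra registerInFlight parameter, and mMinExpectedDuration), driven by the request's AE target FPS range. As a rough, non-authoritative sketch of how such a range can be derived (the tag lookup and fallback handling below are assumptions for illustration, not the code introduced by this change):

#include <utility>
#include <system/camera_metadata.h>
#include <system/camera_metadata_tags.h>
#include <utils/Timers.h>

// Hypothetical helper: map ANDROID_CONTROL_AE_TARGET_FPS_RANGE to an expected
// (min, max) frame duration in nanoseconds, falling back to caller defaults.
static std::pair<nsecs_t, nsecs_t> expectedDurationFromFpsRange(
        const camera_metadata_t* request, nsecs_t defaultMin, nsecs_t defaultMax) {
    camera_metadata_ro_entry_t entry;
    int res = find_camera_metadata_ro_entry(
            request, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry);
    if (res != 0 || entry.count < 2 ||
            entry.data.i32[0] <= 0 || entry.data.i32[1] <= 0) {
        return {defaultMin, defaultMax};
    }
    int32_t minFps = entry.data.i32[0];
    int32_t maxFps = entry.data.i32[1];
    // A higher FPS bound means a shorter frame duration, and vice versa.
    return {s2ns(1) / maxFps, s2ns(1) / minFps};
}
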
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
index f145dac..031c255 100644
--- a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -26,8 +26,6 @@
 
 namespace android {
 
-using hardware::camera::device::V3_2::ICameraDeviceSession;
-
 Camera3Device::Camera3DeviceInjectionMethods::Camera3DeviceInjectionMethods(
         wp<Camera3Device> parent)
         : mParent(parent) {
@@ -39,98 +37,10 @@
     injectionDisconnectImpl();
 }
 
-status_t Camera3Device::Camera3DeviceInjectionMethods::injectionInitialize(
-        const String8& injectedCamId, sp<CameraProviderManager> manager,
-        const sp<android::hardware::camera::device::V3_2::ICameraDeviceCallback>&
-                callback) {
-    ATRACE_CALL();
-    Mutex::Autolock lock(mInjectionLock);
-
-    if (manager == nullptr) {
-        ALOGE("%s: manager does not exist!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    sp<Camera3Device> parent = mParent.promote();
-    if (parent == nullptr) {
-        ALOGE("%s: parent does not exist!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    mInjectedCamId = injectedCamId;
-    sp<ICameraDeviceSession> session;
-    ATRACE_BEGIN("Injection CameraHal::openSession");
-    status_t res = manager->openSession(injectedCamId.string(), callback,
-                                          /*out*/ &session);
-    ATRACE_END();
-    if (res != OK) {
-        ALOGE("Injection camera could not open camera session: %s (%d)",
-                strerror(-res), res);
-        return res;
-    }
-
-    std::shared_ptr<RequestMetadataQueue> queue;
-    auto requestQueueRet =
-        session->getCaptureRequestMetadataQueue([&queue](const auto& descriptor) {
-            queue = std::make_shared<RequestMetadataQueue>(descriptor);
-            if (!queue->isValid() || queue->availableToWrite() <= 0) {
-                ALOGE("Injection camera HAL returns empty request metadata fmq, not "
-                        "use it");
-                queue = nullptr;
-                // don't use the queue onwards.
-            }
-        });
-    if (!requestQueueRet.isOk()) {
-        ALOGE("Injection camera transaction error when getting request metadata fmq: "
-                "%s, not use it", requestQueueRet.description().c_str());
-        return DEAD_OBJECT;
-    }
-
-    std::unique_ptr<ResultMetadataQueue>& resQueue = parent->mResultMetadataQueue;
-    auto resultQueueRet = session->getCaptureResultMetadataQueue(
-        [&resQueue](const auto& descriptor) {
-            resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
-            if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
-                ALOGE("Injection camera HAL returns empty result metadata fmq, not use "
-                        "it");
-                resQueue = nullptr;
-                // Don't use the resQueue onwards.
-            }
-        });
-    if (!resultQueueRet.isOk()) {
-        ALOGE("Injection camera transaction error when getting result metadata queue "
-                "from camera session: %s", resultQueueRet.description().c_str());
-        return DEAD_OBJECT;
-    }
-    IF_ALOGV() {
-        session->interfaceChain(
-                [](::android::hardware::hidl_vec<::android::hardware::hidl_string>
-                        interfaceChain) {
-                        ALOGV("Injection camera session interface chain:");
-                        for (const auto& iface : interfaceChain) {
-                            ALOGV("  %s", iface.c_str());
-                        }
-                });
-    }
-
-    ALOGV("%s: Injection camera interface = new HalInterface()", __FUNCTION__);
-    mInjectedCamHalInterface =
-            new HalInterface(session, queue, parent->mUseHalBufManager,
-                       parent->mSupportOfflineProcessing);
-    if (mInjectedCamHalInterface == nullptr) {
-        ALOGE("%s: mInjectedCamHalInterface does not exist!", __FUNCTION__);
-        return DEAD_OBJECT;
-    }
-
-    return OK;
-}
-
 status_t Camera3Device::Camera3DeviceInjectionMethods::injectCamera(
         camera3::camera_stream_configuration& injectionConfig,
-        std::vector<uint32_t>& injectionBufferSizes) {
+        const std::vector<uint32_t>& injectionBufferSizes) {
     status_t res = NO_ERROR;
-    mInjectionConfig = injectionConfig;
-    mInjectionBufferSizes = injectionBufferSizes;
 
     if (mInjectedCamHalInterface == nullptr) {
         ALOGE("%s: mInjectedCamHalInterface does not exist!", __FUNCTION__);
@@ -148,7 +58,6 @@
     if (parent->mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Let the device be IDLE and the request thread is paused",
                 __FUNCTION__);
-        parent->mPauseStateNotify = true;
         res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
         if (res != OK) {
             ALOGE("%s: Can't pause captures to inject camera!", __FUNCTION__);
@@ -188,7 +97,7 @@
         ALOGV("%s: Restarting activity to inject camera", __FUNCTION__);
         // Reuse current operating mode and session parameters for new stream
         // config.
-        parent->internalUpdateStatusLocked(STATUS_ACTIVE);
+        parent->internalResumeLocked();
     }
 
     return OK;
@@ -208,7 +117,6 @@
     if (parent->mStatus == STATUS_ACTIVE) {
         ALOGV("%s: Let the device be IDLE and the request thread is paused",
                 __FUNCTION__);
-        parent->mPauseStateNotify = true;
         res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
         if (res != OK) {
             ALOGE("%s: Can't pause captures to stop injection!", __FUNCTION__);
@@ -229,7 +137,7 @@
         ALOGV("%s: Restarting activity to stop injection", __FUNCTION__);
         // Reuse current operating mode and session parameters for new stream
         // config.
-        parent->internalUpdateStatusLocked(STATUS_ACTIVE);
+        parent->internalResumeLocked();
     }
 
     return OK;
@@ -243,6 +151,10 @@
     }
 }
 
+bool Camera3Device::Camera3DeviceInjectionMethods::isStreamConfigCompleteButNotInjected() {
+    return mIsStreamConfigCompleteButNotInjected;
+}
+
 const String8& Camera3Device::Camera3DeviceInjectionMethods::getInjectedCamId()
         const {
     return mInjectedCamId;
@@ -260,10 +172,22 @@
     *injectionBufferSizes = mInjectionBufferSizes;
 }
 
+void Camera3Device::Camera3DeviceInjectionMethods::storeInjectionConfig(
+        const camera3::camera_stream_configuration& injectionConfig,
+        const std::vector<uint32_t>& injectionBufferSizes) {
+    mIsStreamConfigCompleteButNotInjected = true;
+    mInjectionConfig = injectionConfig;
+    mInjectionStreams.clear();
+    for (size_t i = 0; i < injectionConfig.num_streams; i++) {
+        mInjectionStreams.push_back(injectionConfig.streams[i]);
+    }
+    mInjectionConfig.streams = mInjectionStreams.editArray();
+    mInjectionBufferSizes = injectionBufferSizes;
+}
 
 status_t Camera3Device::Camera3DeviceInjectionMethods::injectionConfigureStreams(
         camera3::camera_stream_configuration& injectionConfig,
-        std::vector<uint32_t>& injectionBufferSizes) {
+        const std::vector<uint32_t>& injectionBufferSizes) {
     ATRACE_CALL();
     status_t res = NO_ERROR;
 
@@ -326,7 +250,6 @@
             mInjectedCamId.string());
 
     auto rc = parent->mPreparerThread->resume();
-
     if (rc != OK) {
         ALOGE("%s: Injection camera %s: Preparer thread failed to resume!",
                  __FUNCTION__, mInjectedCamId.string());
@@ -339,6 +262,9 @@
 void Camera3Device::Camera3DeviceInjectionMethods::injectionDisconnectImpl() {
     ATRACE_CALL();
     ALOGI("%s: Injection camera disconnect", __FUNCTION__);
+    mIsStreamConfigCompleteButNotInjected = false;
+    mInjectionStreams.clear();
+    mInjectionConfig.streams = nullptr;
 
     mBackupHalInterface = nullptr;
     HalInterface* interface = nullptr;
@@ -365,29 +291,4 @@
     }
 }
 
-status_t Camera3Device::Camera3DeviceInjectionMethods::replaceHalInterface(
-        sp<HalInterface> newHalInterface, bool keepBackup) {
-    Mutex::Autolock lock(mInjectionLock);
-    if (newHalInterface.get() == nullptr) {
-        ALOGE("%s: The newHalInterface does not exist, to stop replacing.",
-                __FUNCTION__);
-        return DEAD_OBJECT;
-    }
-
-    sp<Camera3Device> parent = mParent.promote();
-    if (parent == nullptr) {
-        ALOGE("%s: parent does not exist!", __FUNCTION__);
-        return INVALID_OPERATION;
-    }
-
-    if (keepBackup && mBackupHalInterface == nullptr) {
-        mBackupHalInterface = parent->mInterface;
-    } else if (!keepBackup) {
-        mBackupHalInterface = nullptr;
-    }
-    parent->mInterface = newHalInterface;
-
-    return OK;
-}
-
 };  // namespace android
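
storeInjectionConfig() above keeps its own Vector of stream pointers and re-points the stored configuration at it, because camera_stream_configuration only holds a raw streams pointer: a plain struct copy would keep aliasing the internal camera's array. A minimal sketch of the difference, reusing the types from this change (include path is assumed; this is illustration, not part of the patch):

#include <utils/Vector.h>
#include "device3/Camera3StreamInterface.h"  // assumed location of camera3::camera_stream_configuration

using android::Vector;
using android::camera3::camera_stream_configuration;
using android::camera3::camera_stream_t;

// Shallow copy: dst.streams still points into the caller's array, which may be
// reconfigured or freed before the injection actually happens.
void shallowCopy(camera_stream_configuration& dst, const camera_stream_configuration& src) {
    dst = src;
}

// Owned copy, as storeInjectionConfig() does: duplicate the pointer table into a
// Vector owned by the injection-methods object and point the stored config at it.
void ownedCopy(camera_stream_configuration& dst, Vector<camera_stream_t*>& backing,
               const camera_stream_configuration& src) {
    dst = src;
    backing.clear();
    for (size_t i = 0; i < src.num_streams; i++) {
        backing.push_back(src.streams[i]);
    }
    dst.streams = backing.editArray();
}
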
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index b121e5d..19afd69 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -48,7 +48,7 @@
 
 status_t Camera3FakeStream::returnBufferLocked(
         const camera_stream_buffer &,
-        nsecs_t, int32_t, const std::vector<size_t>&) {
+        nsecs_t, nsecs_t, int32_t, const std::vector<size_t>&) {
     ATRACE_CALL();
     ALOGE("%s: Stream %d: Fake stream cannot return buffers!", __FUNCTION__, mId);
     return INVALID_OPERATION;
@@ -56,7 +56,7 @@
 
 status_t Camera3FakeStream::returnBufferCheckedLocked(
             const camera_stream_buffer &,
-            nsecs_t,
+            nsecs_t, nsecs_t,
             bool,
             int32_t,
             const std::vector<size_t>&,
@@ -76,7 +76,7 @@
     Camera3IOStreamBase::dump(fd, args);
 }
 
-status_t Camera3FakeStream::setTransform(int) {
+status_t Camera3FakeStream::setTransform(int, bool) {
     ATRACE_CALL();
     // Do nothing
     return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index c11a3e4..8cecabd 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -52,7 +52,7 @@
 
     virtual void     dump(int fd, const Vector<String16> &args) const;
 
-    status_t         setTransform(int transform);
+    status_t         setTransform(int transform, bool mayChangeMirror);
 
     virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
 
@@ -100,6 +100,7 @@
 
     virtual status_t setBatchSize(size_t batchSize) override;
 
+    virtual void onMinDurationChanged(nsecs_t /*duration*/) {}
   protected:
 
     /**
@@ -108,6 +109,7 @@
     virtual status_t returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
@@ -135,7 +137,8 @@
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
     virtual status_t returnBufferLocked(
             const camera_stream_buffer &buffer,
-            nsecs_t timestamp, int32_t transform, const std::vector<size_t>& surface_ids);
+            nsecs_t timestamp, nsecs_t readoutTimestamp, int32_t transform,
+            const std::vector<size_t>& surface_ids);
 
     virtual status_t configureQueueLocked();
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 5f7e4cf..add1483 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,10 +34,12 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution) :
+        int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
+        bool deviceTimeBaseIsRealtime, int timestampBase) :
         Camera3Stream(id, type,
                 width, height, maxSize, format, dataSpace, rotation,
-                physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
+                physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
+                dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
         mTotalBufferCount(0),
         mHandoutTotalBufferCount(0),
         mHandoutOutputBufferCount(0),
@@ -82,11 +84,15 @@
             camera_stream::width, camera_stream::height,
             camera_stream::format, camera_stream::data_space);
     lines.appendFormat("      Max size: %zu\n", mMaxSize);
-    lines.appendFormat("      Combined usage: %" PRIu64 ", max HAL buffers: %d\n",
+    lines.appendFormat("      Combined usage: 0x%" PRIx64 ", max HAL buffers: %d\n",
             mUsage | consumerUsage, camera_stream::max_buffers);
     if (strlen(camera_stream::physical_camera_id) > 0) {
         lines.appendFormat("      Physical camera id: %s\n", camera_stream::physical_camera_id);
     }
+    lines.appendFormat("      Dynamic Range Profile: 0x%" PRIx64 "\n",
+            camera_stream::dynamic_range_profile);
+    lines.appendFormat("      Stream use case: %" PRId64 "\n", camera_stream::use_case);
+    lines.appendFormat("      Timestamp base: %d\n", getTimestampBase());
     lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
     lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu\n",
@@ -224,6 +230,7 @@
 status_t Camera3IOStreamBase::returnAnyBufferLocked(
         const camera_stream_buffer &buffer,
         nsecs_t timestamp,
+        nsecs_t readoutTimestamp,
         bool output,
         int32_t transform,
         const std::vector<size_t>& surface_ids) {
@@ -242,7 +249,8 @@
     }
 
     sp<Fence> releaseFence;
-    res = returnBufferCheckedLocked(buffer, timestamp, output, transform, surface_ids,
+    res = returnBufferCheckedLocked(buffer, timestamp, readoutTimestamp,
+                                    output, transform, surface_ids,
                                     &releaseFence);
     // Res may be an error, but we still want to decrement our owned count
     // to enable clean shutdown. So we'll just return the error but otherwise
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 6135b7e..f389d53 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -37,7 +37,11 @@
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            bool deviceTimeBaseIsRealtime = false,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
 
   public:
 
@@ -67,6 +71,7 @@
     status_t         returnAnyBufferLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
@@ -74,6 +79,7 @@
     virtual status_t returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 6eb798e..9a3f7ed 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -105,6 +105,7 @@
 status_t Camera3InputStream::returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t /*transform*/,
             const std::vector<size_t>&,
@@ -112,6 +113,7 @@
             sp<Fence> *releaseFenceOut) {
 
     (void)timestamp;
+    (void)readoutTimestamp;
     (void)output;
     ALOG_ASSERT(!output, "Expected output to be false");
 
@@ -176,7 +178,8 @@
         const camera_stream_buffer &buffer) {
     ATRACE_CALL();
 
-    return returnAnyBufferLocked(buffer, /*timestamp*/0, /*output*/false, /*transform*/ -1);
+    return returnAnyBufferLocked(buffer, /*timestamp*/0, /*readoutTimestamp*/0,
+                                 /*output*/false, /*transform*/ -1);
 }
 
 status_t Camera3InputStream::getInputBufferProducerLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 6f66bca..5e0587b 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -61,6 +61,7 @@
     virtual status_t returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index b702e20..7cfa255 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -47,14 +47,12 @@
         const camera3::StreamSet& offlineStreamSet,
         camera3::BufferRecords&& bufferRecords,
         const camera3::InFlightRequestMap& offlineReqs,
-        const Camera3OfflineStates& offlineStates,
-        sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession) :
+        const Camera3OfflineStates& offlineStates) :
         mId(id),
         mInputStream(inputStream),
         mOutputStreams(offlineStreamSet),
         mBufferRecords(std::move(bufferRecords)),
         mOfflineReqs(offlineReqs),
-        mSession(offlineSession),
         mTagMonitor(offlineStates.mTagMonitor),
         mVendorTagId(offlineStates.mVendorTagId),
         mUseHalBufManager(offlineStates.mUseHalBufManager),
@@ -90,43 +88,6 @@
     return mId;
 }
 
-status_t Camera3OfflineSession::initialize(wp<NotificationListener> listener) {
-    ATRACE_CALL();
-
-    if (mSession == nullptr) {
-        ALOGE("%s: HIDL session is null!", __FUNCTION__);
-        return DEAD_OBJECT;
-    }
-
-    {
-        std::lock_guard<std::mutex> lock(mLock);
-
-        mListener = listener;
-
-        // setup result FMQ
-        std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
-        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(
-            [&resQueue](const auto& descriptor) {
-                resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
-                if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
-                    ALOGE("HAL returns empty result metadata fmq, not use it");
-                    resQueue = nullptr;
-                    // Don't use resQueue onwards.
-                }
-            });
-        if (!resultQueueRet.isOk()) {
-            ALOGE("Transaction error when getting result metadata queue from camera session: %s",
-                    resultQueueRet.description().c_str());
-            return DEAD_OBJECT;
-        }
-        mStatus = STATUS_ACTIVE;
-    }
-
-    mSession->setCallback(this);
-
-    return OK;
-}
-
 status_t Camera3OfflineSession::dump(int /*fd*/) {
     ATRACE_CALL();
     std::lock_guard<std::mutex> il(mInterfaceLock);
@@ -135,6 +96,7 @@
 
 status_t Camera3OfflineSession::disconnect() {
     ATRACE_CALL();
+    disconnectSession();
     return disconnectImpl();
 }
 
@@ -170,10 +132,6 @@
         streams.push_back(mInputStream);
     }
 
-    if (mSession != nullptr) {
-        mSession->close();
-    }
-
     FlushInflightReqStates states {
         mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
         listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
@@ -182,7 +140,6 @@
 
     {
         std::lock_guard<std::mutex> lock(mLock);
-        mSession.clear();
         mOutputStreams.clear();
         mInputStream.clear();
         mStatus = STATUS_CLOSED;
@@ -235,149 +192,6 @@
     return OK;
 }
 
-hardware::Return<void> Camera3OfflineSession::processCaptureResult_3_4(
-        const hardware::hidl_vec<
-                hardware::camera::device::V3_4::CaptureResult>& results) {
-    sp<NotificationListener> listener;
-    {
-        std::lock_guard<std::mutex> lock(mLock);
-        if (mStatus != STATUS_ACTIVE) {
-            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
-            return hardware::Void();
-        }
-        listener = mListener.promote();
-    }
-
-    CaptureOutputStates states {
-        mId,
-        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
-        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
-        mNextShutterFrameNumber,
-        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-        mNextResultFrameNumber,
-        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
-        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
-        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false
-    };
-
-    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
-    for (const auto& result : results) {
-        processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
-    }
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3OfflineSession::processCaptureResult(
-        const hardware::hidl_vec<
-                hardware::camera::device::V3_2::CaptureResult>& results) {
-    // TODO: changed impl to call into processCaptureResult_3_4 instead?
-    //       might need to figure how to reduce copy though.
-    sp<NotificationListener> listener;
-    {
-        std::lock_guard<std::mutex> lock(mLock);
-        if (mStatus != STATUS_ACTIVE) {
-            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
-            return hardware::Void();
-        }
-        listener = mListener.promote();
-    }
-
-    hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
-
-    CaptureOutputStates states {
-        mId,
-        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
-        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
-        mNextShutterFrameNumber,
-        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-        mNextResultFrameNumber,
-        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
-        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
-        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false
-    };
-
-    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
-    for (const auto& result : results) {
-        processOneCaptureResultLocked(states, result, noPhysMetadata);
-    }
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3OfflineSession::notify(
-        const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
-    sp<NotificationListener> listener;
-    {
-        std::lock_guard<std::mutex> lock(mLock);
-        if (mStatus != STATUS_ACTIVE) {
-            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
-            return hardware::Void();
-        }
-        listener = mListener.promote();
-    }
-
-    CaptureOutputStates states {
-        mId,
-        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
-        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
-        mNextShutterFrameNumber,
-        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-        mNextResultFrameNumber,
-        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
-        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
-        mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
-        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false
-    };
-    for (const auto& msg : msgs) {
-        camera3::notify(states, msg);
-    }
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3OfflineSession::requestStreamBuffers(
-        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
-        requestStreamBuffers_cb _hidl_cb) {
-    {
-        std::lock_guard<std::mutex> lock(mLock);
-        if (mStatus != STATUS_ACTIVE) {
-            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
-            return hardware::Void();
-        }
-    }
-
-    RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
-        *this, mBufferRecords, *this};
-    camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
-    return hardware::Void();
-}
-
-hardware::Return<void> Camera3OfflineSession::returnStreamBuffers(
-        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
-    {
-        std::lock_guard<std::mutex> lock(mLock);
-        if (mStatus != STATUS_ACTIVE) {
-            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
-            return hardware::Void();
-        }
-    }
-
-    ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, mBufferRecords};
-    camera3::returnStreamBuffers(states, buffers);
-    return hardware::Void();
-}
-
 void Camera3OfflineSession::setErrorState(const char *fmt, ...) {
     ATRACE_CALL();
     std::lock_guard<std::mutex> lock(mLock);
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 5581964..a799719 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -131,7 +131,6 @@
  */
 class Camera3OfflineSession :
             public CameraOfflineSessionBase,
-            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
             public camera3::SetErrorInterface,
             public camera3::InflightRequestUpdateInterface,
             public camera3::RequestBufferInterface,
@@ -144,12 +143,11 @@
             const camera3::StreamSet& offlineStreamSet,
             camera3::BufferRecords&& bufferRecords,
             const camera3::InFlightRequestMap& offlineReqs,
-            const Camera3OfflineStates& offlineStates,
-            sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession);
+            const Camera3OfflineStates& offlineStates);
 
     virtual ~Camera3OfflineSession();
 
-    virtual status_t initialize(wp<NotificationListener> listener) override;
+    virtual status_t initialize(wp<NotificationListener> /*listener*/) = 0;
 
     /**
      * CameraOfflineSessionBase interface
@@ -171,38 +169,7 @@
      * End of CameraOfflineSessionBase interface
      */
 
-    /**
-     * HIDL ICameraDeviceCallback interface
-     */
-
-    /**
-     * Implementation of android::hardware::camera::device::V3_5::ICameraDeviceCallback
-     */
-
-    hardware::Return<void> processCaptureResult_3_4(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_4::CaptureResult>& results) override;
-    hardware::Return<void> processCaptureResult(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_2::CaptureResult>& results) override;
-    hardware::Return<void> notify(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_2::NotifyMsg>& msgs) override;
-
-    hardware::Return<void> requestStreamBuffers(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_5::BufferRequest>& bufReqs,
-            requestStreamBuffers_cb _hidl_cb) override;
-
-    hardware::Return<void> returnStreamBuffers(
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
-
-    /**
-     * End of CameraOfflineSessionBase interface
-     */
-
-  private:
+  protected:
     // Camera device ID
     const String8 mId;
     sp<camera3::Camera3Stream> mInputStream;
@@ -213,8 +180,6 @@
     std::mutex mOfflineReqsLock;
     camera3::InFlightRequestMap mOfflineReqs;
 
-    sp<hardware::camera::device::V3_6::ICameraOfflineSession> mSession;
-
     TagMonitor mTagMonitor;
     const metadata_vendor_id_t mVendorTagId;
 
@@ -269,8 +234,6 @@
     // End of mLock protect scope
 
     std::mutex mProcessCaptureResultLock;
-    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
-    std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
 
     // Tracking cause of fatal errors when in STATUS_ERROR
     String8 mErrorCause;
@@ -283,6 +246,9 @@
     // For client methods such as disconnect/dump
     std::mutex mInterfaceLock;
 
+    // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
+    nsecs_t mMinExpectedDuration = 0;
+
     // SetErrorInterface
     void setErrorState(const char *fmt, ...) override;
     void setErrorStateLocked(const char *fmt, ...) override;
@@ -305,6 +271,8 @@
     void setErrorStateLockedV(const char *fmt, va_list args);
 
     status_t disconnectImpl();
+    virtual void disconnectSession() = 0;
+
 }; // class Camera3OfflineSession
 
 }; // namespace android
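
With initialize() and disconnectSession() now pure virtual and the HIDL ICameraDeviceCallback base removed, Camera3OfflineSession becomes a transport-agnostic base class; a transport-specific subclass supplies the session handle, result metadata queue, and callback plumbing that were dropped here. A hypothetical outline only (class name and members are illustrative, not taken from this patch):

// Hypothetical subclass shape for one IPC transport.
class TransportCamera3OfflineSession : public Camera3OfflineSession {
  public:
    using Camera3OfflineSession::Camera3OfflineSession;

    // Register the listener and set up the transport-specific result path
    // (e.g. the result metadata queue that was removed from the base class).
    status_t initialize(wp<NotificationListener> listener) override;

  private:
    // Close the transport-level session; disconnect() calls this before the
    // shared disconnectImpl() teardown in the base class.
    void disconnectSession() override;
};
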
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3738d01..1e20ee0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -18,14 +18,20 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <algorithm>
 #include <ctime>
 #include <fstream>
 
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
 #include <android-base/unique_fd.h>
+#include <cutils/properties.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <common/CameraDeviceBase.h>
 #include "api1/client2/JpegProcessor.h"
 #include "Camera3OutputStream.h"
 #include "utils/TraceHFR.h"
@@ -39,16 +45,23 @@
 
 namespace camera3 {
 
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
 Camera3OutputStream::Camera3OutputStream(int id,
         sp<Surface> consumer,
         uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
-        const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution) :
+        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int mirrorMode) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
+                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
+                            timestampBase),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -56,7 +69,9 @@
         mTimestampOffset(timestampOffset),
         mConsumerUsage(0),
         mDropBuffers(false),
-        mDequeueBufferLatency(kDequeueLatencyBinSize) {
+        mMirrorMode(mirrorMode),
+        mDequeueBufferLatency(kDequeueLatencyBinSize),
+        mIPCTransport(transport) {
 
     if (mConsumer == NULL) {
         ALOGE("%s: Consumer is NULL!", __FUNCTION__);
@@ -72,20 +87,24 @@
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
-        const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution) :
+        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int mirrorMode) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
-                            setId, isMultiResolution),
+                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
+                            deviceTimeBaseIsRealtime, timestampBase),
         mConsumer(consumer),
         mTransform(0),
         mTraceFirstBuffer(true),
-        mUseMonoTimestamp(false),
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
         mConsumerUsage(0),
         mDropBuffers(false),
-        mDequeueBufferLatency(kDequeueLatencyBinSize) {
+        mMirrorMode(mirrorMode),
+        mDequeueBufferLatency(kDequeueLatencyBinSize),
+        mIPCTransport(transport) {
 
     if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
         ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
@@ -107,11 +126,15 @@
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation, nsecs_t timestampOffset,
         const String8& physicalCameraId,
-        const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution) :
+        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int mirrorMode) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                             /*maxSize*/0, format, dataSpace, rotation,
-                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
+                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
+                            timestampBase),
         mConsumer(nullptr),
         mTransform(0),
         mTraceFirstBuffer(true),
@@ -119,7 +142,9 @@
         mTimestampOffset(timestampOffset),
         mConsumerUsage(consumerUsage),
         mDropBuffers(false),
-        mDequeueBufferLatency(kDequeueLatencyBinSize) {
+        mMirrorMode(mirrorMode),
+        mDequeueBufferLatency(kDequeueLatencyBinSize),
+        mIPCTransport(transport) {
     // Deferred consumer only supports the preview surface format for now.
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
@@ -146,21 +171,28 @@
                                          android_dataspace dataSpace,
                                          camera_stream_rotation_t rotation,
                                          const String8& physicalCameraId,
-                                        const std::unordered_set<int32_t> &sensorPixelModesUsed,
+                                         const std::unordered_set<int32_t> &sensorPixelModesUsed,
+                                         IPCTransport transport,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
-                                         int setId, bool isMultiResolution) :
+                                         int setId, bool isMultiResolution,
+                                         int64_t dynamicRangeProfile, int64_t streamUseCase,
+                                         bool deviceTimeBaseIsRealtime, int timestampBase,
+                                         int mirrorMode) :
         Camera3IOStreamBase(id, type, width, height,
                             /*maxSize*/0,
                             format, dataSpace, rotation,
-                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
+                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
+                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
+                            timestampBase),
         mTransform(0),
         mTraceFirstBuffer(true),
-        mUseMonoTimestamp(false),
         mUseBufferManager(false),
         mTimestampOffset(timestampOffset),
         mConsumerUsage(consumerUsage),
         mDropBuffers(false),
-        mDequeueBufferLatency(kDequeueLatencyBinSize) {
+        mMirrorMode(mirrorMode),
+        mDequeueBufferLatency(kDequeueLatencyBinSize),
+        mIPCTransport(transport) {
 
     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
@@ -263,14 +295,16 @@
 
 status_t Camera3OutputStream::returnBufferLocked(
         const camera_stream_buffer &buffer,
-        nsecs_t timestamp, int32_t transform, const std::vector<size_t>& surface_ids) {
+        nsecs_t timestamp, nsecs_t readoutTimestamp,
+        int32_t transform, const std::vector<size_t>& surface_ids) {
     ATRACE_HFR_CALL();
 
     if (mHandoutTotalBufferCount == 1) {
         returnPrefetchedBuffersLocked();
     }
 
-    status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, transform, surface_ids);
+    status_t res = returnAnyBufferLocked(buffer, timestamp, readoutTimestamp,
+                                         /*output*/true, transform, surface_ids);
 
     if (res != OK) {
         return res;
@@ -282,9 +316,79 @@
     return OK;
 }
 
+status_t Camera3OutputStream::fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence) {
+    // Lock the JPEG buffer for CPU read
+    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
+    void* mapped = nullptr;
+    base::unique_fd fenceFd(dup(fence));
+    // Use USAGE_SW_WRITE_RARELY since we're going to re-write the CameraBlob
+    // header.
+    GraphicBufferLocker gbLocker(graphicBuffer);
+    status_t res =
+            gbLocker.lockAsync(
+                    GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
+                    &mapped, fenceFd.get());
+    if (res != OK) {
+        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    uint8_t *hidlHeaderStart =
+            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t);
+    // Check that the jpeg buffer is big enough to contain HIDL camera blob
+    if (hidlHeaderStart < static_cast<uint8_t *>(mapped)) {
+        ALOGE("%s, jpeg buffer not large enough to fit HIDL camera blob %" PRIu32, __FUNCTION__,
+                graphicBuffer->getWidth());
+        return BAD_VALUE;
+    }
+    camera_jpeg_blob_t *hidlBlobHeader = reinterpret_cast<camera_jpeg_blob_t *>(hidlHeaderStart);
+
+    // Check that the blob header is indeed the JPEG blob id.
+    if (hidlBlobHeader->jpeg_blob_id != CAMERA_JPEG_BLOB_ID) {
+        ALOGE("%s, jpeg blob id %d is not correct", __FUNCTION__, hidlBlobHeader->jpeg_blob_id);
+        return BAD_VALUE;
+    }
+
+    // Retrieve id and blob size
+    CameraBlobId blobId = static_cast<CameraBlobId>(hidlBlobHeader->jpeg_blob_id);
+    uint32_t blobSizeBytes = hidlBlobHeader->jpeg_size;
+
+    if (blobSizeBytes > (graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t))) {
+        ALOGE("%s, blobSize in HIDL jpeg blob : %d is corrupt, buffer size %" PRIu32, __FUNCTION__,
+                  blobSizeBytes, graphicBuffer->getWidth());
+    }
+
+    uint8_t *aidlHeaderStart =
+            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(CameraBlob);
+
+    // Check that the jpeg buffer is big enough to contain AIDL camera blob
+    if (aidlHeaderStart < static_cast<uint8_t *>(mapped)) {
+        ALOGE("%s, jpeg buffer not large enough to fit AIDL camera blob %" PRIu32, __FUNCTION__,
+                graphicBuffer->getWidth());
+        return BAD_VALUE;
+    }
+
+    if (static_cast<uint8_t*>(mapped) + blobSizeBytes > aidlHeaderStart) {
+        ALOGE("%s, jpeg blob with size %d , buffer size %" PRIu32 " not large enough to fit"
+                " AIDL camera blob without corrupting jpeg", __FUNCTION__, blobSizeBytes,
+                graphicBuffer->getWidth());
+        return BAD_VALUE;
+    }
+
+    // Fill in JPEG header
+    CameraBlob aidlHeader = {
+            .blobId = blobId,
+            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
+    };
+    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
+    graphicBuffer->unlock();
+    return OK;
+}
+
 status_t Camera3OutputStream::returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
@@ -346,32 +450,47 @@
             }
             mTraceFirstBuffer = false;
         }
-
-        if (transform != -1) {
-            setTransformLocked(transform);
+        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details: http://b/229688810
+        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
+            if (mIPCTransport == IPCTransport::HIDL) {
+                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
+            }
+            // If this is a JPEG output, and image dump mask is set, save image to
+            // disk.
+            if (mImageDumpMask) {
+                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
+            }
         }
 
-        /* Certain consumers (such as AudioSource or HardwareComposer) use
-         * MONOTONIC time, causing time misalignment if camera timestamp is
-         * in BOOTTIME. Do the conversion if necessary. */
-        res = native_window_set_buffers_timestamp(mConsumer.get(),
-                mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
-        if (res != OK) {
-            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
-                  __FUNCTION__, mId, strerror(-res), res);
-            return res;
-        }
-        // If this is a JPEG output, and image dump mask is set, save image to
-        // disk.
-        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
-                mImageDumpMask) {
-            dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
-        }
+        if (mPreviewFrameSpacer != nullptr) {
+            res = mPreviewFrameSpacer->queuePreviewBuffer(timestamp - mTimestampOffset,
+                    readoutTimestamp - mTimestampOffset, transform, anwBuffer, anwReleaseFence);
+            if (res != OK) {
+                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
+                        __FUNCTION__, mId, strerror(-res), res);
+                return res;
+            }
+        } else {
+            nsecs_t captureTime = (mSyncToDisplay ? readoutTimestamp : timestamp)
+                    - mTimestampOffset;
+            nsecs_t presentTime = mSyncToDisplay ?
+                    syncTimestampToDisplayLocked(captureTime) : captureTime;
 
-        res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
-        if (shouldLogError(res, state)) {
-            ALOGE("%s: Stream %d: Error queueing buffer to native window:"
-                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+            setTransform(transform, true/*mayChangeMirror*/);
+            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
+            if (res != OK) {
+                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+                      __FUNCTION__, mId, strerror(-res), res);
+                return res;
+            }
+
+            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);
+
+            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
+            if (shouldLogError(res, state)) {
+                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
+                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+            }
         }
     }
     mLock.lock();
@@ -404,14 +523,23 @@
         "      DequeueBuffer latency histogram:");
 }
 
-status_t Camera3OutputStream::setTransform(int transform) {
+status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
+    if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
+        // If the mirroring mode is not AUTO, do not allow transform updates
+        // that may change the mirroring behavior.
+        return OK;
+    }
+
     return setTransformLocked(transform);
 }
 
 status_t Camera3OutputStream::setTransformLocked(int transform) {
     status_t res = OK;
+
+    if (transform == -1) return res;
+
     if (mState == STATE_ERROR) {
         ALOGE("%s: Stream in error state", __FUNCTION__);
         return INVALID_OPERATION;
@@ -437,7 +565,7 @@
         return res;
     }
 
-    if ((res = configureConsumerQueueLocked()) != OK) {
+    if ((res = configureConsumerQueueLocked(true /*allowPreviewRespace*/)) != OK) {
         return res;
     }
 
@@ -461,7 +589,7 @@
     return OK;
 }
 
-status_t Camera3OutputStream::configureConsumerQueueLocked() {
+status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewRespace) {
     status_t res;
 
     mTraceFirstBuffer = true;
@@ -547,10 +675,56 @@
     }
 
     mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
+
+    int timestampBase = getTimestampBase();
+    bool isDefaultTimeBase = (timestampBase ==
+            OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+    if (allowPreviewRespace) {
+        bool forceChoreographer = (timestampBase ==
+                OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
+        bool defaultToChoreographer = (isDefaultTimeBase &&
+                isConsumedByHWComposer());
+        bool defaultToSpacer = (isDefaultTimeBase &&
+                isConsumedByHWTexture() &&
+                !isConsumedByCPU() &&
+                !isVideoStream());
+        if (forceChoreographer || defaultToChoreographer) {
+            mSyncToDisplay = true;
+            mTotalBufferCount += kDisplaySyncExtraBuffer;
+        } else if (defaultToSpacer) {
+            mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
+            mTotalBufferCount++;
+            res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
+            if (res != OK) {
+                ALOGE("%s: Unable to start preview spacer", __FUNCTION__);
+                return res;
+            }
+        }
+    }
     mHandoutTotalBufferCount = 0;
     mFrameCount = 0;
     mLastTimestamp = 0;
-    mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
+
+    if (isDeviceTimeBaseRealtime()) {
+        if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
+            // Default time base, and not consumed by the HW composer or a video encoder
+            mTimestampOffset = 0;
+        } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
+                timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
+            mTimestampOffset = 0;
+        }
+        // If timestampBase is CHOREOGRAPHER_SYNCED or MONOTONIC, leave
+        // timestamp offset as bootTime - monotonicTime.
+    } else {
+        if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
+            // Reverse offset for monotonicTime -> bootTime
+            mTimestampOffset = -mTimestampOffset;
+        } else {
+            // If timestampBase is DEFAULT, MONOTONIC, SENSOR, or
+            // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
+            mTimestampOffset = 0;
+        }
+    }
 
     res = native_window_set_buffer_count(mConsumer.get(),
             mTotalBufferCount);
@@ -799,6 +973,10 @@
 
     returnPrefetchedBuffersLocked();
 
+    if (mPreviewFrameSpacer != nullptr) {
+        mPreviewFrameSpacer->requestExit();
+    }
+
     ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
 
     res = native_window_api_disconnect(mConsumer.get(),
@@ -1092,6 +1270,17 @@
     return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
 }
 
+bool Camera3OutputStream::isConsumedByCPU() const {
+    uint64_t usage = 0;
+    status_t res = getEndpointUsage(&usage);
+    if (res != OK) {
+        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
+        return false;
+    }
+
+    return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
+}
+
 void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
         ANativeWindowBuffer* anwBuffer, int fence) {
     // Derive the output file name
@@ -1168,6 +1357,11 @@
     return OK;
 }
 
+void Camera3OutputStream::onMinDurationChanged(nsecs_t duration) {
+    Mutex::Autolock l(mLock);
+    mMinExpectedDuration = duration;
+}
+
 void Camera3OutputStream::returnPrefetchedBuffersLocked() {
     std::vector<Surface::BatchBuffer> batchedBuffers;
 
@@ -1185,6 +1379,99 @@
     }
 }
 
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+    ParcelableVsyncEventData parcelableVsyncEventData;
+    auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
+                __FUNCTION__, mId, strerror(-res), res);
+        mLastCaptureTime = t;
+        mLastPresentTime = t;
+        return t;
+    }
+
+    const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
+    nsecs_t currentTime = systemTime();
+
+    // Reset the capture-to-present time offset if:
+    // - the capture interval exceeds kSpacingResetIntervalNs, or
+    // - the frame duration deviates from a multiple of the vsync interval.
+    nsecs_t captureInterval = t - mLastCaptureTime;
+    float captureToVsyncIntervalRatio = 1.0f * captureInterval / vsyncEventData.frameInterval;
+    float ratioDeviation = std::fabs(
+            captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
+    if (captureInterval > kSpacingResetIntervalNs ||
+            ratioDeviation >= kMaxIntervalRatioDeviation) {
+        nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
+        for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+            const auto& timeline = vsyncEventData.frameTimelines[i];
+            if (timeline.deadlineTimestamp >= currentTime &&
+                    timeline.expectedPresentationTime > minPresentT) {
+                nsecs_t presentT = vsyncEventData.frameTimelines[i].expectedPresentationTime;
+                mCaptureToPresentOffset = presentT - t;
+                mLastCaptureTime = t;
+                mLastPresentTime = presentT;
+
+                // Move the expected presentation time back by 1/3 of frame interval to
+                // mitigate the time drift. Due to time drift, if we directly use the
+                // expected presentation time, two expected presentation times often
+                // fall into the same VSYNC interval.
+                return presentT - vsyncEventData.frameInterval/3;
+            }
+        }
+    }
+
+    nsecs_t idealPresentT = t + mCaptureToPresentOffset;
+    nsecs_t expectedPresentT = mLastPresentTime;
+    nsecs_t minDiff = INT64_MAX;
+    // Derive the minimum interval between presentation times based on the
+    // minimum expected duration. The minimum number of vsyncs is:
+    // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
+    // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
+    // - and so on.
+    int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
+            vsyncEventData.frameInterval;
+    if (minVsyncs < 0) minVsyncs = 0;
+    nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
+    // Find the best timestamp among the vsync timelines:
+    // - use at most kMaxTimelines timelines to avoid long latency,
+    // - prefer the timeline closest to the ideal present time,
+    // - require its deadline timestamp to be later than the current time, and
+    // - require its expected present time to be at least minInterval after
+    //   the last present time.
+    int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
+    float biasForShortDelay = 1.0f;
+    for (int i = 0; i < maxTimelines; i ++) {
+        const auto& vsyncTime = vsyncEventData.frameTimelines[i];
+        if (minVsyncs > 0) {
+            // Bias towards using smaller timeline index:
+            //   i = 0:                bias = 1
+            //   i = maxTimelines-1:   bias = -1
+            biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
+        }
+        if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
+                vsyncTime.deadlineTimestamp >= currentTime &&
+                vsyncTime.expectedPresentationTime >
+                mLastPresentTime + minInterval + biasForShortDelay * kTimelineThresholdNs) {
+            expectedPresentT = vsyncTime.expectedPresentationTime;
+            minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
+        }
+    }
+    mLastCaptureTime = t;
+    mLastPresentTime = expectedPresentT;
+
+    // Move the expected presentation time back by 1/3 of frame interval to
+    // mitigate the time drift. Due to time drift, if we directly use the
+    // expected presentation time, two expected presentation times often
+    // fall into the same VSYNC interval.
+    return expectedPresentT - vsyncEventData.frameInterval/3;
+}
+
+bool Camera3OutputStream::shouldLogError(status_t res) {
+    Mutex::Autolock l(mLock);
+    return shouldLogError(res, mState);
+}
+
 }; // namespace camera3
 
 }; // namespace android
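
The syncTimestampToDisplayLocked() logic above derives a minimum number of vsyncs between
presentation times from the minimum expected frame duration. A minimal standalone sketch of
that arithmetic, assuming plain int64_t nanosecond values (the helper name is illustrative,
not part of this patch):

    #include <cstdint>

    // 0 vsyncs for durations in (0, 1.5] * vsyncInterval,
    // 1 vsync for durations in (1.5, 2.5] * vsyncInterval, and so on.
    int64_t minVsyncsBetweenFrames(int64_t minFrameDurationNs, int64_t vsyncIntervalNs) {
        int64_t minVsyncs = (minFrameDurationNs - vsyncIntervalNs / 2) / vsyncIntervalNs;
        return minVsyncs < 0 ? 0 : minVsyncs;
    }

    // Example: a 30fps cap (~33.3ms) on a 60Hz panel (~16.7ms vsync) gives
    // (33333333 - 8333333) / 16666666 == 1, so consecutive frames land on
    // presentation times at least one vsync apart.
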
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 0872687..e8065ce 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -21,12 +21,15 @@
 #include <utils/RefBase.h>
 #include <gui/IProducerListener.h>
 #include <gui/Surface.h>
+#include <gui/DisplayEventReceiver.h>
 
+#include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
 #include "Camera3Stream.h"
 #include "Camera3IOStreamBase.h"
 #include "Camera3OutputStreamInterface.h"
 #include "Camera3BufferManager.h"
+#include "PreviewFrameSpacer.h"
 
 namespace android {
 
@@ -87,8 +90,13 @@
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
-            const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            bool deviceTimeBaseIsRealtime = false,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
     /**
      * Set up a stream for formats that have a variable buffer size for the same
      * dimensions, such as compressed JPEG.
@@ -99,8 +107,13 @@
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             nsecs_t timestampOffset, const String8& physicalCameraId,
-            const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            bool deviceTimeBaseIsRealtime = false,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
     /**
      * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
      * RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -110,8 +123,13 @@
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
-            const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
+            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            bool deviceTimeBaseIsRealtime = false,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
 
     virtual ~Camera3OutputStream();
 
@@ -125,7 +143,7 @@
      * Set the transform on the output stream; one of the
      * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
      */
-    status_t         setTransform(int transform);
+    status_t         setTransform(int transform, bool mayChangeMirror);
 
     /**
      * Return if this output stream is for video encoding.
@@ -142,6 +160,11 @@
     bool isConsumedByHWTexture() const;
 
     /**
+     * Return if this output stream is consumed by the CPU.
+     */
+    bool isConsumedByCPU() const;
+
+    /**
      * Return if the consumer configuration of this stream is deferred.
      */
     virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
@@ -224,20 +247,31 @@
     virtual status_t setBatchSize(size_t batchSize = 1) override;
 
     /**
+     * Notify the stream of a change in the minimum frame duration.
+     */
+    virtual void onMinDurationChanged(nsecs_t duration) override;
+
+    /**
      * Apply ZSL related consumer usage quirk.
      */
     static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
 
     void setImageDumpMask(int mask) { mImageDumpMask = mask; }
+    bool shouldLogError(status_t res);
 
   protected:
     Camera3OutputStream(int id, camera_stream_type_t type,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
-            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
-            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
+            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            bool deviceTimeBaseIsRealtime = false,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
 
     /**
      * Note that we release the lock briefly in this function
@@ -245,6 +279,7 @@
     virtual status_t returnBufferCheckedLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp,
+            nsecs_t readoutTimestamp,
             bool output,
             int32_t transform,
             const std::vector<size_t>& surface_ids,
@@ -252,10 +287,11 @@
             sp<Fence> *releaseFenceOut);
 
     virtual status_t disconnectLocked();
+    status_t fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence);
 
     status_t getEndpointUsageForSurface(uint64_t *usage,
             const sp<Surface>& surface) const;
-    status_t configureConsumerQueueLocked();
+    status_t configureConsumerQueueLocked(bool allowPreviewRespace);
 
     // Consumer as the output of camera HAL
     sp<Surface> mConsumer;
@@ -278,9 +314,6 @@
     // Name of Surface consumer
     String8           mConsumerName;
 
-    // Whether consumer assumes MONOTONIC timestamp
-    bool mUseMonoTimestamp;
-
     /**
      * GraphicBuffer manager this stream is registered to. Used to replace the buffer
      * allocation/deallocation role of BufferQueue.
@@ -299,7 +332,11 @@
     bool mUseBufferManager;
 
     /**
-     * Timestamp offset for video and hardware composer consumed streams
+     * Offset used to override camera HAL produced timestamps
+     *
+     * The offset is first initialized to bootTime - monotonicTime in the
+     * constructor and may later be updated based on the client's timestampBase
+     * setting.
      */
     nsecs_t mTimestampOffset;
 
@@ -323,6 +360,8 @@
     std::vector<Surface::BatchBuffer> mBatchedBuffers;
     // ---- End of mBatchLock protected scope ----
 
+    const int mMirrorMode;
+
     /**
      * Internal Camera3Stream interface
      */
@@ -333,7 +372,8 @@
 
     virtual status_t returnBufferLocked(
             const camera_stream_buffer &buffer,
-            nsecs_t timestamp, int32_t transform, const std::vector<size_t>& surface_ids);
+            nsecs_t timestamp, nsecs_t readoutTimestamp,
+            int32_t transform, const std::vector<size_t>& surface_ids);
 
     virtual status_t queueBufferToConsumer(sp<ANativeWindow>& consumer,
             ANativeWindowBuffer* buffer, int anwReleaseFence,
@@ -365,11 +405,32 @@
 
     void returnPrefetchedBuffersLocked();
 
+
     static const int32_t kDequeueLatencyBinSize = 5; // in ms
     CameraLatencyHistogram mDequeueBufferLatency;
+    IPCTransport mIPCTransport = IPCTransport::INVALID;
 
     int mImageDumpMask = 0;
 
+    // Re-space frames by overriding timestamps to align with display vsync.
+    // Default is on for SurfaceView-bound streams.
+    nsecs_t mMinExpectedDuration = 0;
+    bool mSyncToDisplay = false;
+    DisplayEventReceiver mDisplayEventReceiver;
+    nsecs_t mLastCaptureTime = 0;
+    nsecs_t mLastPresentTime = 0;
+    nsecs_t mCaptureToPresentOffset = 0;
+    static constexpr size_t kDisplaySyncExtraBuffer = 2;
+    static constexpr nsecs_t kSpacingResetIntervalNs = 50000000LL; // 50 milliseconds
+    static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
+    static constexpr float kMaxIntervalRatioDeviation = 0.05f;
+    static constexpr int kMaxTimelines = 3;
+    nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+
+    // Re-space frames by delaying queueBuffer so that frame delivery has
+    // the same cadence as capture. Default is on for SurfaceTexture bound
+    // streams.
+    sp<PreviewFrameSpacer> mPreviewFrameSpacer;
 }; // class Camera3OutputStream
 
 } // namespace camera3
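
The Camera3OutputStream.h comment above describes mTimestampOffset as initialized to
bootTime - monotonicTime in the constructor. A hedged sketch of how such an offset can be
sampled with the libutils clock helpers (illustrative only, not the constructor code):

    #include <utils/Timers.h>

    // Sample both clocks back to back; the skew between the two calls is
    // negligible compared to frame intervals.
    nsecs_t bootMinusMonotonic() {
        nsecs_t boot = systemTime(SYSTEM_TIME_BOOTTIME);
        nsecs_t mono = systemTime(SYSTEM_TIME_MONOTONIC);
        return boot - mono;
    }

Depending on the client's timestampBase, configureConsumerQueueLocked() then keeps, zeroes,
or negates this offset before it is applied to the HAL timestamps.
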
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 49f9f62..a6d4b96 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -34,7 +34,7 @@
      * Set the transform on the output stream; one of the
      * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
      */
-    virtual status_t setTransform(int transform) = 0;
+    virtual status_t setTransform(int transform, bool mayChangeMirror) = 0;
 
     /**
      * Return if this output stream is for video encoding.
@@ -108,6 +108,14 @@
      * instead.
      */
     virtual status_t setBatchSize(size_t batchSize = 1) = 0;
+
+    /**
+     * Notify the output stream that the minimum frame duration has changed.
+     *
+     * The minimum frame duration is calculated based on the upper bound of
+     * AE_TARGET_FPS_RANGE in the capture request.
+     */
+    virtual void onMinDurationChanged(nsecs_t duration) = 0;
 };
 
 // Helper class to organize a synchronized mapping of stream IDs to stream instances
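
The onMinDurationChanged() documentation above ties the minimum frame duration to the upper
bound of AE_TARGET_FPS_RANGE. A minimal sketch of that conversion, assuming a positive
integer FPS bound (the helper name is illustrative):

    #include <cstdint>

    constexpr int64_t kNsPerSecond = 1000000000LL;

    // e.g. an AE target range of [15, 30] yields 1e9 / 30 ns, roughly 33.3ms.
    int64_t minFrameDurationNsFromMaxFps(int32_t maxFps) {
        return maxFps > 0 ? kNsPerSecond / maxFps : 0;
    }

The stream receives the resulting duration through onMinDurationChanged() and uses it when
spacing frames to the display vsync.
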
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index d765b02..ed66df0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -376,43 +376,6 @@
     insertResultLocked(states, &captureResult, frameNumber);
 }
 
-// Reading one camera metadata from result argument via fmq or from the result
-// Assuming the fmq is protected by a lock already
-status_t readOneCameraMetadataLocked(
-        std::unique_ptr<ResultMetadataQueue>& fmq,
-        uint64_t fmqResultSize,
-        hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
-        const hardware::camera::device::V3_2::CameraMetadata& result) {
-    if (fmqResultSize > 0) {
-        resultMetadata.resize(fmqResultSize);
-        if (fmq == nullptr) {
-            return NO_MEMORY; // logged in initialize()
-        }
-        if (!fmq->read(resultMetadata.data(), fmqResultSize)) {
-            ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
-                    __FUNCTION__, fmqResultSize);
-            return INVALID_OPERATION;
-        }
-    } else {
-        resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
-                result.size());
-    }
-
-    if (resultMetadata.size() != 0) {
-        status_t res;
-        const camera_metadata_t* metadata =
-                reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
-        size_t expected_metadata_size = resultMetadata.size();
-        if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
-            ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return INVALID_OPERATION;
-        }
-    }
-
-    return OK;
-}
-
 void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
     ATRACE_CALL();
     InFlightRequestMap& inflightMap = states.inflightMap;
@@ -463,7 +426,7 @@
         returnOutputBuffers(
             states.useHalBufManager, states.listener,
             request.pendingOutputBuffers.array(),
-            request.pendingOutputBuffers.size(), 0,
+            request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
             /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
             /*timestampIncreasing*/true,
             request.outputSurfaces, request.resultExtras,
@@ -472,7 +435,7 @@
         // Note down the just completed frame number
         if (request.hasInputBuffer) {
             states.lastCompletedReprocessFrameNumber = frameNumber;
-        } else if (request.zslCapture) {
+        } else if (request.zslCapture && request.stillCapture) {
             states.lastCompletedZslFrameNumber = frameNumber;
         } else {
             states.lastCompletedRegularFrameNumber = frameNumber;
@@ -569,7 +532,7 @@
                     auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
                     if (orientation.count > 0) {
                         ret = CameraUtils::getRotationTransform(deviceInfo->second,
-                                &request.transform);
+                                OutputConfiguration::MIRROR_MODE_AUTO, &request.transform);
                         if (ret != OK) {
                             ALOGE("%s: Failed to calculate current stream transformation: %s (%d)",
                                     __FUNCTION__, strerror(-ret), ret);
@@ -719,160 +682,13 @@
     }
 }
 
-void processOneCaptureResultLocked(
-        CaptureOutputStates& states,
-        const hardware::camera::device::V3_2::CaptureResult& result,
-        const hardware::hidl_vec<
-                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
-    using hardware::camera::device::V3_2::StreamBuffer;
-    using hardware::camera::device::V3_2::BufferStatus;
-    std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
-    BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
-    camera_capture_result r;
-    status_t res;
-    r.frame_number = result.frameNumber;
-
-    // Read and validate the result metadata.
-    hardware::camera::device::V3_2::CameraMetadata resultMetadata;
-    res = readOneCameraMetadataLocked(
-            fmq, result.fmqResultSize,
-            resultMetadata, result.result);
-    if (res != OK) {
-        ALOGE("%s: Frame %d: Failed to read capture result metadata",
-                __FUNCTION__, result.frameNumber);
-        return;
-    }
-    r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
-
-    // Read and validate physical camera metadata
-    size_t physResultCount = physicalCameraMetadata.size();
-    std::vector<const char*> physCamIds(physResultCount);
-    std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
-    std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
-    physResultMetadata.resize(physResultCount);
-    for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
-        res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
-                physResultMetadata[i], physicalCameraMetadata[i].metadata);
-        if (res != OK) {
-            ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
-                    __FUNCTION__, result.frameNumber,
-                    physicalCameraMetadata[i].physicalCameraId.c_str());
-            return;
-        }
-        physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
-        phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
-                physResultMetadata[i].data());
-    }
-    r.num_physcam_metadata = physResultCount;
-    r.physcam_ids = physCamIds.data();
-    r.physcam_metadata = phyCamMetadatas.data();
-
-    std::vector<camera_stream_buffer_t> outputBuffers(result.outputBuffers.size());
-    std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
-    for (size_t i = 0; i < result.outputBuffers.size(); i++) {
-        auto& bDst = outputBuffers[i];
-        const StreamBuffer &bSrc = result.outputBuffers[i];
-
-        sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
-        if (stream == nullptr) {
-            ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
-                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
-            return;
-        }
-        bDst.stream = stream->asHalStream();
-
-        bool noBufferReturned = false;
-        buffer_handle_t *buffer = nullptr;
-        if (states.useHalBufManager) {
-            // This is suspicious most of the time but can be correct during flush where HAL
-            // has to return capture result before a buffer is requested
-            if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
-                if (bSrc.status == BufferStatus::OK) {
-                    ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
-                            __FUNCTION__, result.frameNumber, i, bSrc.streamId);
-                    // Still proceeds so other buffers can be returned
-                }
-                noBufferReturned = true;
-            }
-            if (noBufferReturned) {
-                res = OK;
-            } else {
-                res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
-            }
-        } else {
-            res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
-        }
-
-        if (res != OK) {
-            ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
-                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
-            return;
-        }
-
-        bDst.buffer = buffer;
-        bDst.status = mapHidlBufferStatus(bSrc.status);
-        bDst.acquire_fence = -1;
-        if (bSrc.releaseFence == nullptr) {
-            bDst.release_fence = -1;
-        } else if (bSrc.releaseFence->numFds == 1) {
-            if (noBufferReturned) {
-                ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
-            }
-            bDst.release_fence = dup(bSrc.releaseFence->data[0]);
-        } else {
-            ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
-                    __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
-            return;
-        }
-    }
-    r.num_output_buffers = outputBuffers.size();
-    r.output_buffers = outputBuffers.data();
-
-    camera_stream_buffer_t inputBuffer;
-    if (result.inputBuffer.streamId == -1) {
-        r.input_buffer = nullptr;
-    } else {
-        if (states.inputStream->getId() != result.inputBuffer.streamId) {
-            ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
-                    result.frameNumber, result.inputBuffer.streamId);
-            return;
-        }
-        inputBuffer.stream = states.inputStream->asHalStream();
-        buffer_handle_t *buffer;
-        res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
-                &buffer);
-        if (res != OK) {
-            ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
-                    __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
-            return;
-        }
-        inputBuffer.buffer = buffer;
-        inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
-        inputBuffer.acquire_fence = -1;
-        if (result.inputBuffer.releaseFence == nullptr) {
-            inputBuffer.release_fence = -1;
-        } else if (result.inputBuffer.releaseFence->numFds == 1) {
-            inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
-        } else {
-            ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
-                    __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
-            return;
-        }
-        r.input_buffer = &inputBuffer;
-    }
-
-    r.partial_result = result.partialResult;
-
-    processCaptureResult(states, &r);
-}
-
 void returnOutputBuffers(
         bool useHalBufManager,
         sp<NotificationListener> listener,
         const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
-        nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
-        SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing,
-        const SurfaceMap& outputSurfaces,
+        nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
+        nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
+        bool timestampIncreasing, const SurfaceMap& outputSurfaces,
         const CaptureResultExtras &inResultExtras,
         ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {
 
@@ -916,11 +732,11 @@
                 errorBufStrategy != ERROR_BUF_CACHE) {
             if (it != outputSurfaces.end()) {
                 res = stream->returnBuffer(
-                        outputBuffers[i], timestamp, timestampIncreasing, it->second,
-                        inResultExtras.frameNumber, transform);
+                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
+                        it->second, inResultExtras.frameNumber, transform);
             } else {
                 res = stream->returnBuffer(
-                        outputBuffers[i], timestamp, timestampIncreasing,
+                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                         std::vector<size_t> (), inResultExtras.frameNumber, transform);
             }
         }
@@ -951,7 +767,7 @@
             // cancel the buffer
             camera_stream_buffer_t sb = outputBuffers[i];
             sb.status = CAMERA_BUFFER_STATUS_ERROR;
-            stream->returnBuffer(sb, /*timestamp*/0,
+            stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
                     timestampIncreasing, std::vector<size_t> (),
                     inResultExtras.frameNumber, transform);
 
@@ -969,12 +785,13 @@
 void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
         sp<NotificationListener> listener, InFlightRequest& request,
         SessionStatsBuilder& sessionStatsBuilder) {
-    bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+    bool timestampIncreasing =
+            !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
     returnOutputBuffers(useHalBufManager, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
-            request.shutterTimestamp, /*requested*/true,
-            request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
+            request.shutterTimestamp, request.shutterReadoutTimestamp,
+            /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
 
@@ -1035,6 +852,14 @@
             }
 
             r.shutterTimestamp = msg.timestamp;
+            r.shutterReadoutTimestamp = msg.readout_timestamp;
+            if (r.minExpectedDuration != states.minFrameDuration) {
+                for (size_t i = 0; i < states.outputStreams.size(); i++) {
+                    auto outputStream = states.outputStreams[i];
+                    outputStream->onMinDurationChanged(r.minExpectedDuration);
+                }
+                states.minFrameDuration = r.minExpectedDuration;
+            }
             if (r.hasCallback) {
                 ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
                     states.cameraId.string(), __FUNCTION__,
@@ -1192,296 +1017,6 @@
     }
 }
 
-void notify(CaptureOutputStates& states,
-        const hardware::camera::device::V3_2::NotifyMsg& msg) {
-    using android::hardware::camera::device::V3_2::MsgType;
-    using android::hardware::camera::device::V3_2::ErrorCode;
-
-    ATRACE_CALL();
-    camera_notify_msg m;
-    switch (msg.type) {
-        case MsgType::ERROR:
-            m.type = CAMERA_MSG_ERROR;
-            m.message.error.frame_number = msg.msg.error.frameNumber;
-            if (msg.msg.error.errorStreamId >= 0) {
-                sp<Camera3StreamInterface> stream =
-                        states.outputStreams.get(msg.msg.error.errorStreamId);
-                if (stream == nullptr) {
-                    ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
-                            m.message.error.frame_number, msg.msg.error.errorStreamId);
-                    return;
-                }
-                m.message.error.error_stream = stream->asHalStream();
-            } else {
-                m.message.error.error_stream = nullptr;
-            }
-            switch (msg.msg.error.errorCode) {
-                case ErrorCode::ERROR_DEVICE:
-                    m.message.error.error_code = CAMERA_MSG_ERROR_DEVICE;
-                    break;
-                case ErrorCode::ERROR_REQUEST:
-                    m.message.error.error_code = CAMERA_MSG_ERROR_REQUEST;
-                    break;
-                case ErrorCode::ERROR_RESULT:
-                    m.message.error.error_code = CAMERA_MSG_ERROR_RESULT;
-                    break;
-                case ErrorCode::ERROR_BUFFER:
-                    m.message.error.error_code = CAMERA_MSG_ERROR_BUFFER;
-                    break;
-            }
-            break;
-        case MsgType::SHUTTER:
-            m.type = CAMERA_MSG_SHUTTER;
-            m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
-            m.message.shutter.timestamp = msg.msg.shutter.timestamp;
-            break;
-    }
-    notify(states, &m);
-}
-
-void requestStreamBuffers(RequestBufferStates& states,
-        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
-        hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
-    using android::hardware::camera::device::V3_2::BufferStatus;
-    using android::hardware::camera::device::V3_2::StreamBuffer;
-    using android::hardware::camera::device::V3_5::BufferRequestStatus;
-    using android::hardware::camera::device::V3_5::StreamBufferRet;
-    using android::hardware::camera::device::V3_5::StreamBufferRequestError;
-
-    std::lock_guard<std::mutex> lock(states.reqBufferLock);
-
-    hardware::hidl_vec<StreamBufferRet> bufRets;
-    if (!states.useHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer management",
-                __FUNCTION__, states.cameraId.string());
-        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-        return;
-    }
-
-    SortedVector<int32_t> streamIds;
-    ssize_t sz = streamIds.setCapacity(bufReqs.size());
-    if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
-        ALOGE("%s: failed to allocate memory for %zu buffer requests",
-                __FUNCTION__, bufReqs.size());
-        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-        return;
-    }
-
-    if (bufReqs.size() > states.outputStreams.size()) {
-        ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
-                __FUNCTION__, bufReqs.size(), states.outputStreams.size());
-        _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-        return;
-    }
-
-    // Check for repeated streamId
-    for (const auto& bufReq : bufReqs) {
-        if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
-            ALOGE("%s: Stream %d appear multiple times in buffer requests",
-                    __FUNCTION__, bufReq.streamId);
-            _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
-            return;
-        }
-        streamIds.add(bufReq.streamId);
-    }
-
-    if (!states.reqBufferIntf.startRequestBuffer()) {
-        ALOGE("%s: request buffer disallowed while camera service is configuring",
-                __FUNCTION__);
-        _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
-        return;
-    }
-
-    bufRets.resize(bufReqs.size());
-
-    bool allReqsSucceeds = true;
-    bool oneReqSucceeds = false;
-    for (size_t i = 0; i < bufReqs.size(); i++) {
-        const auto& bufReq = bufReqs[i];
-        auto& bufRet = bufRets[i];
-        int32_t streamId = bufReq.streamId;
-        sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
-        if (outputStream == nullptr) {
-            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
-            hardware::hidl_vec<StreamBufferRet> emptyBufRets;
-            _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
-            states.reqBufferIntf.endRequestBuffer();
-            return;
-        }
-
-        bufRet.streamId = streamId;
-        if (outputStream->isAbandoned()) {
-            bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
-            allReqsSucceeds = false;
-            continue;
-        }
-
-        size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
-        uint32_t numBuffersRequested = bufReq.numBuffersRequested;
-        size_t totalHandout = handOutBufferCount + numBuffersRequested;
-        uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
-        if (totalHandout > maxBuffers) {
-            // Not able to allocate enough buffer. Exit early for this stream
-            ALOGE("%s: request too much buffers for stream %d: at HAL: %zu + requesting: %d"
-                    " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
-                    numBuffersRequested, maxBuffers);
-            bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
-            allReqsSucceeds = false;
-            continue;
-        }
-
-        hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
-        bool currentReqSucceeds = true;
-        std::vector<camera_stream_buffer_t> streamBuffers(numBuffersRequested);
-        std::vector<buffer_handle_t> newBuffers;
-        size_t numAllocatedBuffers = 0;
-        size_t numPushedInflightBuffers = 0;
-        for (size_t b = 0; b < numBuffersRequested; b++) {
-            camera_stream_buffer_t& sb = streamBuffers[b];
-            // Since this method can run concurrently with request thread
-            // We need to update the wait duration everytime we call getbuffer
-            nsecs_t waitDuration =  states.reqBufferIntf.getWaitDuration();
-            status_t res = outputStream->getBuffer(&sb, waitDuration);
-            if (res != OK) {
-                if (res == NO_INIT || res == DEAD_OBJECT) {
-                    ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
-                            __FUNCTION__, streamId, strerror(-res), res);
-                    bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
-                    states.sessionStatsBuilder.stopCounter(streamId);
-                } else {
-                    ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
-                            __FUNCTION__, streamId, strerror(-res), res);
-                    if (res == TIMED_OUT || res == NO_MEMORY) {
-                        bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
-                    } else {
-                        bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
-                    }
-                }
-                currentReqSucceeds = false;
-                break;
-            }
-            numAllocatedBuffers++;
-
-            buffer_handle_t *buffer = sb.buffer;
-            auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
-            bool isNewBuffer = pair.first;
-            uint64_t bufferId = pair.second;
-            StreamBuffer& hBuf = tmpRetBuffers[b];
-
-            hBuf.streamId = streamId;
-            hBuf.bufferId = bufferId;
-            hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
-            hBuf.status = BufferStatus::OK;
-            hBuf.releaseFence = nullptr;
-            if (isNewBuffer) {
-                newBuffers.push_back(*buffer);
-            }
-
-            native_handle_t *acquireFence = nullptr;
-            if (sb.acquire_fence != -1) {
-                acquireFence = native_handle_create(1,0);
-                acquireFence->data[0] = sb.acquire_fence;
-            }
-            hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
-            hBuf.releaseFence = nullptr;
-
-            res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
-            if (res != OK) {
-                ALOGE("%s: Can't get register request buffers for stream %d: %s (%d)",
-                        __FUNCTION__, streamId, strerror(-res), res);
-                bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
-                currentReqSucceeds = false;
-                break;
-            }
-            numPushedInflightBuffers++;
-        }
-        if (currentReqSucceeds) {
-            bufRet.val.buffers(std::move(tmpRetBuffers));
-            oneReqSucceeds = true;
-        } else {
-            allReqsSucceeds = false;
-            for (size_t b = 0; b < numPushedInflightBuffers; b++) {
-                StreamBuffer& hBuf = tmpRetBuffers[b];
-                buffer_handle_t* buffer;
-                status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
-                        hBuf.bufferId, &buffer);
-                if (res != OK) {
-                    SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
-                            __FUNCTION__, streamId, strerror(-res), res);
-                }
-            }
-            for (size_t b = 0; b < numAllocatedBuffers; b++) {
-                camera_stream_buffer_t& sb = streamBuffers[b];
-                sb.acquire_fence = -1;
-                sb.status = CAMERA_BUFFER_STATUS_ERROR;
-            }
-            returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
-                    streamBuffers.data(), numAllocatedBuffers, 0, /*requested*/false,
-                    /*requestTimeNs*/0, states.sessionStatsBuilder);
-            for (auto buf : newBuffers) {
-                states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
-            }
-        }
-    }
-
-    _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
-            oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
-                             BufferRequestStatus::FAILED_UNKNOWN,
-            bufRets);
-    states.reqBufferIntf.endRequestBuffer();
-}
-
-void returnStreamBuffers(ReturnBufferStates& states,
-        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
-    if (!states.useHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer managerment",
-                __FUNCTION__, states.cameraId.string());
-        return;
-    }
-
-    for (const auto& buf : buffers) {
-        if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
-            ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
-            continue;
-        }
-
-        buffer_handle_t* buffer;
-        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
-
-        if (res != OK) {
-            ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
-                    __FUNCTION__, buf.bufferId, buf.streamId);
-            continue;
-        }
-
-        camera_stream_buffer_t streamBuffer;
-        streamBuffer.buffer = buffer;
-        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
-        streamBuffer.acquire_fence = -1;
-        streamBuffer.release_fence = -1;
-
-        if (buf.releaseFence == nullptr) {
-            streamBuffer.release_fence = -1;
-        } else if (buf.releaseFence->numFds == 1) {
-            streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
-        } else {
-            ALOGE("%s: Invalid release fence, fd count is %d, not 1",
-                    __FUNCTION__, buf.releaseFence->numFds);
-            continue;
-        }
-
-        sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
-        if (stream == nullptr) {
-            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
-            continue;
-        }
-        streamBuffer.stream = stream->asHalStream();
-        returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
-                &streamBuffer, /*size*/1, /*timestamp*/ 0, /*requested*/false,
-                /*requestTimeNs*/0, states.sessionStatsBuilder);
-    }
-}
-
 void flushInflightRequests(FlushInflightReqStates& states) {
     ATRACE_CALL();
     { // First return buffers cached in inFlightMap
@@ -1491,9 +1026,10 @@
             returnOutputBuffers(
                 states.useHalBufManager, states.listener,
                 request.pendingOutputBuffers.array(),
-                request.pendingOutputBuffers.size(), 0, /*requested*/true,
-                request.requestTimeNs, states.sessionStatsBuilder, /*timestampIncreasing*/true,
-                request.outputSurfaces, request.resultExtras, request.errorBufStrategy);
+                request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
+                /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+                /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
+                request.errorBufStrategy);
             ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                     " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
@@ -1565,7 +1101,7 @@
                 switch (halStream->stream_type) {
                     case CAMERA_STREAM_OUTPUT:
                         res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
-                                /*timestampIncreasing*/true,
+                                /*readoutTimestamp*/0, /*timestampIncreasing*/true,
                                 std::vector<size_t> (), frameNumber);
                         if (res != OK) {
                             ALOGE("%s: Can't return output buffer for frame %d to"
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 06b7ab4..d6107c2 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -38,70 +38,8 @@
 
 namespace android {
 
-using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
-
 namespace camera3 {
 
-    typedef struct camera_stream_configuration {
-        uint32_t num_streams;
-        camera_stream_t **streams;
-        uint32_t operation_mode;
-        bool input_is_multi_resolution;
-    } camera_stream_configuration_t;
-
-    typedef struct camera_capture_request {
-        uint32_t frame_number;
-        const camera_metadata_t *settings;
-        camera_stream_buffer_t *input_buffer;
-        uint32_t num_output_buffers;
-        const camera_stream_buffer_t *output_buffers;
-        uint32_t num_physcam_settings;
-        const char **physcam_id;
-        const camera_metadata_t **physcam_settings;
-        int32_t input_width;
-        int32_t input_height;
-    } camera_capture_request_t;
-
-    typedef struct camera_capture_result {
-        uint32_t frame_number;
-        const camera_metadata_t *result;
-        uint32_t num_output_buffers;
-        const camera_stream_buffer_t *output_buffers;
-        const camera_stream_buffer_t *input_buffer;
-        uint32_t partial_result;
-        uint32_t num_physcam_metadata;
-        const char **physcam_ids;
-        const camera_metadata_t **physcam_metadata;
-    } camera_capture_result_t;
-
-    typedef struct camera_shutter_msg {
-        uint32_t frame_number;
-        uint64_t timestamp;
-    } camera_shutter_msg_t;
-
-    typedef struct camera_error_msg {
-        uint32_t frame_number;
-        camera_stream_t *error_stream;
-        int error_code;
-    } camera_error_msg_t;
-
-    typedef enum camera_error_msg_code {
-        CAMERA_MSG_ERROR_DEVICE = 1,
-        CAMERA_MSG_ERROR_REQUEST = 2,
-        CAMERA_MSG_ERROR_RESULT = 3,
-        CAMERA_MSG_ERROR_BUFFER = 4,
-        CAMERA_MSG_NUM_ERRORS
-    } camera_error_msg_code_t;
-
-    typedef struct camera_notify_msg {
-        int type;
-
-        union {
-            camera_error_msg_t error;
-            camera_shutter_msg_t shutter;
-        } message;
-    } camera_notify_msg_t;
-
     /**
      * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
      */
@@ -112,7 +50,8 @@
             bool useHalBufManager,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
             const camera_stream_buffer_t *outputBuffers,
-            size_t numBuffers, nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
+            size_t numBuffers, nsecs_t timestamp,
+            nsecs_t readoutTimestamp, bool requested, nsecs_t requestTimeNs,
             SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
             // The following arguments are only meant for surface sharing use case
             const SurfaceMap& outputSurfaces = SurfaceMap{},
@@ -154,7 +93,6 @@
         const metadata_vendor_id_t vendorTagId;
         const CameraMetadata& deviceInfo;
         const std::unordered_map<std::string, CameraMetadata>& physicalDeviceInfoMap;
-        std::unique_ptr<ResultMetadataQueue>& fmq;
         std::unordered_map<std::string, camera3::DistortionMapper>& distortionMappers;
         std::unordered_map<std::string, camera3::ZoomRatioMapper>& zoomRatioMappers;
         std::unordered_map<std::string, camera3::RotateAndCropMapper>& rotateAndCropMappers;
@@ -167,19 +105,11 @@
         InflightRequestUpdateInterface& inflightIntf;
         BufferRecordsInterface& bufferRecordsIntf;
         bool legacyClient;
+        nsecs_t& minFrameDuration;
     };
 
-    // Handle one capture result. Assume callers hold the lock to serialize all
-    // processCaptureResult calls
-    void processOneCaptureResultLocked(
-            CaptureOutputStates& states,
-            const hardware::camera::device::V3_2::CaptureResult& result,
-            const hardware::hidl_vec<
-                    hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata);
-
-    // Handle one notify message
-    void notify(CaptureOutputStates& states,
-            const hardware::camera::device::V3_2::NotifyMsg& msg);
+    void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
+    void notify(CaptureOutputStates& states, const camera_notify_msg *msg);
 
     struct RequestBufferStates {
         const String8& cameraId;
@@ -192,10 +122,6 @@
         RequestBufferInterface& reqBufferIntf;
     };
 
-    void requestStreamBuffers(RequestBufferStates& states,
-            const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
-            hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb);
-
     struct ReturnBufferStates {
         const String8& cameraId;
         const bool useHalBufManager;
@@ -204,9 +130,6 @@
         BufferRecordsInterface& bufferRecordsIntf;
     };
 
-    void returnStreamBuffers(ReturnBufferStates& states,
-            const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers);
-
     struct FlushInflightReqStates {
         const String8& cameraId;
         std::mutex& inflightLock;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
new file mode 100644
index 0000000..2e05dda
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -0,0 +1,349 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_TEMPLUTILS_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_TEMPLUTILS_H
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/Trace.h>
+
+#include <aidl/android/hardware/common/NativeHandle.h>
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+
+#include <camera/CameraUtils.h>
+#include <camera_metadata_hidden.h>
+
+#include "device3/Camera3OutputUtils.h"
+
+#include "system/camera_metadata.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+namespace camera3 {
+
+template <class BufferStatusType>
+camera_buffer_status_t mapBufferStatus(BufferStatusType status) {
+    switch (status) {
+        case BufferStatusType::OK: return CAMERA_BUFFER_STATUS_OK;
+        case BufferStatusType::ERROR: return CAMERA_BUFFER_STATUS_ERROR;
+    }
+    return CAMERA_BUFFER_STATUS_ERROR;
+}
+
+inline void readBufferFromVec(hardware::hidl_vec<uint8_t> &dst,
+        const hardware::hidl_vec<uint8_t> &src) {
+    // Not cloning here since that will be done in processCaptureResult while
+    // assigning to CameraMetadata.
+    dst.setToExternal(const_cast<uint8_t *>(src.data()), src.size());
+}
+
+inline void readBufferFromVec(std::vector<uint8_t> &dst, const std::vector<uint8_t> &src) {
+    dst = src;
+}
+
+// Reading one camera metadata from result argument via fmq or from the result
+// Assuming the fmq is protected by a lock already
+template <class FmqType, class FmqPayloadType, class MetadataType>
+status_t readOneCameraMetadataLockedT(
+        std::unique_ptr<FmqType>& fmq,
+        uint64_t fmqResultSize,
+        MetadataType& resultMetadata,
+        const MetadataType& result) {
+    if (fmqResultSize > 0) {
+        resultMetadata.resize(fmqResultSize);
+        if (fmq == nullptr) {
+            return NO_MEMORY; // logged in initialize()
+        }
+        if (!fmq->read(reinterpret_cast<FmqPayloadType *>(resultMetadata.data()), fmqResultSize)) {
+            ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
+                    __FUNCTION__, fmqResultSize);
+            return INVALID_OPERATION;
+        }
+    } else {
+        readBufferFromVec(resultMetadata, result);
+    }
+
+    if (resultMetadata.size() != 0) {
+        status_t res;
+        const camera_metadata_t* metadata =
+                reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+        size_t expected_metadata_size = resultMetadata.size();
+        if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
+            ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return INVALID_OPERATION;
+        }
+    }
+
+    return OK;
+}
+
+inline bool isHandleNull(const hardware::hidl_handle &handle) {
+    return handle == nullptr;
+}
+
+inline bool isHandleNull(const aidl::android::hardware::common::NativeHandle &handle) {
+    return (handle.fds.size() == 0) && (handle.ints.size() == 0);
+}
+
+inline size_t numFdsInHandle(const hardware::hidl_handle &handle) {
+    return handle->numFds;
+}
+
+inline size_t numFdsInHandle(const aidl::android::hardware::common::NativeHandle &handle) {
+    return handle.fds.size();
+}
+
+inline int32_t getHandleFirstFd(const hardware::hidl_handle &handle) {
+    if (handle->numFds != 1) {
+        return -1;
+    }
+    return handle->data[0];
+}
+
+inline int32_t getHandleFirstFd(const aidl::android::hardware::common::NativeHandle &handle) {
+    if (handle.fds.size() != 1) {
+        return -1;
+    }
+    return handle.fds[0].get();
+}
+
+inline const hardware::hidl_vec<uint8_t>&
+getResultMetadata(const android::hardware::camera::device::V3_2::CameraMetadata &result) {
+    return result;
+}
+
+inline const std::vector<uint8_t>&
+getResultMetadata(const aidl::android::hardware::camera::device::CameraMetadata &result) {
+    return result.metadata;
+}
+
+// FmqPayloadType is needed because AIDL generates the fmq with an int8_t
+// payload for a byte queue, while MetadataType is uint8_t. For HIDL, the same
+// type (uint8_t) is generated for both metadata and fmq payload.
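+//
+// Translates one HIDL/AIDL capture result into a camera_capture_result_t and
+// forwards it to processCaptureResult(). Returns early (dropping the result)
+// if any metadata read or buffer lookup fails.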
+template <class StatesType, class CaptureResultType, class PhysMetadataType, class MetadataType,
+         class FmqType, class BufferStatusType, class FmqPayloadType = uint8_t>
+void processOneCaptureResultLockedT(
+        StatesType& states,
+        const CaptureResultType& result,
+        const PhysMetadataType &physicalCameraMetadata) {
+    std::unique_ptr<FmqType>& fmq = states.fmq;
+    BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
+    camera_capture_result r;
+    status_t res;
+    r.frame_number = result.frameNumber;
+
+    // Read and validate the result metadata.
+    MetadataType resultMetadata;
+    res = readOneCameraMetadataLockedT<FmqType, FmqPayloadType, MetadataType>(
+            fmq, result.fmqResultSize,
+            resultMetadata, getResultMetadata(result.result));
+    if (res != OK) {
+        ALOGE("%s: Frame %d: Failed to read capture result metadata",
+                __FUNCTION__, result.frameNumber);
+        return;
+    }
+    r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
+
+    // Read and validate physical camera metadata
+    size_t physResultCount = physicalCameraMetadata.size();
+    std::vector<const char*> physCamIds(physResultCount);
+    std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
+    std::vector<MetadataType> physResultMetadata;
+    physResultMetadata.resize(physResultCount);
+    for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
+        res = readOneCameraMetadataLockedT<FmqType, FmqPayloadType, MetadataType>(fmq,
+                physicalCameraMetadata[i].fmqMetadataSize,
+                physResultMetadata[i], getResultMetadata(physicalCameraMetadata[i].metadata));
+        if (res != OK) {
+            ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
+                    __FUNCTION__, result.frameNumber,
+                    physicalCameraMetadata[i].physicalCameraId.c_str());
+            return;
+        }
+        physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
+        phyCamMetadatas[i] =
+                reinterpret_cast<const camera_metadata_t*>(physResultMetadata[i].data());
+    }
+    r.num_physcam_metadata = physResultCount;
+    r.physcam_ids = physCamIds.data();
+    r.physcam_metadata = phyCamMetadatas.data();
+
+    std::vector<camera_stream_buffer_t> outputBuffers(result.outputBuffers.size());
+    std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
+    for (size_t i = 0; i < result.outputBuffers.size(); i++) {
+        auto& bDst = outputBuffers[i];
+        const auto &bSrc = result.outputBuffers[i];
+
+        sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
+        if (stream == nullptr) {
+            ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
+                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+            return;
+        }
+        bDst.stream = stream->asHalStream();
+
+        bool noBufferReturned = false;
+        buffer_handle_t *buffer = nullptr;
+        if (states.useHalBufManager) {
+            // This is suspicious most of the time but can be correct during flush, where the
+            // HAL has to return a capture result before a buffer is requested
+            if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
+                if (bSrc.status == BufferStatusType::OK) {
+                    ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
+                            __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+                    // Still proceed so other buffers can be returned
+                }
+                noBufferReturned = true;
+            }
+            if (noBufferReturned) {
+                res = OK;
+            } else {
+                res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
+            }
+        } else {
+            res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
+        }
+
+        if (res != OK) {
+            ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
+                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+            return;
+        }
+
+        bDst.buffer = buffer;
+        bDst.status = mapBufferStatus<BufferStatusType>(bSrc.status);
+        bDst.acquire_fence = -1;
+        if (isHandleNull(bSrc.releaseFence)) {
+            bDst.release_fence = -1;
+        } else if (numFdsInHandle(bSrc.releaseFence) == 1) {
+            if (noBufferReturned) {
+                ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
+            }
+            bDst.release_fence = dup(getHandleFirstFd(bSrc.releaseFence));
+        } else {
+            ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
+                    __FUNCTION__, result.frameNumber, i, (int)numFdsInHandle(bSrc.releaseFence));
+            return;
+        }
+    }
+    r.num_output_buffers = outputBuffers.size();
+    r.output_buffers = outputBuffers.data();
+
+    camera_stream_buffer_t inputBuffer;
+    if (result.inputBuffer.streamId == -1) {
+        r.input_buffer = nullptr;
+    } else {
+        if (states.inputStream->getId() != result.inputBuffer.streamId) {
+            ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
+                    result.frameNumber, result.inputBuffer.streamId);
+            return;
+        }
+        inputBuffer.stream = states.inputStream->asHalStream();
+        buffer_handle_t *buffer;
+        res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
+                &buffer);
+        if (res != OK) {
+            ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
+                    __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
+            return;
+        }
+        inputBuffer.buffer = buffer;
+        inputBuffer.status = mapBufferStatus<BufferStatusType>(result.inputBuffer.status);
+        inputBuffer.acquire_fence = -1;
+        if (isHandleNull(result.inputBuffer.releaseFence)) {
+            inputBuffer.release_fence = -1;
+        } else if (numFdsInHandle(result.inputBuffer.releaseFence) == 1) {
+            inputBuffer.release_fence = dup(getHandleFirstFd(result.inputBuffer.releaseFence));
+        } else {
+            ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
+                    __FUNCTION__, result.frameNumber,
+                    (int)numFdsInHandle(result.inputBuffer.releaseFence));
+            return;
+        }
+        r.input_buffer = &inputBuffer;
+    }
+
+    r.partial_result = result.partialResult;
+
+    processCaptureResult(states, &r);
+}
+
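+// Returns HAL-managed stream buffers back to their output streams with an
+// error status. Only valid when the HAL buffer manager is in use.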
+template <class VecStreamBufferType>
+void returnStreamBuffersT(ReturnBufferStates& states,
+        const VecStreamBufferType& buffers) {
+    if (!states.useHalBufManager) {
+        ALOGE("%s: Camera %s does not support HAL buffer managerment",
+                __FUNCTION__, states.cameraId.string());
+        return;
+    }
+
+    for (const auto& buf : buffers) {
+        if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
+            ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
+            continue;
+        }
+
+        buffer_handle_t* buffer;
+        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
+
+        if (res != OK) {
+            ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
+                    __FUNCTION__, buf.bufferId, buf.streamId);
+            continue;
+        }
+
+        camera_stream_buffer_t streamBuffer;
+        streamBuffer.buffer = buffer;
+        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
+        streamBuffer.acquire_fence = -1;
+        streamBuffer.release_fence = -1;
+
+        if (isHandleNull(buf.releaseFence)) {
+            streamBuffer.release_fence = -1;
+        } else if (numFdsInHandle(buf.releaseFence) == 1) {
+            streamBuffer.release_fence = dup(getHandleFirstFd(buf.releaseFence));
+        } else {
+            ALOGE("%s: Invalid release fence, fd count is %d, not 1",
+                    __FUNCTION__, (int)numFdsInHandle(buf.releaseFence));
+            continue;
+        }
+
+        sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
+        if (stream == nullptr) {
+            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
+            continue;
+        }
+        streamBuffer.stream = stream->asHalStream();
+        returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
+                &streamBuffer, /*size*/1, /*timestamp*/ 0, /*readoutTimestamp*/0,
+                /*requested*/false, /*requestTimeNs*/0, states.sessionStatsBuilder);
+    }
+}
+
+} // namespace camera3
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 15cf7f4..9215f23 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -32,11 +32,15 @@
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation,
         nsecs_t timestampOffset, const String8& physicalCameraId,
-        const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool useHalBufManager) :
+        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
+        int setId, bool useHalBufManager, int64_t dynamicProfile,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+        int mirrorMode) :
         Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
                             format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
-                            consumerUsage, timestampOffset, setId),
+                            transport, consumerUsage, timestampOffset, setId,
+                            /*isMultiResolution*/false, dynamicProfile, streamUseCase,
+                            deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
         mUseHalBufManager(useHalBufManager) {
     size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
     if (surfaces.size() > consumerCount) {
@@ -67,7 +71,7 @@
     }
 
     res = mStreamSplitter->connect(initialSurfaces, usage, mUsage, camera_stream::max_buffers,
-            getWidth(), getHeight(), getFormat(), &mConsumer);
+            getWidth(), getHeight(), getFormat(), &mConsumer, camera_stream::dynamic_range_profile);
     if (res != OK) {
         ALOGE("%s: Failed to connect to stream splitter: %s(%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -247,7 +251,7 @@
         return res;
     }
 
-    res = configureConsumerQueueLocked();
+    res = configureConsumerQueueLocked(false/*allowPreviewRespace*/);
     if (res != OK) {
         ALOGE("Failed to configureConsumerQueueLocked: %s(%d)", strerror(-res), res);
         return res;
@@ -388,13 +392,15 @@
         bool sizeMismatch = ((static_cast<uint32_t>(infoIt.width) != getWidth()) ||
                                 (static_cast<uint32_t> (infoIt.height) != getHeight())) ?
                                 true : false;
-        if ((imgReaderUsage && sizeMismatch) ||
+        bool dynamicRangeMismatch = dynamic_range_profile != infoIt.dynamicRangeProfile;
+        if ((imgReaderUsage && sizeMismatch) || dynamicRangeMismatch ||
                 (infoIt.format != getOriginalFormat() && infoIt.format != getFormat()) ||
                 (infoIt.dataSpace != getDataSpace() &&
                  infoIt.dataSpace != getOriginalDataSpace())) {
-            ALOGE("%s: Shared surface parameters format: 0x%x dataSpace: 0x%x "
-                    " don't match source stream format: 0x%x  dataSpace: 0x%x", __FUNCTION__,
-                    infoIt.format, infoIt.dataSpace, getFormat(), getDataSpace());
+            ALOGE("%s: Shared surface parameters format: 0x%x dataSpace: 0x%x dynamic range 0x%"
+                    PRIx64 " don't match source stream format: 0x%x  dataSpace: 0x%x dynamic"
+                    " range 0x%" PRIx64 , __FUNCTION__, infoIt.format, infoIt.dataSpace,
+                    infoIt.dynamicRangeProfile, getFormat(), getDataSpace(), dynamic_range_profile);
             return BAD_VALUE;
         }
     }
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 4b6341b..aac3c2a 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -38,9 +38,14 @@
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
             const String8& physicalCameraId,
-            const std::unordered_set<int32_t> &sensorPixelModesUsed,
+            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
-            bool useHalBufManager = false);
+            bool useHalBufManager = false,
+            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+            bool deviceTimeBaseIsRealtime = false,
+            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
 
     virtual ~Camera3SharedOutputStream();
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index afcfd2a..7ad6649 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+#include <vector>
+#include "system/window.h"
 #define LOG_TAG "Camera3-Stream"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -23,6 +25,7 @@
 #include "device3/Camera3Stream.h"
 #include "device3/StatusTracker.h"
 #include "utils/TraceHFR.h"
+#include "ui/GraphicBufferMapper.h"
 
 #include <cutils/properties.h>
 
@@ -51,7 +54,8 @@
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
         const String8& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
-        int setId, bool isMultiResolution) :
+        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
+        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
     camera_stream(),
     mId(id),
     mSetId(setId),
@@ -76,7 +80,9 @@
     mOriginalDataSpace(dataSpace),
     mPhysicalCameraId(physicalCameraId),
     mLastTimestamp(0),
-    mIsMultiResolution(isMultiResolution) {
+    mIsMultiResolution(isMultiResolution),
+    mDeviceTimeBaseIsRealtime(deviceTimeBaseIsRealtime),
+    mTimestampBase(timestampBase) {
 
     camera_stream::stream_type = type;
     camera_stream::width = width;
@@ -87,6 +93,8 @@
     camera_stream::max_buffers = 0;
     camera_stream::physical_camera_id = mPhysicalCameraId.string();
     camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
+    camera_stream::dynamic_range_profile = dynamicRangeProfile;
+    camera_stream::use_case = streamUseCase;
 
     if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
             maxSize == 0) {
@@ -147,6 +155,10 @@
     return mOriginalFormat;
 }
 
+int64_t Camera3Stream::getDynamicRangeProfile() const {
+    return camera_stream::dynamic_range_profile;
+}
+
 void Camera3Stream::setDataSpaceOverride(bool dataSpaceOverridden) {
     mDataSpaceOverridden = dataSpaceOverridden;
 }
@@ -167,6 +179,18 @@
     return camera_stream::max_buffers;
 }
 
+int64_t Camera3Stream::getStreamUseCase() const {
+    return camera_stream::use_case;
+}
+
+int Camera3Stream::getTimestampBase() const {
+    return mTimestampBase;
+}
+
+bool Camera3Stream::isDeviceTimeBaseRealtime() const {
+    return mDeviceTimeBaseIsRealtime;
+}
+
 void Camera3Stream::setOfflineProcessingSupport(bool support) {
     mSupportOfflineProcessing = support;
 }
@@ -557,7 +581,8 @@
     for (size_t i = 0; i < mPreparedBufferIdx; i++) {
         mPreparedBuffers.editItemAt(i).release_fence = -1;
         mPreparedBuffers.editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
-        returnBufferLocked(mPreparedBuffers[i], 0, /*transform*/ -1);
+        returnBufferLocked(mPreparedBuffers[i], /*timestamp*/0, /*readoutTimestamp*/0,
+                /*transform*/ -1);
     }
     mPreparedBuffers.clear();
     mPreparedBufferIdx = 0;
@@ -713,7 +738,7 @@
 }
 
 status_t Camera3Stream::returnBuffer(const camera_stream_buffer &buffer,
-        nsecs_t timestamp, bool timestampIncreasing,
+        nsecs_t timestamp, nsecs_t readoutTimestamp, bool timestampIncreasing,
          const std::vector<size_t>& surface_ids, uint64_t frameNumber, int32_t transform) {
     ATRACE_HFR_CALL();
     Mutex::Autolock l(mLock);
@@ -743,7 +768,7 @@
      *
      * Do this for getBuffer as well.
      */
-    status_t res = returnBufferLocked(b, timestamp, transform, surface_ids);
+    status_t res = returnBufferLocked(b, timestamp, readoutTimestamp, transform, surface_ids);
     if (res == OK) {
         fireBufferListenersLocked(b, /*acquired*/false, /*output*/true, timestamp, frameNumber);
     }
@@ -931,7 +956,7 @@
 }
 
 status_t Camera3Stream::returnBufferLocked(const camera_stream_buffer &,
-                                           nsecs_t, int32_t, const std::vector<size_t>&) {
+                                           nsecs_t, nsecs_t, int32_t, const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
     return INVALID_OPERATION;
 }
@@ -1077,6 +1102,52 @@
     return res;
 }
 
+void Camera3Stream::queueHDRMetadata(buffer_handle_t buffer, sp<ANativeWindow>& anw,
+        int64_t dynamicRangeProfile) {
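+    // Read HDR static (SMPTE 2086, HDR10) or dynamic (SMPTE 2094-40, HDR10+)
+    // metadata from the gralloc buffer and forward it to the native window
+    // before the buffer is queued.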
+    auto& mapper = GraphicBufferMapper::get();
+    switch (dynamicRangeProfile) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10: {
+            std::optional<ui::Smpte2086> smpte2086;
+            auto res = mapper.getSmpte2086(buffer, &smpte2086);
+            if ((res == OK) && smpte2086.has_value()) {
+                const auto& metaValue = smpte2086.value();
+                android_smpte2086_metadata meta = {
+                    .displayPrimaryRed.x = metaValue.primaryRed.x,
+                    .displayPrimaryRed.y = metaValue.primaryRed.y,
+                    .displayPrimaryGreen.x = metaValue.primaryGreen.x,
+                    .displayPrimaryGreen.y = metaValue.primaryGreen.y,
+                    .displayPrimaryBlue.x = metaValue.primaryBlue.x,
+                    .displayPrimaryBlue.y = metaValue.primaryBlue.y,
+                    .whitePoint.x = metaValue.whitePoint.x,
+                    .whitePoint.y = metaValue.whitePoint.y,
+                    .maxLuminance = metaValue.maxLuminance,
+                    .minLuminance = metaValue.minLuminance};
+                native_window_set_buffers_smpte2086_metadata(anw.get(), &meta);
+            } else {
+                ALOGE("%s Couldn't retrieve Smpte2086 metadata %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            }
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS: {
+            std::optional<std::vector<uint8_t>> smpte2094_40;
+            auto res = mapper.getSmpte2094_40(buffer, &smpte2094_40);
+            if ((res == OK) && smpte2094_40.has_value()) {
+                native_window_set_buffers_hdr10_plus_metadata(anw.get(),
+                        smpte2094_40.value().size(), smpte2094_40.value().data());
+            } else {
+                ALOGE("%s Couldn't retrieve Smpte2094_40 metadata %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            }
+            break;
+        }
+        default:
+            // No-op
+            break;
+    }
+}
+
+
 }; // namespace camera3
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index fc75f79..d429e6c 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -138,6 +138,10 @@
     static Camera3Stream*       cast(camera_stream *stream);
     static const Camera3Stream* cast(const camera_stream *stream);
 
+    // Queue corresponding HDR metadata to given native window.
+    static void queueHDRMetadata(buffer_handle_t buffer, sp<ANativeWindow>& anw,
+            int64_t dynamicRangeProfile);
+
     /**
      * Get the stream's ID
      */
@@ -168,11 +172,15 @@
     void              setFormatOverride(bool formatOverriden);
     bool              isFormatOverridden() const;
     int               getOriginalFormat() const;
+    int64_t           getDynamicRangeProfile() const;
     void              setDataSpaceOverride(bool dataSpaceOverriden);
     bool              isDataSpaceOverridden() const;
     android_dataspace getOriginalDataSpace() const;
     int               getMaxHalBuffers() const;
     const String8&    physicalCameraId() const;
+    int64_t           getStreamUseCase() const;
+    int               getTimestampBase() const;
+    bool              isDeviceTimeBaseRealtime() const;
 
     void              setOfflineProcessingSupport(bool) override;
     bool              getOfflineProcessingSupport() const override;
@@ -352,7 +360,7 @@
      * For bidirectional streams, this method applies to the output-side buffers
      */
     status_t         returnBuffer(const camera_stream_buffer &buffer,
-            nsecs_t timestamp, bool timestampIncreasing,
+            nsecs_t timestamp, nsecs_t readoutTimestamp, bool timestampIncreasing,
             const std::vector<size_t>& surface_ids = std::vector<size_t>(),
             uint64_t frameNumber = 0, int32_t transform = -1);
 
@@ -500,7 +508,8 @@
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
             const String8& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
-            int setId, bool isMultiResolution);
+            int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
+            int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
 
     wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
 
@@ -517,7 +526,7 @@
     virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
     virtual status_t returnBufferLocked(const camera_stream_buffer &buffer,
-            nsecs_t timestamp, int32_t transform,
+            nsecs_t timestamp, nsecs_t readoutTimestamp, int32_t transform,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
@@ -623,6 +632,9 @@
 
     bool mIsMultiResolution = false;
     bool mSupportOfflineProcessing = false;
+
+    bool mDeviceTimeBaseIsRealtime;
+    int mTimestampBase;
 }; // class Camera3Stream
 
 }; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 3aa5a3c..6812e89 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -19,6 +19,7 @@
 
 #include <utils/RefBase.h>
 
+#include <camera/camera2/OutputConfiguration.h>
 #include <camera/CameraMetadata.h>
 #include "Camera3StreamBufferListener.h"
 #include "Camera3StreamBufferFreedListener.h"
@@ -64,6 +65,8 @@
     const char* physical_camera_id;
 
     std::unordered_set<int32_t> sensor_pixel_modes_used;
+    int64_t dynamic_range_profile;
+    int64_t use_case;
 } camera_stream_t;
 
 typedef struct camera_stream_buffer {
@@ -107,14 +110,61 @@
         bool finalized = false;
         bool supportsOffline = false;
         std::unordered_set<int32_t> sensorPixelModesUsed;
+        int64_t dynamicRangeProfile;
+        int64_t streamUseCase;
+        int timestampBase;
+        int mirrorMode;
         OutputStreamInfo() :
             width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
-            consumerUsage(0) {}
+            consumerUsage(0),
+            dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+            streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+            timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
+            mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
         OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
-                uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed) :
+                uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
+                int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
             width(_width), height(_height), format(_format),
             dataSpace(_dataSpace), consumerUsage(_consumerUsage),
-            sensorPixelModesUsed(_sensorPixelModesUsed) {}
+            sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
+            streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+};
+
+// Utility class to lock and unlock a GraphicBuffer
+class GraphicBufferLocker {
+public:
+    GraphicBufferLocker(sp<GraphicBuffer> buffer) : _buffer(buffer) {}
+
+    status_t lockAsync(uint32_t usage, void** dstBuffer, int fenceFd) {
+        if (_buffer == nullptr) return BAD_VALUE;
+
+        status_t res = OK;
+        if (!_locked) {
+            res = _buffer->lockAsync(usage, dstBuffer, fenceFd);
+            if (res == OK) {
+                _locked = true;
+            }
+        }
+        return res;
+    }
+
+    status_t lockAsync(void** dstBuffer, int fenceFd) {
+        return lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, dstBuffer, fenceFd);
+    }
+
+    ~GraphicBufferLocker() {
+        if (_locked && _buffer != nullptr) {
+            auto res = _buffer->unlock();
+            if (res != OK) {
+                ALOGE("%s: Error trying to unlock buffer: %s (%d)", __FUNCTION__,
+                        strerror(-res), res);
+            }
+        }
+    }
+
+private:
+    sp<GraphicBuffer> _buffer;
+    bool _locked = false;
 };
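+// Hypothetical usage sketch (illustrative only): lock the buffer for CPU access
+// and rely on the destructor to unlock it:
+//     GraphicBufferLocker locker(graphicBuffer);
+//     void* data = nullptr;
+//     if (locker.lockAsync(&data, acquireFenceFd) == OK) {
+//         // write into 'data'; the buffer is unlocked when 'locker' goes out of scope
+//     }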
 
 /**
@@ -154,6 +204,7 @@
     virtual uint32_t getWidth() const = 0;
     virtual uint32_t getHeight() const = 0;
     virtual int      getFormat() const = 0;
+    virtual int64_t  getDynamicRangeProfile() const = 0;
     virtual android_dataspace getDataSpace() const = 0;
     virtual void setFormatOverride(bool formatOverriden) = 0;
     virtual bool isFormatOverridden() const = 0;
@@ -357,7 +408,7 @@
      * For bidirectional streams, this method applies to the output-side buffers
      */
     virtual status_t returnBuffer(const camera_stream_buffer &buffer,
-            nsecs_t timestamp, bool timestampIncreasing = true,
+            nsecs_t timestamp, nsecs_t readoutTimestamp, bool timestampIncreasing = true,
             const std::vector<size_t>& surface_ids = std::vector<size_t>(),
             uint64_t frameNumber = 0, int32_t transform = -1) = 0;
 
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 5c6c518..fd23958 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -34,13 +34,16 @@
 
 #include <cutils/atomic.h>
 
+#include "Camera3Stream.h"
+
 #include "Camera3StreamSplitter.h"
 
 namespace android {
 
 status_t Camera3StreamSplitter::connect(const std::unordered_map<size_t, sp<Surface>> &surfaces,
         uint64_t consumerUsage, uint64_t producerUsage, size_t halMaxBuffers, uint32_t width,
-        uint32_t height, android::PixelFormat format, sp<Surface>* consumer) {
+        uint32_t height, android::PixelFormat format, sp<Surface>* consumer,
+        int64_t dynamicRangeProfile) {
     ATRACE_CALL();
     if (consumer == nullptr) {
         SP_LOGE("%s: consumer pointer is NULL", __FUNCTION__);
@@ -61,6 +64,7 @@
 
     mMaxHalBuffers = halMaxBuffers;
     mConsumerName = getUniqueConsumerName();
+    mDynamicRangeProfile = dynamicRangeProfile;
     // Add output surfaces. This has to be before creating internal buffer queue
     // in order to get max consumer side buffers.
     for (auto &it : surfaces) {
@@ -136,6 +140,7 @@
         }
     }
     mOutputs.clear();
+    mOutputSurfaces.clear();
     mOutputSlots.clear();
     mConsumerBufferCount.clear();
 
@@ -258,6 +263,7 @@
 
     // Add new entry into mOutputs
     mOutputs[surfaceId] = gbp;
+    mOutputSurfaces[surfaceId] = outputQueue;
     mConsumerBufferCount[surfaceId] = maxConsumerBuffers;
     if (mConsumerBufferCount[surfaceId] > mMaxHalBuffers) {
         SP_LOGW("%s: Consumer buffer count %zu larger than max. Hal buffers: %zu", __FUNCTION__,
@@ -316,6 +322,7 @@
         }
     }
     mOutputs[surfaceId] = nullptr;
+    mOutputSurfaces[surfaceId] = nullptr;
     mOutputSlots[gbp] = nullptr;
     for (const auto &id : pendingBufferIds) {
         decrementBufRefCountLocked(id, surfaceId);
@@ -356,6 +363,14 @@
     const BufferTracker& tracker = *(mBuffers[bufferId]);
     int slot = getSlotForOutputLocked(output, tracker.getBuffer());
 
+    if (mOutputSurfaces[surfaceId] != nullptr) {
+        sp<ANativeWindow> anw = mOutputSurfaces[surfaceId];
+        camera3::Camera3Stream::queueHDRMetadata(
+                bufferItem.mGraphicBuffer->getNativeBuffer()->handle, anw, mDynamicRangeProfile);
+    } else {
+        SP_LOGE("%s: Invalid surface id: %zu!", __FUNCTION__, surfaceId);
+    }
+
     // In case the output BufferQueue has its own lock, if we hold splitter lock while calling
     // queueBuffer (which will try to acquire the output lock), the output could be holding its
     // own lock calling releaseBuffer (which  will try to acquire the splitter lock), running into
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 4eb455a..0f728a0 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -19,6 +19,8 @@
 
 #include <unordered_set>
 
+#include <camera/CameraMetadata.h>
+
 #include <gui/IConsumerListener.h>
 #include <gui/IProducerListener.h>
 #include <gui/BufferItemConsumer.h>
@@ -55,7 +57,8 @@
     // with output surfaces.
     status_t connect(const std::unordered_map<size_t, sp<Surface>> &surfaces,
             uint64_t consumerUsage, uint64_t producerUsage, size_t halMaxBuffers, uint32_t width,
-            uint32_t height, android::PixelFormat format, sp<Surface>* consumer);
+            uint32_t height, android::PixelFormat format, sp<Surface>* consumer,
+            int64_t dynamicRangeProfile);
 
     // addOutput adds an output BufferQueue to the splitter. The splitter
     // connects to outputQueue as a CPU producer, and any buffers queued
@@ -232,6 +235,7 @@
     uint32_t mHeight = 0;
     android::PixelFormat mFormat = android::PIXEL_FORMAT_NONE;
     uint64_t mProducerUsage = 0;
+    int64_t mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
 
     // The attachBuffer call will happen on different thread according to mUseHalBufManager and have
     // different timing constraint.
@@ -251,6 +255,9 @@
     //Map surface ids -> gbp outputs
     std::unordered_map<int, sp<IGraphicBufferProducer> > mOutputs;
 
+    //Map surface ids -> output Surfaces
+    std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+
     //Map surface ids -> consumer buffer count
     std::unordered_map<int, size_t > mConsumerBufferCount;
 
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 89dd115..15807bf 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -22,7 +22,7 @@
 #include <cmath>
 
 #include "device3/DistortionMapper.h"
-#include "utils/SessionConfigurationUtils.h"
+#include "utils/SessionConfigurationUtilsHost.h"
 
 namespace android {
 
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 42fa8db..493a9e2 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -30,6 +30,67 @@
 
 namespace camera3 {
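+// Framework-side definitions mirroring the legacy camera HAL3 request/result
+// structures (assumed equivalent to the definitions previously taken from the
+// HAL headers).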
 
+typedef struct camera_stream_configuration {
+    uint32_t num_streams;
+    camera_stream_t **streams;
+    uint32_t operation_mode;
+    bool input_is_multi_resolution;
+} camera_stream_configuration_t;
+
+typedef struct camera_capture_request {
+    uint32_t frame_number;
+    const camera_metadata_t *settings;
+    camera_stream_buffer_t *input_buffer;
+    uint32_t num_output_buffers;
+    const camera_stream_buffer_t *output_buffers;
+    uint32_t num_physcam_settings;
+    const char **physcam_id;
+    const camera_metadata_t **physcam_settings;
+    int32_t input_width;
+    int32_t input_height;
+} camera_capture_request_t;
+
+typedef struct camera_capture_result {
+    uint32_t frame_number;
+    const camera_metadata_t *result;
+    uint32_t num_output_buffers;
+    const camera_stream_buffer_t *output_buffers;
+    const camera_stream_buffer_t *input_buffer;
+    uint32_t partial_result;
+    uint32_t num_physcam_metadata;
+    const char **physcam_ids;
+    const camera_metadata_t **physcam_metadata;
+} camera_capture_result_t;
+
+typedef struct camera_shutter_msg {
+    uint32_t frame_number;
+    uint64_t timestamp;
+    uint64_t readout_timestamp;
+} camera_shutter_msg_t;
+
+typedef struct camera_error_msg {
+    uint32_t frame_number;
+    camera_stream_t *error_stream;
+    int error_code;
+} camera_error_msg_t;
+
+typedef enum camera_error_msg_code {
+    CAMERA_MSG_ERROR_DEVICE = 1,
+    CAMERA_MSG_ERROR_REQUEST = 2,
+    CAMERA_MSG_ERROR_RESULT = 3,
+    CAMERA_MSG_ERROR_BUFFER = 4,
+    CAMERA_MSG_NUM_ERRORS
+} camera_error_msg_code_t;
+
+typedef struct camera_notify_msg {
+    int type;
+
+    union {
+        camera_error_msg_t error;
+        camera_shutter_msg_t shutter;
+    } message;
+} camera_notify_msg_t;
+
 typedef enum {
     // Cache the buffers with STATUS_ERROR within InFlightRequest
     ERROR_BUF_CACHE,
@@ -41,9 +102,10 @@
 } ERROR_BUF_STRATEGY;
 
 struct InFlightRequest {
-
     // Set by notify() SHUTTER call.
     nsecs_t shutterTimestamp;
+    // Set by notify() SHUTTER call with readout time.
+    nsecs_t shutterReadoutTimestamp;
     // Set by process_capture_result().
     nsecs_t sensorTimestamp;
     int     requestStatus;
@@ -81,6 +143,11 @@
     // is not for constrained high speed recording, this flag will also be true.
     bool hasCallback;
 
+    // Minimum expected frame duration for this request
+    // For manual captures, equal to the max of requested exposure time and frame duration
+    // For auto-exposure modes, equal to 1/(higher end of target FPS range)
+    nsecs_t minExpectedDuration;
+
     // Maximum expected frame duration for this request.
     // For manual captures, equal to the max of requested exposure time and frame duration
     // For auto-exposure modes, equal to 1/(lower end of target FPS range)
@@ -125,8 +192,8 @@
     // Current output transformation
     int32_t transform;
 
-    // TODO: dedupe
-    static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
+    static const nsecs_t kDefaultMinExpectedDuration = 33333333; // 33 ms
+    static const nsecs_t kDefaultMaxExpectedDuration = 100000000; // 100 ms
 
     // Default constructor needed by KeyedVector
     InFlightRequest() :
@@ -137,7 +204,8 @@
             numBuffersLeft(0),
             hasInputBuffer(false),
             hasCallback(true),
-            maxExpectedDuration(kDefaultExpectedDuration),
+            minExpectedDuration(kDefaultMinExpectedDuration),
+            maxExpectedDuration(kDefaultMaxExpectedDuration),
             skipResultMetadata(false),
             errorBufStrategy(ERROR_BUF_CACHE),
             stillCapture(false),
@@ -148,7 +216,7 @@
     }
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
-            bool hasAppCallback, nsecs_t maxDuration,
+            bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration,
             const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
             nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
@@ -160,6 +228,7 @@
             resultExtras(extras),
             hasInputBuffer(hasInput),
             hasCallback(hasAppCallback),
+            minExpectedDuration(minDuration),
             maxExpectedDuration(maxDuration),
             skipResultMetadata(false),
             errorBufStrategy(ERROR_BUF_CACHE),
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
new file mode 100644
index 0000000..0439501
--- /dev/null
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-PreviewFrameSpacer"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+
+#include "PreviewFrameSpacer.h"
+#include "Camera3OutputStream.h"
+
+namespace android {
+
+namespace camera3 {
+
+PreviewFrameSpacer::PreviewFrameSpacer(wp<Camera3OutputStream> parent, sp<Surface> consumer) :
+        mParent(parent),
+        mConsumer(consumer) {
+}
+
+PreviewFrameSpacer::~PreviewFrameSpacer() {
+}
+
+status_t PreviewFrameSpacer::queuePreviewBuffer(nsecs_t timestamp, nsecs_t readoutTimestamp,
+        int32_t transform, ANativeWindowBuffer* anwBuffer, int releaseFence) {
+    Mutex::Autolock l(mLock);
+    mPendingBuffers.emplace(timestamp, readoutTimestamp, transform, anwBuffer, releaseFence);
+    ALOGV("%s: mPendingBuffers size %zu, timestamp %" PRId64 ", readoutTime %" PRId64,
+            __FUNCTION__, mPendingBuffers.size(), timestamp, readoutTimestamp);
+
+    mBufferCond.signal();
+    return OK;
+}
+
+bool PreviewFrameSpacer::threadLoop() {
+    Mutex::Autolock l(mLock);
+    if (mPendingBuffers.size() == 0) {
+        mBufferCond.waitRelative(mLock, kWaitDuration);
+        if (exitPending()) {
+            return false;
+        } else {
+            return true;
+        }
+    }
+
+    nsecs_t currentTime = systemTime();
+    auto buffer = mPendingBuffers.front();
+    nsecs_t readoutInterval = buffer.readoutTimestamp - mLastCameraReadoutTime;
+    // If the readout interval exceeds the threshold, queue the cached buffer
+    // immediately.
+    if (readoutInterval >= kFrameIntervalThreshold) {
+        mPendingBuffers.pop();
+        queueBufferToClientLocked(buffer, currentTime);
+        return true;
+    }
+
+    // Cache the frame to match readout time interval, for up to 33ms
+    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+    nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
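+    // Only wait while there is a single pending buffer; once a second buffer
+    // arrives, queue the oldest cached buffer immediately.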
+    if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
+        mBufferCond.waitRelative(mLock, frameWaitTime);
+        if (exitPending()) {
+            return false;
+        }
+        currentTime = systemTime();
+    }
+    ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+            ", timestamp %" PRId64, __FUNCTION__, readoutInterval,
+            currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+    mPendingBuffers.pop();
+    queueBufferToClientLocked(buffer, currentTime);
+    return true;
+}
+
+void PreviewFrameSpacer::requestExit() {
+    // Call parent to set up shutdown
+    Thread::requestExit();
+    // Exit from other possible wait
+    mBufferCond.signal();
+}
+
+void PreviewFrameSpacer::queueBufferToClientLocked(
+        const BufferHolder& bufferHolder, nsecs_t currentTime) {
+    sp<Camera3OutputStream> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
+        return;
+    }
+
+    parent->setTransform(bufferHolder.transform, true/*mayChangeMirror*/);
+
+    status_t res = native_window_set_buffers_timestamp(mConsumer.get(), bufferHolder.timestamp);
+    if (res != OK) {
+        ALOGE("%s: Preview Stream: Error setting timestamp: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+    }
+
+    Camera3Stream::queueHDRMetadata(bufferHolder.anwBuffer.get()->handle, mConsumer,
+            parent->getDynamicRangeProfile());
+
+    res = mConsumer->queueBuffer(mConsumer.get(), bufferHolder.anwBuffer.get(),
+            bufferHolder.releaseFence);
+    if (res != OK) {
+        close(bufferHolder.releaseFence);
+        if (parent->shouldLogError(res)) {
+            ALOGE("%s: Failed to queue buffer to client: %s(%d)", __FUNCTION__,
+                    strerror(-res), res);
+        }
+    }
+
+    mLastCameraPresentTime = currentTime;
+    mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
+}
+
+}; // namespace camera3
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
new file mode 100644
index 0000000..e165768
--- /dev/null
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_PREVIEWFRAMESPACER_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_PREVIEWFRAMESPACER_H
+
+#include <queue>
+
+#include <gui/Surface.h>
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+#include <utils/Thread.h>
+#include <utils/Timers.h>
+
+namespace android {
+
+namespace camera3 {
+
+class Camera3OutputStream;
+
+/***
+ * Preview stream spacer for better frame spacing
+ *
+ * The ideal viewfinder user experience is that frames are presented to the
+ * user in the same cadence as outputed by the camera sensor. However, the
+ * processing latency between frames could vary, due to factors such
+ * as CPU load, differences in request settings, etc. This frame processing
+ * latency results in variation in presentation of frames to the user.
+ *
+ * The PreviewFrameSpacer improves the viewfinder user experience by:
+ * - Caching the frame buffers if the intervals between queueBuffer calls are
+ *   shorter than the camera readout intervals.
+ * - Queueing frame buffers in the same cadence as the camera readout time.
+ * - Maintaining at most 1 queue-able buffer. If a 2nd preview buffer becomes
+ *   available, queueing the oldest cached buffer to the buffer queue.
+ */
+class PreviewFrameSpacer : public Thread {
+  public:
+    explicit PreviewFrameSpacer(wp<Camera3OutputStream> parent, sp<Surface> consumer);
+    virtual ~PreviewFrameSpacer();
+
+    // Queue preview buffer locally
+    status_t queuePreviewBuffer(nsecs_t timestamp, nsecs_t readoutTimestamp,
+            int32_t transform, ANativeWindowBuffer* anwBuffer, int releaseFence);
+
+    bool threadLoop() override;
+    void requestExit() override;
+
+  private:
+    // structure holding cached preview buffer info
+    struct BufferHolder {
+        nsecs_t timestamp;
+        nsecs_t readoutTimestamp;
+        int32_t transform;
+        sp<ANativeWindowBuffer> anwBuffer;
+        int releaseFence;
+
+        BufferHolder(nsecs_t t, nsecs_t readoutT, int32_t tr, ANativeWindowBuffer* anwb, int rf) :
+                timestamp(t), readoutTimestamp(readoutT), transform(tr), anwBuffer(anwb),
+                releaseFence(rf) {}
+    };
+
+    void queueBufferToClientLocked(const BufferHolder& bufferHolder, nsecs_t currentTime);
+
+    wp<Camera3OutputStream> mParent;
+    sp<ANativeWindow> mConsumer;
+    mutable Mutex mLock;
+    Condition mBufferCond;
+
+    std::queue<BufferHolder> mPendingBuffers;
+    nsecs_t mLastCameraReadoutTime = 0;
+    nsecs_t mLastCameraPresentTime = 0;
+    static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
+    static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
+    static constexpr nsecs_t kMaxFrameWaitTime = 33333333LL; // 33ms
+};
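+
+// Hypothetical usage sketch (names are illustrative, not part of this change):
+//     sp<PreviewFrameSpacer> spacer = new PreviewFrameSpacer(this, consumer);
+//     spacer->run("PreviewSpacer");
+//     spacer->queuePreviewBuffer(timestamp, readoutTimestamp, transform, anwBuffer, fence);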
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 7ec0956..27b00c9 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -20,7 +20,7 @@
 #include <algorithm>
 
 #include "device3/ZoomRatioMapper.h"
-#include "utils/SessionConfigurationUtils.h"
+#include "utils/SessionConfigurationUtilsHost.h"
 
 namespace android {
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
new file mode 100644
index 0000000..f05520f
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -0,0 +1,1583 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCamera3-Device"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macro for transient errors
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+            ##__VA_ARGS__)
+
+#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+            ##__VA_ARGS__)
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) setErrorState(   \
+    "%s: " fmt, __FUNCTION__,              \
+    ##__VA_ARGS__)
+#define SET_ERR_L(fmt, ...) setErrorStateLocked( \
+    "%s: " fmt, __FUNCTION__,                    \
+    ##__VA_ARGS__)
+
+#include <inttypes.h>
+
+#include <utility>
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Timers.h>
+#include <cutils/properties.h>
+
+#include <aidl/android/hardware/camera/device/ICameraInjectionSession.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+
+#include "utils/CameraTraces.h"
+#include "mediautils/SchedulingPolicyService.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3FakeStream.h"
+#include "device3/Camera3SharedOutputStream.h"
+#include "device3/aidl/AidlCamera3OutputUtils.h"
+#include "device3/aidl/AidlCamera3OfflineSession.h"
+#include "CameraService.h"
+#include "utils/CameraThreadState.h"
+#include "utils/SessionConfigurationUtils.h"
+#include "utils/TraceHFR.h"
+#include "utils/CameraServiceProxyWrapper.h"
+
+#include "../../common/aidl/AidlProviderInfo.h"
+
+#include <algorithm>
+
+#include "AidlCamera3Device.h"
+
+using namespace android::camera3;
+using namespace aidl::android::hardware;
+using aidl::android::hardware::camera::metadata::SensorPixelMode;
+using aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
+using aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
+
+namespace android {
+
+RequestAvailableDynamicRangeProfilesMap
+mapToAidlDynamicProfile(int64_t dynamicRangeProfile) {
+    return static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfile);
+}
+
+aidl::android::hardware::graphics::common::PixelFormat AidlCamera3Device::mapToAidlPixelFormat(
+        int frameworkFormat) {
+    return (aidl::android::hardware::graphics::common::PixelFormat) frameworkFormat;
+}
+
+aidl::android::hardware::graphics::common::Dataspace AidlCamera3Device::mapToAidlDataspace(
+        android_dataspace dataSpace) {
+    return (aidl::android::hardware::graphics::common::Dataspace)dataSpace;
+}
+
+aidl::android::hardware::graphics::common::BufferUsage AidlCamera3Device::mapToAidlConsumerUsage(
+        uint64_t usage) {
+    return (aidl::android::hardware::graphics::common::BufferUsage)usage;
+}
+
+aidl::android::hardware::camera::device::StreamRotation
+AidlCamera3Device::mapToAidlStreamRotation(camera_stream_rotation_t rotation) {
+    switch (rotation) {
+        case CAMERA_STREAM_ROTATION_0:
+            return aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
+        case CAMERA_STREAM_ROTATION_90:
+            return aidl::android::hardware::camera::device::StreamRotation::ROTATION_90;
+        case CAMERA_STREAM_ROTATION_180:
+            return aidl::android::hardware::camera::device::StreamRotation::ROTATION_180;
+        case CAMERA_STREAM_ROTATION_270:
+            return aidl::android::hardware::camera::device::StreamRotation::ROTATION_270;
+    }
+    ALOGE("%s: Unknown stream rotation %d", __FUNCTION__, rotation);
+    return aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
+}
+
+status_t AidlCamera3Device::mapToAidlStreamConfigurationMode(
+        camera_stream_configuration_mode_t operationMode,
+        aidl::android::hardware::camera::device::StreamConfigurationMode *mode) {
+    using StreamConfigurationMode =
+            aidl::android::hardware::camera::device::StreamConfigurationMode;
+    if (mode == nullptr) return BAD_VALUE;
+    if (operationMode < CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START) {
+        switch(operationMode) {
+            case CAMERA_STREAM_CONFIGURATION_NORMAL_MODE:
+                *mode = StreamConfigurationMode::NORMAL_MODE;
+                break;
+            case CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
+                *mode = StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE;
+                break;
+            default:
+                ALOGE("%s: Unknown stream configuration mode %d", __FUNCTION__, operationMode);
+                return BAD_VALUE;
+        }
+    } else {
+        *mode = static_cast<StreamConfigurationMode>(operationMode);
+    }
+    return OK;
+}
+
+int AidlCamera3Device::mapToFrameworkFormat(
+        aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
+    return static_cast<uint32_t>(pixelFormat);
+}
+
+android_dataspace AidlCamera3Device::mapToFrameworkDataspace(
+        aidl::android::hardware::graphics::common::Dataspace dataSpace) {
+    return static_cast<android_dataspace>(dataSpace);
+}
+
+uint64_t AidlCamera3Device::mapConsumerToFrameworkUsage(
+        aidl::android::hardware::graphics::common::BufferUsage usage) {
+    return (uint64_t)usage;
+}
+
+uint64_t AidlCamera3Device::mapProducerToFrameworkUsage(
+       aidl::android::hardware::graphics::common::BufferUsage usage) {
+    return (uint64_t)usage;
+}
+
+AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
+            bool legacyClient) : Camera3Device(id, overrideForPerfClass, legacyClient) {
+        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+}
+
+status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
+        const String8& monitorTags) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    ALOGV("%s: Initializing AIDL device for camera %s", __FUNCTION__, mId.string());
+    if (mStatus != STATUS_UNINITIALIZED) {
+        CLOGE("Already initialized!");
+        return INVALID_OPERATION;
+    }
+    if (manager == nullptr) return INVALID_OPERATION;
+
+    std::shared_ptr<camera::device::ICameraDeviceSession> session;
+    ATRACE_BEGIN("CameraHal::openSession");
+    status_t res = manager->openAidlSession(mId.string(), mCallbacks,
+            /*out*/ &session);
+    ATRACE_END();
+    if (res != OK) {
+        SET_ERR_L("Could not open camera session: %s (%d)", strerror(-res), res);
+        return res;
+    }
+    if (session == nullptr) {
+      SET_ERR("Session iface returned is null");
+      return INVALID_OPERATION;
+    }
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    if (res != OK) {
+        SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
+        session->close();
+        return res;
+    }
+    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
+
+    std::vector<std::string> physicalCameraIds;
+    bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
+    if (isLogical) {
+        for (auto& physicalId : physicalCameraIds) {
+            // Do not override characteristics for physical cameras
+            res = manager->getCameraCharacteristics(
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+            if (res != OK) {
+                SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
+                        physicalId.c_str(), strerror(-res), res);
+                session->close();
+                return res;
+            }
+
+            bool usePrecorrectArray =
+                    DistortionMapper::isDistortionSupported(mPhysicalDeviceInfoMap[physicalId]);
+            if (usePrecorrectArray) {
+                res = mDistortionMappers[physicalId].setupStaticInfo(
+                        mPhysicalDeviceInfoMap[physicalId]);
+                if (res != OK) {
+                    SET_ERR_L("Unable to read camera %s's calibration fields for distortion "
+                            "correction", physicalId.c_str());
+                    session->close();
+                    return res;
+                }
+            }
+
+            mZoomRatioMappers[physicalId] = ZoomRatioMapper(
+                    &mPhysicalDeviceInfoMap[physicalId],
+                    mSupportNativeZoomRatio, usePrecorrectArray);
+
+            if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+                    mPhysicalDeviceInfoMap[physicalId])) {
+                mUHRCropAndMeteringRegionMappers[physicalId] =
+                        UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
+                                usePrecorrectArray);
+            }
+        }
+    }
+
+    std::shared_ptr<AidlRequestMetadataQueue> queue;
+    ::aidl::android::hardware::common::fmq::MQDescriptor<
+            int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> desc;
+
+    ::ndk::ScopedAStatus requestQueueRet = session->getCaptureRequestMetadataQueue(&desc);
+    if (!requestQueueRet.isOk()) {
+        ALOGE("Transaction error when getting request metadata queue from camera session: %s",
+                requestQueueRet.getMessage());
+        return AidlProviderInfo::mapToStatusT(requestQueueRet);
+    }
+    queue = std::make_unique<AidlRequestMetadataQueue>(desc);
+    if (!queue->isValid() || queue->availableToWrite() <= 0) {
+        ALOGE("HAL returned an empty request metadata fmq; not using it");
+        queue = nullptr;
+        // Don't use queue onwards.
+    }
+
+    std::unique_ptr<AidlResultMetadataQueue>& resQueue = mResultMetadataQueue;
+    ::aidl::android::hardware::common::fmq::MQDescriptor<
+        int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> resDesc;
+    ::ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&resDesc);
+    if (!resultQueueRet.isOk()) {
+        ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+                resultQueueRet.getMessage());
+        return AidlProviderInfo::mapToStatusT(resultQueueRet);
+    }
+    resQueue = std::make_unique<AidlResultMetadataQueue>(resDesc);
+    if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+        ALOGE("HAL returned an empty result metadata fmq; not using it");
+        resQueue = nullptr;
+        // Don't use resQueue onwards.
+    }
+
+    camera_metadata_entry bufMgrMode =
+            mDeviceInfo.find(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION);
+    if (bufMgrMode.count > 0) {
+        mUseHalBufManager = (bufMgrMode.data.u8[0] ==
+                ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
+    }
+
+    camera_metadata_entry_t capabilities = mDeviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    for (size_t i = 0; i < capabilities.count; i++) {
+        uint8_t capability = capabilities.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING) {
+            mSupportOfflineProcessing = true;
+        }
+    }
+
+    mInterface = new AidlHalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
+
+    mVendorTagId = manager->getProviderTagIdLocked(mId.string());
+    mTagMonitor.initialize(mVendorTagId);
+    if (!monitorTags.isEmpty()) {
+        mTagMonitor.parseTagsToMonitor(String8(monitorTags));
+    }
+
+    for (size_t i = 0; i < capabilities.count; i++) {
+        uint8_t capability = capabilities.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
+            mNeedFixupMonochromeTags = true;
+        }
+    }
+
+    return initializeCommonLocked();
+}
+
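+// Binder callback thunks: promote the weak parent reference and forward to the device.
+// If the parent device is already gone there is nothing to deliver to, so report OK.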
+::ndk::ScopedAStatus AidlCamera3Device::AidlCameraDeviceCallbacks::processCaptureResult(
+            const std::vector<camera::device::CaptureResult>& results) {
+    sp<AidlCamera3Device> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->processCaptureResult(results);
+}
+
+::ndk::ScopedAStatus AidlCamera3Device::AidlCameraDeviceCallbacks::notify(
+        const std::vector<camera::device::NotifyMsg>& msgs) {
+    sp<AidlCamera3Device> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->notify(msgs);
+}
+
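+// Capture results arrive on a binder thread. They are serialized with
+// mProcessCaptureResultLock and handed to the shared camera3 result handling code through
+// an AidlCaptureOutputStates bundle.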
+::ndk::ScopedAStatus AidlCamera3Device::processCaptureResult(
+            const std::vector<camera::device::CaptureResult>& results) {
+    // Ideally we should grab mLock, but that can lead to deadlock, and
+    // it's not super important to get up to date value of mStatus for this
+    // warning print, hence skipping the lock here
+    if (mStatus == STATUS_ERROR) {
+        // Per API contract, HAL should act as closed after device error
+        // But mStatus can be set to error by framework as well, so just log
+        // a warning here.
+        ALOGW("%s: received capture result in error state.", __FUNCTION__);
+    }
+
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
+    if (mProcessCaptureResultLock.tryLock() != OK) {
+        // This should never happen; it indicates a wrong client implementation
+        // that doesn't follow the contract. But, we can be tolerant here.
+        ALOGE("%s: callback overlapped! waiting 1s...",
+                __FUNCTION__);
+        if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
+            ALOGE("%s: cannot acquire lock in 1s, dropping results",
+                    __FUNCTION__);
+            // really don't know what to do, so bail out.
+            return ::ndk::ScopedAStatus::ok();
+        }
+    }
+    AidlCaptureOutputStates states {
+       {
+        mId,
+        mInFlightLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+    };
+
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result, result.physicalCameraMetadata);
+    }
+    mProcessCaptureResultLock.unlock();
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlCamera3Device::notify(
+        const std::vector<camera::device::NotifyMsg>& msgs) {
+    // Ideally we should grab mLock, but that can lead to deadlock, and
+    // it's not super important to get up to date value of mStatus for this
+    // warning print, hence skipping the lock here
+    if (mStatus == STATUS_ERROR) {
+        // Per API contract, HAL should act as closed after device error
+        // But mStatus can be set to error by framework as well, so just log
+        // a warning here.
+        ALOGW("%s: received notify message in error state.", __FUNCTION__);
+    }
+
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
+    AidlCaptureOutputStates states {
+      { mId,
+        mInFlightLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+    };
+    for (const auto& msg : msgs) {
+        camera3::notify(states, msg);
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
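+// Moves the requested streams to an offline session: drains the request thread, asks the
+// HAL to switch, verifies the returned offline session info against local stream and
+// in-flight state, then transfers ownership of streams, buffer records and in-flight
+// requests to a new AidlCamera3OfflineSession and resets this device to UNCONFIGURED.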
+status_t AidlCamera3Device::switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/ sp<CameraOfflineSessionBase>* session) {
+    ATRACE_CALL();
+    if (session == nullptr) {
+        ALOGE("%s: session must not be null", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock il(mInterfaceLock);
+
+    bool hasInputStream = mInputStream != nullptr;
+    int32_t inputStreamId = hasInputStream ? mInputStream->getId() : -1;
+    bool inputStreamSupportsOffline = hasInputStream ?
+            mInputStream->getOfflineProcessingSupport() : false;
+    auto outputStreamIds = mOutputStreams.getStreamIds();
+    auto streamIds = outputStreamIds;
+    if (hasInputStream) {
+        streamIds.push_back(mInputStream->getId());
+    }
+
+    // Check all streams in streamsToKeep supports offline mode
+    for (auto id : streamsToKeep) {
+        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+            ALOGE("%s: Unknown stream ID %d", __FUNCTION__, id);
+            return BAD_VALUE;
+        } else if (id == inputStreamId) {
+            if (!inputStreamSupportsOffline) {
+                ALOGE("%s: input stream %d cannot be switched to offline",
+                        __FUNCTION__, id);
+                return BAD_VALUE;
+            }
+        } else {
+            sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
+            if (!stream->getOfflineProcessingSupport()) {
+                ALOGE("%s: output stream %d cannot be switched to offline",
+                        __FUNCTION__, id);
+                return BAD_VALUE;
+            }
+        }
+    }
+    // TODO: block surface sharing and surface group streams until we can support them
+
+    // Stop repeating request, wait until all remaining requests are submitted, then call into
+    // HAL switchToOffline
+    camera::device::CameraOfflineSessionInfo offlineSessionInfo;
+    std::shared_ptr<camera::device::ICameraOfflineSession> offlineSession;
+    camera3::BufferRecords bufferRecords;
+    status_t ret = static_cast<AidlRequestThread *>(mRequestThread.get())->switchToOffline(
+            streamsToKeep, &offlineSessionInfo, &offlineSession, &bufferRecords);
+
+    if (ret != OK) {
+        SET_ERR("Switch to offline failed: %s (%d)", strerror(-ret), ret);
+        return ret;
+    }
+
+    bool succ = mRequestBufferSM.onSwitchToOfflineSuccess();
+    if (!succ) {
+        SET_ERR("HAL must not be calling requestStreamBuffers at this point");
+        // TODO: block ALL callbacks from HAL till app configured new streams?
+        return UNKNOWN_ERROR;
+    }
+
+    // Verify offlineSessionInfo
+    std::vector<int32_t> offlineStreamIds;
+    offlineStreamIds.reserve(offlineSessionInfo.offlineStreams.size());
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        // verify stream IDs
+        int32_t id = offlineStream.id;
+        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+            SET_ERR("stream ID %d not found!", id);
+            return UNKNOWN_ERROR;
+        }
+
+        // When not using HAL buf manager, only allow streams requested by app to be preserved
+        if (!mUseHalBufManager) {
+            if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
+                SET_ERR("stream ID %d must not be switched to offline!", id);
+                return UNKNOWN_ERROR;
+            }
+        }
+
+        offlineStreamIds.push_back(id);
+        sp<Camera3StreamInterface> stream = (id == inputStreamId) ?
+                static_cast<sp<Camera3StreamInterface>>(mInputStream) :
+                static_cast<sp<Camera3StreamInterface>>(mOutputStreams.get(id));
+        // Verify number of outstanding buffers
+        if (stream->getOutstandingBuffersCount() != (uint32_t)offlineStream.numOutstandingBuffers) {
+            SET_ERR("Offline stream %d # of remaining buffers mismatch: (%zu, %d) (service/HAL)",
+                    id, stream->getOutstandingBuffersCount(), offlineStream.numOutstandingBuffers);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Verify all streams to be deleted don't have any outstanding buffers
+    if (hasInputStream && std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+                inputStreamId) == offlineStreamIds.end()) {
+        if (mInputStream->hasOutstandingBuffers()) {
+            SET_ERR("Input stream %d still has %zu outstanding buffers!",
+                    inputStreamId, mInputStream->getOutstandingBuffersCount());
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    for (const auto& outStreamId : outputStreamIds) {
+        if (std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+                outStreamId) == offlineStreamIds.end()) {
+            auto outStream = mOutputStreams.get(outStreamId);
+            if (outStream->hasOutstandingBuffers()) {
+                SET_ERR("Output stream %d still has %zu outstanding buffers!",
+                        outStreamId, outStream->getOutstandingBuffersCount());
+                return UNKNOWN_ERROR;
+            }
+        }
+    }
+
+    InFlightRequestMap offlineReqs;
+    // Verify inflight requests and their pending buffers
+    {
+        std::lock_guard<std::mutex> l(mInFlightLock);
+        for (auto offlineReq : offlineSessionInfo.offlineRequests) {
+            int idx = mInFlightMap.indexOfKey(offlineReq.frameNumber);
+            if (idx == NAME_NOT_FOUND) {
+                SET_ERR("Offline request frame number %d not found!", offlineReq.frameNumber);
+                return UNKNOWN_ERROR;
+            }
+
+            const auto& inflightReq = mInFlightMap.valueAt(idx);
+            // TODO: check specific stream IDs
+            size_t numBuffersLeft = static_cast<size_t>(inflightReq.numBuffersLeft);
+            if (numBuffersLeft != offlineReq.pendingStreams.size()) {
+                SET_ERR("Offline request # of remaining buffers mismatch: (%d, %zu) (service/HAL)",
+                        inflightReq.numBuffersLeft, offlineReq.pendingStreams.size());
+                return UNKNOWN_ERROR;
+            }
+            offlineReqs.add(offlineReq.frameNumber, inflightReq);
+        }
+    }
+
+    // Create Camera3OfflineSession and transfer object ownership
+    //   (streams, inflight requests, buffer caches)
+    camera3::StreamSet offlineStreamSet;
+    sp<camera3::Camera3Stream> inputStream;
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        int32_t id = offlineStream.id;
+        if (mInputStream != nullptr && id == mInputStream->getId()) {
+            inputStream = mInputStream;
+        } else {
+            offlineStreamSet.add(id, mOutputStreams.get(id));
+        }
+    }
+
+    // TODO: check if we need to lock before copying states
+    //       though technically no other thread should be talking to Camera3Device at this point
+    Camera3OfflineStates offlineStates(
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
+            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
+            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
+            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
+            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
+            mZoomRatioMappers, mRotateAndCropMappers);
+
+    *session = new AidlCamera3OfflineSession(mId, inputStream, offlineStreamSet,
+            std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+
+    // Delete all streams that have been transferred to offline session
+    Mutex::Autolock l(mLock);
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        int32_t id = offlineStream.id;
+        if (mInputStream != nullptr && id == mInputStream->getId()) {
+            mInputStream.clear();
+        } else {
+            mOutputStreams.remove(id);
+        }
+    }
+
+    // disconnect all other streams and switch to UNCONFIGURED state
+    if (mInputStream != nullptr) {
+        ret = mInputStream->disconnect();
+        if (ret != OK) {
+            SET_ERR_L("disconnect input stream failed!");
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    for (auto streamId : mOutputStreams.getStreamIds()) {
+        sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
+        ret = stream->disconnect();
+        if (ret != OK) {
+            SET_ERR_L("disconnect output stream %d failed!", streamId);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    mInputStream.clear();
+    mOutputStreams.clear();
+    mNeedConfig = true;
+    internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+    mOperatingMode = NO_MODE;
+    mIsConstrainedHighSpeedConfiguration = false;
+    mRequestThread->clearPreviousRequest();
+
+    return OK;
+    // To be done by CameraDeviceClient/Camera3OfflineSession:
+    // register the offline client to camera service
+    // Set up result passing threads etc
+    // Initialize offline session so HAL can start sending callbacks to it (result Fmq)
+    // TODO: check how many onIdle callbacks will be sent
+    // Java side to make sure the CameraCaptureSession is properly closed
+}
+
+::ndk::ScopedAStatus AidlCamera3Device::AidlCameraDeviceCallbacks::requestStreamBuffers(
+        const std::vector<camera::device::BufferRequest>& bufReqs,
+        std::vector<aidl::android::hardware::camera::device::StreamBufferRet>* outBuffers,
+        aidl::android::hardware::camera::device::BufferRequestStatus* status) {
+
+    sp<AidlCamera3Device> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->requestStreamBuffers(bufReqs, outBuffers, status);
+}
+
+::ndk::ScopedAStatus AidlCamera3Device::requestStreamBuffers(
+        const std::vector<camera::device::BufferRequest>& bufReqs,
+        std::vector<aidl::android::hardware::camera::device::StreamBufferRet>* outBuffers,
+        aidl::android::hardware::camera::device::BufferRequestStatus* status) {
+
+    RequestBufferStates states {
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
+        mSessionStatsBuilder, *this, *(mInterface), *this};
+    camera3::requestStreamBuffers(states, bufReqs, outBuffers, status);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlCamera3Device::AidlCameraDeviceCallbacks::returnStreamBuffers(
+        const std::vector<camera::device::StreamBuffer>& buffers) {
+    sp<AidlCamera3Device> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->returnStreamBuffers(buffers);
+}
+
+::ndk::ScopedAStatus AidlCamera3Device::returnStreamBuffers(
+        const std::vector<camera::device::StreamBuffer>& buffers) {
+    ReturnBufferStates states {
+        mId, mUseHalBufManager, mOutputStreams,  mSessionStatsBuilder,
+        *(mInterface)};
+    camera3::returnStreamBuffers(states, buffers);
+    return ::ndk::ScopedAStatus::ok();
+}
+
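+// Thin wrapper around the AIDL ICameraDeviceSession (and, for injection, the
+// ICameraInjectionSession) plus the request metadata FMQ used to send settings.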
+AidlCamera3Device::AidlHalInterface::AidlHalInterface(
+            std::shared_ptr<aidl::android::hardware::camera::device::ICameraDeviceSession> &session,
+            std::shared_ptr<AidlRequestMetadataQueue> queue,
+            bool useHalBufManager, bool supportOfflineProcessing) :
+        HalInterface(useHalBufManager, supportOfflineProcessing),
+        mAidlSession(session),
+        mRequestMetadataQueue(queue) { }
+
+AidlCamera3Device::AidlHalInterface::AidlHalInterface(
+            std::shared_ptr<aidl::android::hardware::camera::device::ICameraDeviceSession>
+                    &deviceSession,
+            std::shared_ptr<aidl::android::hardware::camera::device::ICameraInjectionSession>
+                    &injectionSession, std::shared_ptr<AidlRequestMetadataQueue> queue,
+            bool useHalBufManager, bool supportOfflineProcessing) :
+        HalInterface(useHalBufManager, supportOfflineProcessing),
+        mAidlSession(deviceSession),
+        mAidlInjectionSession(injectionSession),
+        mRequestMetadataQueue(queue) { }
+
+bool AidlCamera3Device::AidlHalInterface::valid() {
+    return (mAidlSession != nullptr);
+}
+
+void AidlCamera3Device::AidlHalInterface::clear() {
+    mAidlSession.reset();
+}
+
+status_t AidlCamera3Device::AidlHalInterface::flush() {
+    ATRACE_NAME("CameraHal::flush");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    auto err = mAidlSession->flush();
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+        res = AidlProviderInfo::mapToStatusT(err);
+    }
+
+    return res;
+}
+
+status_t AidlCamera3Device::AidlHalInterface::dump(int /*fd*/) {
+    ATRACE_NAME("CameraHal::dump");
+    if (!valid()) return INVALID_OPERATION;
+
+    // Handled by CameraProviderManager::dump
+
+    return OK;
+}
+
+status_t AidlCamera3Device::AidlHalInterface::repeatingRequestEnd(uint32_t frameNumber,
+        const std::vector<int32_t> &streamIds) {
+    ATRACE_NAME("AidlCameraHal::repeatingRequestEnd");
+    if (!valid()) return INVALID_OPERATION;
+
+    auto err = mAidlSession->repeatingRequestEnd(frameNumber, streamIds);
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+        return AidlProviderInfo::mapToStatusT(err);
+    }
+
+    return OK;
+}
+
+status_t AidlCamera3Device::AidlHalInterface::close() {
+    ATRACE_NAME("CameraHal::close()");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    auto err = mAidlSession->close();
+    // Interface will be dead shortly anyway, so don't log errors
+    if (!err.isOk()) {
+        res = DEAD_OBJECT;
+    }
+
+    return res;
+}
+
+void AidlCamera3Device::AidlHalInterface::signalPipelineDrain(const std::vector<int>& streamIds) {
+    ATRACE_NAME("CameraHal::signalPipelineDrain");
+    if (!valid()) {
+        ALOGE("%s called on invalid camera!", __FUNCTION__);
+        return;
+    }
+
+    auto err = mAidlSession->signalStreamFlush(streamIds, mNextStreamConfigCounter - 1);
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+        return;
+    }
+}
+
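+// Asks the HAL whether the new session parameters require a stream reconfiguration;
+// defaults to "required" when the query is unsupported or the call fails.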
+bool AidlCamera3Device::AidlHalInterface::isReconfigurationRequired(
+        CameraMetadata& oldSessionParams, CameraMetadata& newSessionParams) {
+    // We do reconfiguration by default.
+    bool required = true;
+    if (mIsReconfigurationQuerySupported) {
+        aidl::android::hardware::camera::device::CameraMetadata oldParams, newParams;
+        camera_metadata_t* oldSessionMeta = const_cast<camera_metadata_t*>(
+                oldSessionParams.getAndLock());
+        uint8_t *oldSessionByteP = reinterpret_cast<uint8_t*>(oldSessionMeta);
+
+        camera_metadata_t* newSessionMeta = const_cast<camera_metadata_t*>(
+                newSessionParams.getAndLock());
+        uint8_t *newSessionByteP = reinterpret_cast<uint8_t*>(newSessionMeta);
+        // std::vector has no setToExternal, so we have to copy
+        oldParams.metadata.assign(oldSessionByteP,
+                oldSessionByteP + get_camera_metadata_size(oldSessionMeta));
+        newParams.metadata.assign(newSessionByteP,
+                newSessionByteP + get_camera_metadata_size(newSessionMeta));
+        auto err = mAidlSession->isReconfigurationRequired(oldParams, newParams, &required);
+        oldSessionParams.unlock(oldSessionMeta);
+        newSessionParams.unlock(newSessionMeta);
+        if (!err.isOk()) {
+            ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, err.getMessage());
+            return true;
+        }
+    }
+
+    return required;
+}
+
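+// Maps the framework template ID to the AIDL RequestTemplate, fetches the default settings
+// from the HAL, then validates and clones the returned metadata for the caller.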
+status_t AidlCamera3Device::AidlHalInterface::constructDefaultRequestSettings(
+        camera_request_template_t templateId,
+        /*out*/ camera_metadata_t **requestTemplate) {
+    ATRACE_NAME("CameraAidlHal::constructDefaultRequestSettings");
+    using aidl::android::hardware::camera::device::RequestTemplate;
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    RequestTemplate id;
+    aidl::android::hardware::camera::device::CameraMetadata request;
+    switch (templateId) {
+        case CAMERA_TEMPLATE_PREVIEW:
+            id = RequestTemplate::PREVIEW;
+            break;
+        case CAMERA_TEMPLATE_STILL_CAPTURE:
+            id = RequestTemplate::STILL_CAPTURE;
+            break;
+        case CAMERA_TEMPLATE_VIDEO_RECORD:
+            id = RequestTemplate::VIDEO_RECORD;
+            break;
+        case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
+            id = RequestTemplate::VIDEO_SNAPSHOT;
+            break;
+        case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
+            id = RequestTemplate::ZERO_SHUTTER_LAG;
+            break;
+        case CAMERA_TEMPLATE_MANUAL:
+            id = RequestTemplate::MANUAL;
+            break;
+        default:
+            // Unknown template ID, or this HAL is too old to support it
+            return BAD_VALUE;
+    }
+    auto err = mAidlSession->constructDefaultRequestSettings(id, &request);
+
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+        return AidlProviderInfo::mapToStatusT(err);
+    }
+    const camera_metadata *r =
+            reinterpret_cast<const camera_metadata_t*>(request.metadata.data());
+    size_t expectedSize = request.metadata.size();
+    int ret = validate_camera_metadata_structure(r, &expectedSize);
+    if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
+        *requestTemplate = clone_camera_metadata(r);
+        if (*requestTemplate == nullptr) {
+            ALOGE("%s: Unable to clone camera metadata received from HAL",
+                    __FUNCTION__);
+            res = UNKNOWN_ERROR;
+        }
+    } else {
+        ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+        res = UNKNOWN_ERROR;
+    }
+
+    return res;
+}
+
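+// Converts the framework stream configuration into an AIDL StreamConfiguration, calls
+// ICameraDeviceSession::configureStreams, and applies the HAL's per-stream overrides
+// (format, dataspace, usage, max buffers) back onto the framework streams.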
+status_t AidlCamera3Device::AidlHalInterface::configureStreams(
+    const camera_metadata_t *sessionParams,
+        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+    using camera::device::StreamType;
+    using camera::device::StreamConfigurationMode;
+
+    ATRACE_NAME("CameraHal::configureStreams");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    // Convert stream config to AIDL
+    std::set<int> activeStreams;
+    camera::device::StreamConfiguration requestedConfiguration;
+    requestedConfiguration.streams.resize(config->num_streams);
+    for (size_t i = 0; i < config->num_streams; i++) {
+        camera::device::Stream &dst = requestedConfiguration.streams[i];
+        camera3::camera_stream_t *src = config->streams[i];
+
+        Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        cam3stream->setBufferFreedListener(this);
+        int streamId = cam3stream->getId();
+        StreamType streamType;
+        switch (src->stream_type) {
+            case CAMERA_STREAM_OUTPUT:
+                streamType = StreamType::OUTPUT;
+                break;
+            case CAMERA_STREAM_INPUT:
+                streamType = StreamType::INPUT;
+                break;
+            default:
+                ALOGE("%s: Stream %d: Unsupported stream type %d",
+                        __FUNCTION__, streamId, config->streams[i]->stream_type);
+                return BAD_VALUE;
+        }
+        dst.id = streamId;
+        dst.streamType = streamType;
+        dst.width = src->width;
+        dst.height = src->height;
+        dst.usage = mapToAidlConsumerUsage(cam3stream->getUsage());
+        dst.rotation = mapToAidlStreamRotation((camera_stream_rotation_t) src->rotation);
+        dst.format = mapToAidlPixelFormat(cam3stream->isFormatOverridden() ?
+                    cam3stream->getOriginalFormat() : src->format);
+        dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
+                    cam3stream->getOriginalDataSpace() : src->data_space);
+
+        dst.bufferSize = bufferSizes[i];
+        if (src->physical_camera_id != nullptr) {
+            dst.physicalCameraId = src->physical_camera_id;
+        }
+        dst.groupId = cam3stream->getHalStreamGroupId();
+        dst.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst.sensorPixelModesUsed[j++] = static_cast<SensorPixelMode>(mode);
+        }
+        dst.dynamicRangeProfile = mapToAidlDynamicProfile(src->dynamic_range_profile);
+        dst.useCase = static_cast<ScalerAvailableStreamUseCases>(src->use_case);
+        activeStreams.insert(streamId);
+        // Create Buffer ID map if necessary
+        mBufferRecords.tryCreateBufferCache(streamId);
+    }
+    // remove BufferIdMap for deleted streams
+    mBufferRecords.removeInactiveBufferCaches(activeStreams);
+
+    StreamConfigurationMode operationMode;
+    res = mapToAidlStreamConfigurationMode(
+            (camera_stream_configuration_mode_t) config->operation_mode,
+            /*out*/ &operationMode);
+    if (res != OK) {
+        return res;
+    }
+    requestedConfiguration.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
+    uint8_t *sessionParamP =
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams));
+
+    // std::vector has no setToExternal, so we have to copy
+    requestedConfiguration.sessionParams.metadata.assign(
+                sessionParamP, sessionParamP + sessionParamSize);
+
+    // Invoke configureStreams
+    std::vector<camera::device::HalStream> finalConfiguration;
+
+    requestedConfiguration.streamConfigCounter = mNextStreamConfigCounter++;
+    requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
+    auto err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+        return AidlProviderInfo::mapToStatusT(err);
+    }
+
+    // And convert output stream configuration from AIDL
+
+    for (size_t i = 0; i < config->num_streams; i++) {
+        camera3::camera_stream_t *dst = config->streams[i];
+        int streamId = Camera3Stream::cast(dst)->getId();
+
+        // Start scan at i, with the assumption that the stream order matches
+        size_t realIdx = i;
+        bool found = false;
+        size_t halStreamCount = finalConfiguration.size();
+        for (size_t idx = 0; idx < halStreamCount; idx++) {
+            if (finalConfiguration[realIdx].id == streamId) {
+                found = true;
+                break;
+            }
+            realIdx = (realIdx >= halStreamCount - 1) ? 0 : realIdx + 1;
+        }
+        if (!found) {
+            ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+                    __FUNCTION__, streamId);
+            return INVALID_OPERATION;
+        }
+        camera::device::HalStream &src = finalConfiguration[realIdx];
+
+        Camera3Stream* dstStream = Camera3Stream::cast(dst);
+        int overrideFormat = mapToFrameworkFormat(src.overrideFormat);
+        android_dataspace overrideDataSpace = mapToFrameworkDataspace(src.overrideDataSpace);
+
+        dstStream->setOfflineProcessingSupport(src.supportOffline);
+
+        if (dstStream->getOriginalFormat() != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            dstStream->setFormatOverride(false);
+            dstStream->setDataSpaceOverride(false);
+            if (dst->format != overrideFormat) {
+                ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
+                        streamId, dst->format);
+            }
+            if (dst->data_space != overrideDataSpace) {
+                ALOGE("%s: Stream %d: DataSpace override not allowed for format 0x%x", __FUNCTION__,
+                        streamId, dst->format);
+            }
+        } else {
+            bool needFormatOverride =
+                    requestedConfiguration.streams[i].format != src.overrideFormat;
+            bool needDataspaceOverride =
+                    requestedConfiguration.streams[i].dataSpace != src.overrideDataSpace;
+            // Override allowed with IMPLEMENTATION_DEFINED
+            dstStream->setFormatOverride(needFormatOverride);
+            dstStream->setDataSpaceOverride(needDataspaceOverride);
+            dst->format = overrideFormat;
+            dst->data_space = overrideDataSpace;
+        }
+
+        if (dst->stream_type == CAMERA_STREAM_INPUT) {
+            if (static_cast<int64_t>(src.producerUsage) != 0) {
+                ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
+                        __FUNCTION__, streamId);
+                return INVALID_OPERATION;
+            }
+            dstStream->setUsage(
+                    mapConsumerToFrameworkUsage(src.consumerUsage));
+        } else {
+            // OUTPUT
+            if (static_cast<int64_t>(src.consumerUsage) != 0) {
+                ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
+                        __FUNCTION__, streamId);
+                return INVALID_OPERATION;
+            }
+            dstStream->setUsage(
+                    mapProducerToFrameworkUsage(src.producerUsage));
+        }
+        dst->max_buffers = src.maxBuffers;
+    }
+
+    return res;
+}
+
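+// Same conversion as configureStreams, but routed through the injection session and
+// accompanied by the injected camera's characteristics; multi-resolution input is rejected.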
+status_t AidlCamera3Device::AidlHalInterface::configureInjectedStreams(
+        const camera_metadata_t* sessionParams, camera_stream_configuration* config,
+        const std::vector<uint32_t>& bufferSizes,
+        const CameraMetadata& cameraCharacteristics) {
+    using camera::device::StreamType;
+    using camera::device::StreamConfigurationMode;
+
+    ATRACE_NAME("InjectionCameraHal::configureStreams");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    if (config->input_is_multi_resolution) {
+        ALOGE("%s: Injection camera device doesn't support multi-resolution input "
+                "stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // Convert stream config to AIDL
+    std::set<int> activeStreams;
+    camera::device::StreamConfiguration requestedConfiguration;
+    requestedConfiguration.streams.resize(config->num_streams);
+    for (size_t i = 0; i < config->num_streams; i++) {
+        camera::device::Stream& dst = requestedConfiguration.streams[i];
+        camera3::camera_stream_t* src = config->streams[i];
+
+        Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        cam3stream->setBufferFreedListener(this);
+        int streamId = cam3stream->getId();
+        StreamType streamType;
+        switch (src->stream_type) {
+            case CAMERA_STREAM_OUTPUT:
+                streamType = StreamType::OUTPUT;
+                break;
+            case CAMERA_STREAM_INPUT:
+                streamType = StreamType::INPUT;
+                break;
+            default:
+                ALOGE("%s: Stream %d: Unsupported stream type %d", __FUNCTION__,
+                        streamId, config->streams[i]->stream_type);
+            return BAD_VALUE;
+        }
+        dst.id = streamId;
+        dst.streamType = streamType;
+        dst.width = src->width;
+        dst.height = src->height;
+        dst.usage = mapToAidlConsumerUsage(cam3stream->getUsage());
+        dst.rotation = mapToAidlStreamRotation((camera_stream_rotation_t)src->rotation);
+        dst.format =
+            mapToAidlPixelFormat(cam3stream->isFormatOverridden() ? cam3stream->getOriginalFormat()
+                    : src->format);
+        dst.dataSpace =
+            mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
+                    cam3stream->getOriginalDataSpace() : src->data_space);
+        dst.bufferSize = bufferSizes[i];
+        if (src->physical_camera_id != nullptr) {
+            dst.physicalCameraId = src->physical_camera_id;
+        }
+        dst.groupId = cam3stream->getHalStreamGroupId();
+        dst.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst.sensorPixelModesUsed[j++] = static_cast<SensorPixelMode>(mode);
+        }
+        activeStreams.insert(streamId);
+        // Create Buffer ID map if necessary
+        mBufferRecords.tryCreateBufferCache(streamId);
+    }
+    // remove BufferIdMap for deleted streams
+    mBufferRecords.removeInactiveBufferCaches(activeStreams);
+
+    StreamConfigurationMode operationMode;
+    res = mapToAidlStreamConfigurationMode(
+            (camera_stream_configuration_mode_t)config->operation_mode,
+            /*out*/ &operationMode);
+    if (res != OK) {
+        return res;
+    }
+    requestedConfiguration.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
+    uint8_t *sessionParamP =
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams));
+    requestedConfiguration.sessionParams.metadata.assign(
+            sessionParamP, sessionParamP + sessionParamSize);
+
+    // See which version of HAL we have
+    if (mAidlInjectionSession != nullptr) {
+        requestedConfiguration.streamConfigCounter = mNextStreamConfigCounter++;
+        requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
+
+        const camera_metadata_t* rawMetadata = cameraCharacteristics.getAndLock();
+        uint8_t *aidlCharsP =
+                reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(rawMetadata));
+        aidl::android::hardware::camera::device::CameraMetadata aidlChars;
+        aidlChars.metadata.assign(aidlCharsP, aidlCharsP + get_camera_metadata_size(rawMetadata));
+        cameraCharacteristics.unlock(rawMetadata);
+
+        auto err = mAidlInjectionSession->configureInjectionStreams(requestedConfiguration,
+                aidlChars);
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+            return AidlProviderInfo::mapToStatusT(err);
+        }
+    } else {
+        ALOGE("%s: mAidlInjectionSession == nullptr, injection is not supported", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    return res;
+}
+
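+// Converts a batch of framework capture requests to AIDL, writes settings through the
+// request metadata FMQ when possible (falling back to parceled metadata otherwise), and
+// submits them in a single processCaptureRequest call. Acquire-fence handles are closed or
+// released depending on whether the transaction succeeded and whether the HAL is remote.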
+status_t AidlCamera3Device::AidlHalInterface::processBatchCaptureRequests(
+        std::vector<camera_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
+    ATRACE_NAME("CameraHal::processBatchCaptureRequests");
+    if (!valid()) return INVALID_OPERATION;
+
+    std::vector<camera::device::CaptureRequest> captureRequests;
+    size_t batchSize = requests.size();
+    if (batchSize > INT_MAX) {
+        ALOGE("%s batchSize %zu > INT_MAX, aidl interface cannot handle batch size", __FUNCTION__,
+                  batchSize);
+        return BAD_VALUE;
+    }
+    captureRequests.resize(batchSize);
+    std::vector<native_handle_t*> handlesCreated;
+    std::vector<std::pair<int32_t, int32_t>> inflightBuffers;
+
+    status_t res = OK;
+    for (size_t i = 0; i < batchSize; i++) {
+        res = wrapAsAidlRequest(requests[i], /*out*/&captureRequests[i],
+                /*out*/&handlesCreated, /*out*/&inflightBuffers);
+
+        if (res != OK) {
+            mBufferRecords.popInflightBuffers(inflightBuffers);
+            cleanupNativeHandles(&handlesCreated);
+            return res;
+        }
+    }
+
+    std::vector<camera::device::BufferCache> cachesToRemove;
+    {
+        std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+        for (auto& pair : mFreedBuffers) {
+            // The stream might have been removed since onBufferFreed
+            if (mBufferRecords.isStreamCached(pair.first)) {
+                cachesToRemove.push_back({pair.first, static_cast<int64_t>(pair.second)});
+            }
+        }
+        mFreedBuffers.clear();
+    }
+
+    *numRequestProcessed = 0;
+
+    // Write metadata to FMQ.
+    for (size_t i = 0; i < batchSize; i++) {
+        camera_capture_request_t* request = requests[i];
+        camera::device::CaptureRequest* captureRequest;
+        captureRequest = &captureRequests[i];
+
+        if (request->settings != nullptr) {
+            size_t settingsSize = get_camera_metadata_size(request->settings);
+            if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
+                    reinterpret_cast<const int8_t*>(request->settings), settingsSize)) {
+                captureRequest->settings.metadata.resize(0);
+                captureRequest->fmqSettingsSize = settingsSize;
+            } else {
+                if (mRequestMetadataQueue != nullptr) {
+                    ALOGW("%s: couldn't utilize fmq, falling back to binder", __FUNCTION__);
+                }
+                uint8_t *settingsP =
+                        reinterpret_cast<uint8_t*>(
+                                const_cast<camera_metadata_t*>(request->settings));
+                captureRequest->settings.metadata.assign(settingsP, settingsP + settingsSize);
+                captureRequest->fmqSettingsSize = 0u;
+            }
+        } else {
+            // A null request settings maps to a size-0 CameraMetadata
+            captureRequest->settings.metadata.resize(0);
+            captureRequest->fmqSettingsSize = 0u;
+        }
+
+        captureRequest->inputWidth = request->input_width;
+        captureRequest->inputHeight = request->input_height;
+
+        std::vector<camera::device::PhysicalCameraSetting>& physicalCameraSettings =
+                captureRequest->physicalCameraSettings;
+        physicalCameraSettings.resize(request->num_physcam_settings);
+        for (size_t j = 0; j < request->num_physcam_settings; j++) {
+            if (request->physcam_settings != nullptr) {
+                size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
+                if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
+                            reinterpret_cast<const int8_t*>(request->physcam_settings[j]),
+                            settingsSize)) {
+                    physicalCameraSettings[j].settings.metadata.resize(0);
+                    physicalCameraSettings[j].fmqSettingsSize = settingsSize;
+                } else {
+                    if (mRequestMetadataQueue != nullptr) {
+                        ALOGW("%s: couldn't utilize fmq, falling back to binder", __FUNCTION__);
+                    }
+                    uint8_t *physicalSettingsP =
+                            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
+                                    request->physcam_settings[j]));
+                    physicalCameraSettings[j].settings.metadata.assign(physicalSettingsP,
+                            physicalSettingsP + settingsSize);
+                    physicalCameraSettings[j].fmqSettingsSize = 0u;
+                }
+            } else {
+                physicalCameraSettings[j].fmqSettingsSize = 0u;
+                physicalCameraSettings[j].settings.metadata.resize(0);
+            }
+            physicalCameraSettings[j].physicalCameraId = request->physcam_id[j];
+        }
+    }
+
+    int32_t numRequests = 0;
+    auto retS = mAidlSession->processCaptureRequest(captureRequests, cachesToRemove,
+            &numRequests);
+    if (!retS.isOk()) {
+        res = AidlProviderInfo::mapToStatusT(retS);
+    }
+    if (res == OK) {
+        if (numRequests < 0) {
+            res = INVALID_OPERATION;
+        } else {
+            *numRequestProcessed = static_cast<uint32_t>(numRequests);
+        }
+    }
+    if (res == OK && *numRequestProcessed == batchSize) {
+        if (mAidlSession->isRemote()) {
+            // Only close acquire fence FDs when the AIDL transaction succeeds (so the FDs have been
+            // sent to camera HAL processes)
+            cleanupNativeHandles(&handlesCreated, /*closeFd*/true);
+        } else {
+            // In passthrough mode the FDs are now owned by HAL
+            cleanupNativeHandles(&handlesCreated);
+        }
+    } else {
+        ALOGE("%s: Error with processCaptureRequest: %s", __FUNCTION__, retS.getMessage());
+        mBufferRecords.popInflightBuffers(inflightBuffers);
+        cleanupNativeHandles(&handlesCreated);
+    }
+    return res;
+}
+
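+// Translates one camera_capture_request_t into an AIDL CaptureRequest: assigns buffer IDs,
+// dups native handles for new buffers, records in-flight buffers, and creates acquire-fence
+// handles that the caller is responsible for cleaning up.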
+status_t AidlCamera3Device::AidlHalInterface::wrapAsAidlRequest(camera_capture_request_t* request,
+        /*out*/camera::device::CaptureRequest* captureRequest,
+        /*out*/std::vector<native_handle_t*>* handlesCreated,
+        /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers) {
+    using camera::device::BufferStatus;
+    using camera::device::StreamBuffer;
+    ATRACE_CALL();
+    if (captureRequest == nullptr || handlesCreated == nullptr || inflightBuffers == nullptr) {
+        ALOGE("%s: captureRequest (%p), handlesCreated (%p), and inflightBuffers(%p) "
+                "must not be null", __FUNCTION__, captureRequest, handlesCreated, inflightBuffers);
+        return BAD_VALUE;
+    }
+
+    captureRequest->frameNumber = request->frame_number;
+
+    captureRequest->fmqSettingsSize = 0;
+
+    {
+        if (request->input_buffer != nullptr) {
+            int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
+            buffer_handle_t buf = *(request->input_buffer->buffer);
+            auto pair = getBufferId(buf, streamId);
+            bool isNewBuffer = pair.first;
+            uint64_t bufferId = pair.second;
+            captureRequest->inputBuffer.streamId = streamId;
+            captureRequest->inputBuffer.bufferId = bufferId;
+            captureRequest->inputBuffer.buffer =
+                    (isNewBuffer) ? camera3::dupToAidlIfNotNull(buf) :
+                            aidl::android::hardware::common::NativeHandle();
+            captureRequest->inputBuffer.status = BufferStatus::OK;
+            native_handle_t *acquireFence = nullptr;
+            if (request->input_buffer->acquire_fence != -1) {
+                acquireFence = native_handle_create(1,0);
+                acquireFence->data[0] = request->input_buffer->acquire_fence;
+                handlesCreated->push_back(acquireFence);
+            }
+            // duping here is okay; in AIDL, ownership is not given to the aidl handle
+            captureRequest->inputBuffer.acquireFence = camera3::dupToAidlIfNotNull(acquireFence);
+            captureRequest->inputBuffer.releaseFence =
+                    aidl::android::hardware::common::NativeHandle();
+
+            mBufferRecords.pushInflightBuffer(captureRequest->frameNumber, streamId,
+                    request->input_buffer->buffer);
+            inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
+        } else {
+            captureRequest->inputBuffer.streamId = -1;
+            captureRequest->inputBuffer.bufferId = BUFFER_ID_NO_BUFFER;
+        }
+
+        captureRequest->outputBuffers.resize(request->num_output_buffers);
+        for (size_t i = 0; i < request->num_output_buffers; i++) {
+            const camera_stream_buffer_t *src = request->output_buffers + i;
+            StreamBuffer &dst = captureRequest->outputBuffers[i];
+            int32_t streamId = Camera3Stream::cast(src->stream)->getId();
+            if (src->buffer != nullptr) {
+                buffer_handle_t buf = *(src->buffer);
+                auto pair = getBufferId(buf, streamId);
+                bool isNewBuffer = pair.first;
+                dst.bufferId = pair.second;
+                dst.buffer = isNewBuffer ?
+                        camera3::dupToAidlIfNotNull(buf) :
+                                aidl::android::hardware::common::NativeHandle();
+                native_handle_t *acquireFence = nullptr;
+                if (src->acquire_fence != -1) {
+                    acquireFence = native_handle_create(1,0);
+                    acquireFence->data[0] = src->acquire_fence;
+                    handlesCreated->push_back(acquireFence);
+                }
+                dst.acquireFence = camera3::dupToAidlIfNotNull(acquireFence);
+            } else if (mUseHalBufManager) {
+                // HAL buffer management path
+                dst.bufferId = BUFFER_ID_NO_BUFFER;
+                dst.buffer = aidl::android::hardware::common::NativeHandle();
+                dst.acquireFence = aidl::android::hardware::common::NativeHandle();
+            } else {
+                ALOGE("%s: cannot send a null buffer in capture request!", __FUNCTION__);
+                return BAD_VALUE;
+            }
+            dst.streamId = streamId;
+            dst.status = BufferStatus::OK;
+            dst.releaseFence = aidl::android::hardware::common::NativeHandle();
+
+            // Output buffers are empty when using HAL buffer manager
+            if (!mUseHalBufManager) {
+                mBufferRecords.pushInflightBuffer(
+                        captureRequest->frameNumber, streamId, src->buffer);
+                inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
+            }
+        }
+    }
+    return OK;
+}
+
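+// Forwards switchToOffline to the HAL session and, on success, verifies and transfers the
+// buffer caches belonging to the offline streams.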
+status_t AidlCamera3Device::AidlHalInterface::switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/aidl::android::hardware::camera::device::CameraOfflineSessionInfo*
+                offlineSessionInfo,
+        /*out*/std::shared_ptr<aidl::android::hardware::camera::device::ICameraOfflineSession>*
+                offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords) {
+    ATRACE_NAME("CameraHal::switchToOffline");
+    if (!valid()) {
+        ALOGE("%s called on invalid camera!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (offlineSessionInfo == nullptr || offlineSession == nullptr || bufferRecords == nullptr) {
+        ALOGE("%s: output arguments must not be null!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    auto err = mAidlSession->switchToOffline(streamsToKeep, offlineSessionInfo, offlineSession);
+
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
+        return AidlProviderInfo::mapToStatusT(err);
+    }
+
+    return verifyBufferCaches(offlineSessionInfo, bufferRecords);
+}
+
+AidlCamera3Device::AidlRequestThread::AidlRequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
+                sp<HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute) :
+          RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
+                  supportCameraMute) {}
+
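+// Clears repeating requests and waits for the request queue to drain before asking the HAL
+// interface to switch to offline, so no request is submitted concurrently with the switch.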
+status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/camera::device::CameraOfflineSessionInfo* offlineSessionInfo,
+        /*out*/std::shared_ptr<camera::device::ICameraOfflineSession>* offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords) {
+    Mutex::Autolock l(mRequestLock);
+    clearRepeatingRequestsLocked(/*lastFrameNumber*/nullptr);
+
+    // Wait until request thread is fully stopped
+    // TBD: check if request thread is being paused by other APIs (shouldn't be)
+
+    // We could also check for mRepeatingRequests.empty(), but the API interface
+    // is serialized by Camera3Device::mInterfaceLock so no one should be able to submit any
+    // new requests during the call; hence skip that check.
+    bool queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
+    while (!queueEmpty) {
+        status_t res = mRequestSubmittedSignal.waitRelative(mRequestLock, kRequestSubmitTimeout);
+        if (res == TIMED_OUT) {
+            ALOGE("%s: request thread failed to submit one request within timeout!", __FUNCTION__);
+            return res;
+        } else if (res != OK) {
+            ALOGE("%s: request thread failed to submit a request: %s (%d)!",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+        queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
+    }
+    return (static_cast<AidlHalInterface *>(mInterface.get()))->switchToOffline(
+            streamsToKeep, offlineSessionInfo, offlineSession, bufferRecords);
+}
+
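+// Opens an AIDL injection session for the injected camera, obtains its device session and
+// metadata FMQs, and wraps them in a new AidlHalInterface for later interface replacement.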
+status_t AidlCamera3Device::AidlCamera3DeviceInjectionMethods::injectionInitialize(
+        const String8& injectedCamId, sp<CameraProviderManager> manager,
+        const std::shared_ptr<camera::device::ICameraDeviceCallback>& callback) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mInjectionLock);
+
+    if (manager == nullptr) {
+        ALOGE("%s: manager does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (parent->getTransportType() != IPCTransport::AIDL) {
+        ALOGE("%s Parent transport not AIDL for injected camera id %s, aborting", __FUNCTION__,
+                  injectedCamId.c_str());
+        return INVALID_OPERATION;
+    }
+    mInjectedCamId = injectedCamId;
+    std::shared_ptr<camera::device::ICameraInjectionSession> injectionSession;
+    ATRACE_BEGIN("Injection CameraHal::openSession");
+    status_t res = manager->openAidlInjectionSession(injectedCamId.string(), callback,
+                                          /*out*/ &injectionSession);
+    ATRACE_END();
+    if (res != OK) {
+        ALOGE("Injection camera could not open camera session: %s (%d)",
+                strerror(-res), res);
+        return res;
+    }
+    std::shared_ptr<camera::device::ICameraDeviceSession> deviceSession = nullptr;
+    auto ret = injectionSession->getCameraDeviceSession(&deviceSession);
+    if (!ret.isOk() || deviceSession == nullptr) {
+        ALOGE("%s Camera injection session couldn't return ICameraDeviceSession", __FUNCTION__);
+        return AidlProviderInfo::mapToStatusT(ret);
+    }
+
+    std::shared_ptr<AidlRequestMetadataQueue> queue;
+    ::aidl::android::hardware::common::fmq::MQDescriptor<
+            int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> desc;
+
+    ::ndk::ScopedAStatus requestQueueRet = deviceSession->getCaptureRequestMetadataQueue(&desc);
+    if (!requestQueueRet.isOk()) {
+        ALOGE("Injection camera transaction error when getting request metadata queue from camera"
+                " session: %s", requestQueueRet.getMessage());
+        return AidlProviderInfo::mapToStatusT(requestQueueRet);
+    }
+    queue = std::make_unique<AidlRequestMetadataQueue>(desc);
+    if (!queue->isValid() || queue->availableToWrite() <= 0) {
+        ALOGE("HAL returns empty result metadata fmq, not use it");
+        queue = nullptr;
+        // Don't use resQueue onwards.
+    }
+
+    std::unique_ptr<AidlResultMetadataQueue>& resQueue = mInjectionResultMetadataQueue;
+    ::aidl::android::hardware::common::fmq::MQDescriptor<
+        int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> resDesc;
+    ::ndk::ScopedAStatus resultQueueRet = deviceSession->getCaptureResultMetadataQueue(&resDesc);
+    if (!resultQueueRet.isOk()) {
+        ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+                resultQueueRet.getMessage());
+        return AidlProviderInfo::mapToStatusT(resultQueueRet);
+    }
+    resQueue = std::make_unique<AidlResultMetadataQueue>(resDesc);
+    if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+        ALOGE("HAL returns empty result metadata fmq, not use it");
+        resQueue = nullptr;
+        // Don't use resQueue onwards.
+    }
+
+    ALOGV("%s: Injection camera interface = new HalInterface()", __FUNCTION__);
+
+    mInjectedCamHalInterface =
+            new AidlHalInterface(deviceSession, injectionSession, queue, parent->mUseHalBufManager,
+                       parent->mSupportOfflineProcessing);
+    if (mInjectedCamHalInterface == nullptr) {
+        ALOGE("%s: mInjectedCamHalInterface does not exist!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    return OK;
+}
+
+status_t AidlCamera3Device::AidlCamera3DeviceInjectionMethods::replaceHalInterface(
+        sp<HalInterface> newHalInterface, bool keepBackup) {
+    Mutex::Autolock lock(mInjectionLock);
+    if (newHalInterface.get() == nullptr) {
+        ALOGE("%s: The newHalInterface does not exist, to stop replacing.",
+                __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (parent->getTransportType() != IPCTransport::AIDL ||
+            newHalInterface->getTransportType() != IPCTransport::AIDL) {
+        ALOGE("%s Parent transport not AIDL for replacing hal interface", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
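+    // keepBackup: preserve the original HAL interface and result FMQ so they can be restored
+    // when injection stops; otherwise drop the backup and restore the original result FMQ.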
+    AidlCamera3Device *aidlParent = static_cast<AidlCamera3Device *>(parent.get());
+    if (keepBackup) {
+        if (mBackupHalInterface == nullptr) {
+            mBackupHalInterface = parent->mInterface;
+        }
+        if (mBackupResultMetadataQueue == nullptr) {
+            mBackupResultMetadataQueue = std::move(aidlParent->mResultMetadataQueue);
+            aidlParent->mResultMetadataQueue = std::move(mInjectionResultMetadataQueue);
+        }
+    } else {
+        mBackupHalInterface = nullptr;
+        aidlParent->mResultMetadataQueue = std::move(mBackupResultMetadataQueue);
+        mBackupResultMetadataQueue = nullptr;
+    }
+    parent->mInterface = newHalInterface;
+    return OK;
+}
+
+status_t AidlCamera3Device::injectionCameraInitialize(const String8 &injectedCamId,
+        sp<CameraProviderManager> manager) {
+    return (static_cast<AidlCamera3DeviceInjectionMethods *>
+            (mInjectionMethods.get()))->injectionInitialize(injectedCamId, manager,
+                    std::shared_ptr<camera::device::ICameraDeviceCallback>(mCallbacks));
+}
+
+sp<Camera3Device::RequestThread> AidlCamera3Device::createNewRequestThread(
+                wp<Camera3Device> parent, sp<camera3::StatusTracker> statusTracker,
+                sp<Camera3Device::HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute) {
+    return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
+            useHalBufManager, supportCameraMute);
+}
+
+sp<Camera3Device::Camera3DeviceInjectionMethods>
+AidlCamera3Device::createCamera3DeviceInjectionMethods(wp<Camera3Device> parent) {
+    return new AidlCamera3DeviceInjectionMethods(parent);
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
new file mode 100644
index 0000000..d20a7eb
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_AIDLCAMERA3DEVICE_H
+#define ANDROID_SERVERS_AIDLCAMERA3DEVICE_H
+
+#include "../Camera3Device.h"
+#include "AidlCamera3OutputUtils.h"
+#include <fmq/AidlMessageQueue.h>
+
+#include <aidl/android/hardware/camera/device/BnCameraDeviceCallback.h>
+#include <aidl/android/hardware/camera/device/ICameraDevice.h>
+#include <aidl/android/hardware/camera/device/ICameraInjectionSession.h>
+namespace android {
+
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+
+/**
+ * CameraDevice for AIDL HAL devices.
+ */
+class AidlCamera3Device :
+            public Camera3Device {
+  public:
+
+    using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+    class AidlCameraDeviceCallbacks;
+    friend class AidlCameraDeviceCallbacks;
+    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass,
+            bool legacyClient = false);
+
+    virtual ~AidlCamera3Device() { }
+
+    static aidl::android::hardware::graphics::common::PixelFormat mapToAidlPixelFormat(
+            int frameworkFormat);
+    static aidl::android::hardware::graphics::common::Dataspace mapToAidlDataspace(
+            android_dataspace dataSpace);
+    static aidl::android::hardware::graphics::common::BufferUsage mapToAidlConsumerUsage(
+            uint64_t usage);
+    static aidl::android::hardware::camera::device::StreamRotation
+            mapToAidlStreamRotation(camera_stream_rotation_t rotation);
+
+    static status_t mapToAidlStreamConfigurationMode(
+            camera_stream_configuration_mode_t operationMode,
+            aidl::android::hardware::camera::device::StreamConfigurationMode *mode);
+
+    static int mapToFrameworkFormat(
+        aidl::android::hardware::graphics::common::PixelFormat pixelFormat);
+    static android_dataspace mapToFrameworkDataspace(
+            aidl::android::hardware::graphics::common::Dataspace);
+    static uint64_t mapConsumerToFrameworkUsage(
+            aidl::android::hardware::graphics::common::BufferUsage usage);
+    static uint64_t mapProducerToFrameworkUsage(
+            aidl::android::hardware::graphics::common::BufferUsage usage);
+
+    virtual status_t switchToOffline(const std::vector<int32_t>& /*streamsToKeep*/,
+            /*out*/ sp<CameraOfflineSessionBase>* /*session*/) override;
+
+    status_t initialize(sp<CameraProviderManager> manager, const String8& monitorTags) override;
+    class AidlHalInterface : public Camera3Device::HalInterface {
+     public:
+        AidlHalInterface(std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraDeviceSession> &session,
+                std::shared_ptr<AidlRequestMetadataQueue> queue,
+                bool useHalBufManager, bool supportOfflineProcessing);
+        AidlHalInterface(
+                std::shared_ptr<aidl::android::hardware::camera::device::ICameraDeviceSession>
+                    &deviceSession,
+                std::shared_ptr<
+                aidl::android::hardware::camera::device::ICameraInjectionSession> &injectionSession,
+                std::shared_ptr<AidlRequestMetadataQueue> queue,
+                bool useHalBufManager, bool supportOfflineProcessing);
+
+        virtual IPCTransport getTransportType() const override { return IPCTransport::AIDL; }
+
+
+        // Returns true if constructed with a valid device or session, and not yet cleared
+        virtual bool valid() override;
+
+        // Reset this HalInterface object (does not call close())
+        virtual void clear() override;
+
+        // Caller takes ownership of requestTemplate
+        virtual status_t constructDefaultRequestSettings(camera_request_template templateId,
+                /*out*/ camera_metadata_t **requestTemplate) override;
+
+        virtual status_t configureStreams(const camera_metadata_t *sessionParams,
+                /*inout*/ camera_stream_configuration_t *config,
+                const std::vector<uint32_t>& bufferSizes) override;
+        // The injection camera configures its streams with the HAL.
+        virtual status_t configureInjectedStreams(
+                const camera_metadata_t* sessionParams,
+                /*inout*/ camera_stream_configuration_t* config,
+                const std::vector<uint32_t>& bufferSizes,
+                const CameraMetadata& cameraCharacteristics) override;
+
+        // When the call succeeds, the ownership of acquire fences in requests is transferred to
+        // HalInterface. More specifically, the current implementation will send the fence to
+        // HAL process and close the FD in cameraserver process. When the call fails, the ownership
+        // of the acquire fence still belongs to the caller.
+        virtual status_t processBatchCaptureRequests(
+                std::vector<camera_capture_request_t*>& requests,
+                /*out*/uint32_t* numRequestProcessed) override;
+
+        // Calls into the HAL interface
+        virtual status_t flush() override;
+        virtual status_t dump(int fd) override;
+        virtual status_t close() override;
+        virtual void signalPipelineDrain(const std::vector<int>& streamIds) override;
+        virtual bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
+                CameraMetadata& newSessionParams) override;
+
+        virtual status_t repeatingRequestEnd(uint32_t,
+                const std::vector<int32_t>&) override;
+
+        status_t switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/aidl::android::hardware::camera::device::CameraOfflineSessionInfo*
+                offlineSessionInfo,
+        /*out*/std::shared_ptr<aidl::android::hardware::camera::device::ICameraOfflineSession>*
+                offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords);
+
+     private:
+
+        // Always valid
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraDeviceSession>
+                mAidlSession = nullptr;
+        // Valid for injection sessions
+        std::shared_ptr<aidl::android::hardware::camera::device::ICameraInjectionSession>
+                mAidlInjectionSession = nullptr;
+
+        status_t wrapAsAidlRequest(camera_capture_request_t* request,
+                /*out*/aidl::android::hardware::camera::device::CaptureRequest* captureRequest,
+                /*out*/std::vector<native_handle_t*>* handlesCreated,
+                /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers);
+
+        std::shared_ptr<AidlRequestMetadataQueue> mRequestMetadataQueue;
+    }; // class AidlHalInterface
+
+    /**
+     * Implementation of aidl::android::hardware::camera::device::ICameraDeviceCallback
+     */
+    ::ndk::ScopedAStatus processCaptureResult(
+            const std::vector<aidl::android::hardware::camera::device::CaptureResult>& results);
+    ::ndk::ScopedAStatus notify(
+            const std::vector<aidl::android::hardware::camera::device::NotifyMsg>& msgs);
+
+    ::ndk::ScopedAStatus requestStreamBuffers(
+            const std::vector<aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+            std::vector<aidl::android::hardware::camera::device::StreamBufferRet>* outBuffers,
+            aidl::android::hardware::camera::device::BufferRequestStatus* status);
+
+    ::ndk::ScopedAStatus returnStreamBuffers(
+            const std::vector<aidl::android::hardware::camera::device::StreamBuffer>& buffers);
+
+    class AidlRequestThread : public Camera3Device::RequestThread {
+      public:
+        AidlRequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
+                sp<HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute);
+
+        status_t switchToOffline(
+                const std::vector<int32_t>& streamsToKeep,
+                /*out*/aidl::android::hardware::camera::device::CameraOfflineSessionInfo*
+                        offlineSessionInfo,
+                /*out*/std::shared_ptr<
+                        aidl::android::hardware::camera::device::ICameraOfflineSession>*
+                                offlineSession,
+                /*out*/camera3::BufferRecords* bufferRecords);
+    }; // class AidlRequestThread
+
+    class AidlCamera3DeviceInjectionMethods : public Camera3DeviceInjectionMethods {
+     public:
+        // Initialize the injection camera and generate a HAL interface.
+        status_t injectionInitialize(
+                const String8& injectedCamId, sp<CameraProviderManager> manager,
+                const std::shared_ptr<
+                    aidl::android::hardware::camera::device::ICameraDeviceCallback>&
+                    callback);
+        AidlCamera3DeviceInjectionMethods(wp<Camera3Device> parent) :
+                Camera3DeviceInjectionMethods(parent) { };
+        ~AidlCamera3DeviceInjectionMethods() {}
+     private:
+        // Backup of the original camera hal result FMQ.
+        std::unique_ptr<AidlResultMetadataQueue> mBackupResultMetadataQueue;
+
+        // FMQ that the injection camera's results are written to. Must be guarded by
+        // mProcessCaptureResultLock.
+        std::unique_ptr<AidlResultMetadataQueue> mInjectionResultMetadataQueue;
+
+        // Replace the original camera HAL interface with the injection camera's, optionally
+        // backing up the original.
+        virtual status_t replaceHalInterface(sp<HalInterface> newHalInterface,
+                bool keepBackup) override;
+    };
+
+    // We need a separate class which inherits from AIDL ICameraDeviceCallbacks
+    // since we use the ndk backend for AIDL HAL interfaces. The ndk backend of
+    // ICameraDeviceCallbacks doesn't support sp<> (since it doesn't inherit
+    // from RefBase).
+    // As a result we can't write sp<Camera3Device> = new AidlCamera3Device(...).
+    // It supports std::shared_ptr instead. Other references to
+    // Camera3Device in cameraserver use sp<> widely, so to keep supporting
+    // that, we create a new class which will be managed through std::shared_ptr
+    // internally by AidlCamera3Device.
+    class AidlCameraDeviceCallbacks :
+            public aidl::android::hardware::camera::device::BnCameraDeviceCallback {
+      public:
+
+        AidlCameraDeviceCallbacks(wp<AidlCamera3Device> parent) : mParent(parent)  { }
+        ~AidlCameraDeviceCallbacks() { }
+        ::ndk::ScopedAStatus processCaptureResult(
+                const std::vector<
+                        aidl::android::hardware::camera::device::CaptureResult>& results) override;
+        ::ndk::ScopedAStatus notify(
+                const std::vector<
+                        aidl::android::hardware::camera::device::NotifyMsg>& msgs) override;
+
+        ::ndk::ScopedAStatus requestStreamBuffers(
+                const std::vector<
+                        aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+                std::vector<aidl::android::hardware::camera::device::StreamBufferRet>* outBuffers,
+                aidl::android::hardware::camera::device::BufferRequestStatus* status) override;
+
+        ::ndk::ScopedAStatus returnStreamBuffers(
+                const std::vector<
+                        aidl::android::hardware::camera::device::StreamBuffer>& buffers) override;
+        private:
+            wp<AidlCamera3Device> mParent = nullptr;
+    };
+
+  private:
+    virtual status_t injectionCameraInitialize(const String8 &injectCamId,
+            sp<CameraProviderManager> manager) override;
+
+    virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
+                sp<HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute) override;
+
+    virtual sp<Camera3DeviceInjectionMethods>
+            createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
+
+    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+    std::unique_ptr<AidlResultMetadataQueue> mResultMetadataQueue = nullptr;
+
+    std::shared_ptr<AidlCameraDeviceCallbacks> mCallbacks = nullptr;
+
+
+}; // class AidlCamera3Device
+
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
new file mode 100644
index 0000000..336719d
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCamera3-OffLnSsn"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <inttypes.h>
+
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include "device3/aidl/AidlCamera3OfflineSession.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/aidl/AidlCamera3OutputUtils.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3SharedOutputStream.h"
+#include "utils/CameraTraces.h"
+
+using namespace android::camera3;
+using namespace aidl::android::hardware;
+
+namespace android {
+
+
+AidlCamera3OfflineSession::~AidlCamera3OfflineSession() {
+    ATRACE_CALL();
+    ALOGV("%s: Tearing down aidl offline session for camera id %s", __FUNCTION__, mId.string());
+    AidlCamera3OfflineSession::disconnectSession();
+}
+
+status_t AidlCamera3OfflineSession::initialize(wp<NotificationListener> listener) {
+    ATRACE_CALL();
+
+    if (mSession == nullptr) {
+        ALOGE("%s: AIDL session is null!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+
+        mListener = listener;
+
+        // setup result FMQ
+        std::unique_ptr<AidlResultMetadataQueue>& resQueue = mResultMetadataQueue;
+        ::aidl::android::hardware::common::fmq::MQDescriptor<
+            int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> desc;
+        ::ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&desc);
+        if (!resultQueueRet.isOk()) {
+            ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+                    resultQueueRet.getMessage());
+            return DEAD_OBJECT;
+        }
+        resQueue = std::make_unique<AidlResultMetadataQueue>(desc);
+        if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+            ALOGE("HAL returns empty result metadata fmq, not use it");
+            resQueue = nullptr;
+            // Don't use resQueue onwards.
+        }
+
+        mStatus = STATUS_ACTIVE;
+    }
+
+    mSession->setCallback(mCallbacks);
+
+    return OK;
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::AidlCameraDeviceCallbacks::processCaptureResult(
+        const std::vector<camera::device::CaptureResult>& results) {
+    sp<AidlCamera3OfflineSession> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->processCaptureResult(results);
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::processCaptureResult(
+        const std::vector<camera::device::CaptureResult>& results) {
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return ::ndk::ScopedAStatus::ok();
+        }
+        listener = mListener.promote();
+    }
+
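+    // Bundle this offline session's bookkeeping into the shared AIDL output-utils state so
+    // the common result-processing path can be reused.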
+    AidlCaptureOutputStates states {
+      {mId,
+        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+    };
+
+    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result, result.physicalCameraMetadata);
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::AidlCameraDeviceCallbacks::notify(
+        const std::vector<camera::device::NotifyMsg>& msgs) {
+    sp<AidlCamera3OfflineSession> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->notify(msgs);
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::notify(
+        const std::vector<camera::device::NotifyMsg>& msgs) {
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return ::ndk::ScopedAStatus::ok();
+        }
+        listener = mListener.promote();
+    }
+
+    AidlCaptureOutputStates states {
+      {mId,
+        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+    };
+    for (const auto& msg : msgs) {
+        camera3::notify(states, msg);
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::AidlCameraDeviceCallbacks::requestStreamBuffers(
+        const std::vector<::aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+        std::vector<::aidl::android::hardware::camera::device::StreamBufferRet>* buffers,
+        ::aidl::android::hardware::camera::device::BufferRequestStatus* status) {
+    sp<AidlCamera3OfflineSession> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->requestStreamBuffers(bufReqs, buffers, status);
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::requestStreamBuffers(
+        const std::vector<::aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+        std::vector<::aidl::android::hardware::camera::device::StreamBufferRet>* buffers,
+        ::aidl::android::hardware::camera::device::BufferRequestStatus* status) {
+
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return ::ndk::ScopedAStatus::ok();
+        }
+    }
+
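+    // Delegate to the shared buffer-request helper with this session's stream and buffer
+    // bookkeeping state.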
+    RequestBufferStates states {
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        *this, mBufferRecords, *this};
+    camera3::requestStreamBuffers(states, bufReqs, buffers, status);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::AidlCameraDeviceCallbacks::returnStreamBuffers(
+        const std::vector<camera::device::StreamBuffer>& buffers) {
+    sp<AidlCamera3OfflineSession> p = mParent.promote();
+    if (p == nullptr) {
+        ALOGE("%s Parent AidlCameraDevice not alive, can't process callbacks", __FUNCTION__);
+        return ::ndk::ScopedAStatus::ok();
+    }
+    return p->returnStreamBuffers(buffers);
+}
+
+::ndk::ScopedAStatus AidlCamera3OfflineSession::returnStreamBuffers(
+        const std::vector<camera::device::StreamBuffer>& buffers) {
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return ::ndk::ScopedAStatus::ok();
+        }
+    }
+
+    ReturnBufferStates states {
+        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mBufferRecords};
+
+    camera3::returnStreamBuffers(states, buffers);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+void AidlCamera3OfflineSession::disconnectSession() {
+    std::lock_guard<std::mutex> lock(mLock);
+    if (mSession != nullptr) {
+        mSession->close();
+    }
+    mSession.reset();
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
new file mode 100644
index 0000000..33de2c5
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_AIDL_CAMERA3OFFLINESESSION_H
+#define ANDROID_SERVERS_AIDL_CAMERA3OFFLINESESSION_H
+
+#include <memory>
+#include <mutex>
+
+#include <utils/String8.h>
+#include <utils/String16.h>
+
+#include "AidlCamera3OutputUtils.h"
+#include <aidl/android/hardware/camera/device/BnCameraDeviceCallback.h>
+#include <aidl/android/hardware/camera/device/ICameraOfflineSession.h>
+
+#include <fmq/AidlMessageQueue.h>
+
+#include "common/CameraOfflineSessionBase.h"
+
+#include "device3/Camera3BufferManager.h"
+#include "device3/Camera3OfflineSession.h"
+#include "utils/TagMonitor.h"
+#include <camera_metadata_hidden.h>
+
+namespace android {
+
+namespace camera3 {
+
+class Camera3Stream;
+class Camera3OutputStreamInterface;
+class Camera3StreamInterface;
+
+} // namespace camera3
+
+/**
+ * AidlCamera3OfflineSession for offline session defined in AIDL ICameraOfflineSession
+ */
+class AidlCamera3OfflineSession :
+            public Camera3OfflineSession {
+  public:
+
+    virtual ~AidlCamera3OfflineSession();
+
+    virtual status_t initialize(wp<NotificationListener> listener) override;
+
+    /**
+     * Implementation of aidl::android::hardware::camera::device::ICameraDeviceCallback
+     */
+    ::ndk::ScopedAStatus processCaptureResult(
+            const std::vector<aidl::android::hardware::camera::device::CaptureResult>& results);
+    ::ndk::ScopedAStatus notify(
+            const std::vector<aidl::android::hardware::camera::device::NotifyMsg>& msgs);
+    ::ndk::ScopedAStatus requestStreamBuffers(
+            const std::vector<aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+            std::vector<aidl::android::hardware::camera::device::StreamBufferRet>* outBuffers,
+            aidl::android::hardware::camera::device::BufferRequestStatus* status);
+
+    ::ndk::ScopedAStatus returnStreamBuffers(
+            const std::vector<aidl::android::hardware::camera::device::StreamBuffer>& buffers);
+
+    // See explanation for why we need a separate class for this in
+    // AidlCamera3Device::AidlCameraDeviceCallbacks in AidlCamera3Device.h
+    class AidlCameraDeviceCallbacks :
+            public aidl::android::hardware::camera::device::BnCameraDeviceCallback {
+      public:
+
+        AidlCameraDeviceCallbacks(wp<AidlCamera3OfflineSession> parent) : mParent(parent)  { }
+        ~AidlCameraDeviceCallbacks() {}
+        ::ndk::ScopedAStatus processCaptureResult(
+                const std::vector<
+                        aidl::android::hardware::camera::device::CaptureResult>& results) override;
+        ::ndk::ScopedAStatus notify(
+                const std::vector<
+                        aidl::android::hardware::camera::device::NotifyMsg>& msgs) override;
+
+        ::ndk::ScopedAStatus requestStreamBuffers(
+                const std::vector<
+                        aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+                std::vector<aidl::android::hardware::camera::device::StreamBufferRet>* out_buffers,
+                aidl::android::hardware::camera::device::BufferRequestStatus* _aidl_return
+                ) override;
+
+        ::ndk::ScopedAStatus returnStreamBuffers(
+                const std::vector<
+                        aidl::android::hardware::camera::device::StreamBuffer>& buffers) override;
+        private:
+            wp<AidlCamera3OfflineSession> mParent = nullptr;
+    };
+
+    // Initialized by Camera3Device.
+    explicit AidlCamera3OfflineSession(const String8& id,
+            const sp<camera3::Camera3Stream>& inputStream,
+            const camera3::StreamSet& offlineStreamSet,
+            camera3::BufferRecords&& bufferRecords,
+            const camera3::InFlightRequestMap& offlineReqs,
+            const Camera3OfflineStates& offlineStates,
+            std::shared_ptr<aidl::android::hardware::camera::device::ICameraOfflineSession>
+                    offlineSession) :
+      Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
+              offlineReqs, offlineStates),
+      mSession(offlineSession) {
+        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+      };
+
+    /**
+     * End of CameraOfflineSessionBase interface
+     */
+
+  private:
+    std::shared_ptr<aidl::android::hardware::camera::device::ICameraOfflineSession> mSession;
+    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+    std::unique_ptr<AidlResultMetadataQueue> mResultMetadataQueue;
+
+    std::shared_ptr<AidlCameraDeviceCallbacks> mCallbacks;
+
+    virtual void disconnectSession() override;
+
+}; // class AidlCamera3OfflineSession
+
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
new file mode 100644
index 0000000..02eebd2
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -0,0 +1,343 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCamera3-OutputUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState(   \
+    "%s: " fmt, __FUNCTION__,                         \
+    ##__VA_ARGS__)
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <aidlcommonsupport/NativeHandle.h>
+
+#include <camera/CameraUtils.h>
+#include <camera_metadata_hidden.h>
+
+#include "device3/aidl/AidlCamera3OutputUtils.h"
+#include "device3/Camera3OutputUtilsTemplated.h"
+
+#include "system/camera_metadata.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+namespace camera3 {
+
+void processOneCaptureResultLocked(
+        AidlCaptureOutputStates& states,
+        const aidl::android::hardware::camera::device::CaptureResult& result,
+        const std::vector<aidl::android::hardware::camera::device::PhysicalCameraMetadata>
+                &physicalCameraMetadata) {
+    processOneCaptureResultLockedT<AidlCaptureOutputStates,
+        aidl::android::hardware::camera::device::CaptureResult,
+        std::vector<aidl::android::hardware::camera::device::PhysicalCameraMetadata>,
+        std::vector<uint8_t>, AidlResultMetadataQueue,
+        aidl::android::hardware::camera::device::BufferStatus, int8_t>(states, result,
+                physicalCameraMetadata);
+}
+
+void notify(CaptureOutputStates& states,
+        const aidl::android::hardware::camera::device::NotifyMsg& msg) {
+
+    using ErrorCode = aidl::android::hardware::camera::device::ErrorCode;
+    using Tag = aidl::android::hardware::camera::device::NotifyMsg::Tag;
+
+    ATRACE_CALL();
+    camera_notify_msg m;
+
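+    // Translate the AIDL NotifyMsg into the legacy camera_notify_msg consumed by the common
+    // notify() path below.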
+    switch (msg.getTag()) {
+        case Tag::error:
+            m.type = CAMERA_MSG_ERROR;
+            m.message.error.frame_number = msg.get<Tag::error>().frameNumber;
+            if (msg.get<Tag::error>().errorStreamId >= 0) {
+                sp<Camera3StreamInterface> stream =
+                        states.outputStreams.get(msg.get<Tag::error>().errorStreamId);
+                if (stream == nullptr) {
+                    ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
+                            m.message.error.frame_number, msg.get<Tag::error>().errorStreamId);
+                    return;
+                }
+                m.message.error.error_stream = stream->asHalStream();
+            } else {
+                m.message.error.error_stream = nullptr;
+            }
+            switch (msg.get<Tag::error>().errorCode) {
+                case ErrorCode::ERROR_DEVICE:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_DEVICE;
+                    break;
+                case ErrorCode::ERROR_REQUEST:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_REQUEST;
+                    break;
+                case ErrorCode::ERROR_RESULT:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_RESULT;
+                    break;
+                case ErrorCode::ERROR_BUFFER:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_BUFFER;
+                    break;
+            }
+            break;
+        case Tag::shutter:
+            m.type = CAMERA_MSG_SHUTTER;
+            m.message.shutter.frame_number = msg.get<Tag::shutter>().frameNumber;
+            m.message.shutter.timestamp = msg.get<Tag::shutter>().timestamp;
+            m.message.shutter.readout_timestamp = msg.get<Tag::shutter>().readoutTimestamp;
+            break;
+    }
+    notify(states, &m);
+}
+
+
+// The buffers requested through this call are not tied to any particular CaptureRequest.
+// They may be used by the HAL for a specific frame's output buffer or for its own internal
+// use. If the HAL does use a buffer from the list requested here for a frame's output
+// buffer, that buffer is returned with the processCaptureResult call corresponding to the
+// frame. The remaining buffers are returned through returnStreamBuffers. Buffers returned
+// via returnStreamBuffers do not have a valid timestamp (0) and are dropped by the
+// BufferQueue.
+void requestStreamBuffers(RequestBufferStates& states,
+        const std::vector<aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+        std::vector<::aidl::android::hardware::camera::device::StreamBufferRet>* outBuffers,
+        ::aidl::android::hardware::camera::device::BufferRequestStatus* status) {
+    using aidl::android::hardware::camera::device::BufferStatus;
+    using aidl::android::hardware::camera::device::StreamBuffer;
+    using aidl::android::hardware::camera::device::BufferRequestStatus;
+    using aidl::android::hardware::camera::device::StreamBufferRet;
+    using aidl::android::hardware::camera::device::StreamBufferRequestError;
+    using Tag = aidl::android::hardware::camera::device::StreamBuffersVal::Tag;
+    if (outBuffers == nullptr || status == nullptr) {
+        ALOGE("%s outBuffers / buffer status nullptr", __FUNCTION__);
+        return;
+    }
+    std::lock_guard<std::mutex> lock(states.reqBufferLock);
+    std::vector<StreamBufferRet> bufRets;
+    outBuffers->clear();
+    if (!states.useHalBufManager) {
+        ALOGE("%s: Camera %s does not support HAL buffer management",
+                __FUNCTION__, states.cameraId.string());
+        *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
+        return;
+    }
+
+    SortedVector<int32_t> streamIds;
+    ssize_t sz = streamIds.setCapacity(bufReqs.size());
+    if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
+        ALOGE("%s: failed to allocate memory for %zu buffer requests",
+                __FUNCTION__, bufReqs.size());
+        *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
+        return;
+    }
+
+    if (bufReqs.size() > states.outputStreams.size()) {
+        ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
+                __FUNCTION__, bufReqs.size(), states.outputStreams.size());
+        *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
+        return;
+    }
+
+    // Check for repeated streamId
+    for (const auto& bufReq : bufReqs) {
+        if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
+            ALOGE("%s: Stream %d appear multiple times in buffer requests",
+                    __FUNCTION__, bufReq.streamId);
+            *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
+            return;
+        }
+        streamIds.add(bufReq.streamId);
+    }
+
+    if (!states.reqBufferIntf.startRequestBuffer()) {
+        ALOGE("%s: request buffer disallowed while camera service is configuring",
+                __FUNCTION__);
+        *status = BufferRequestStatus::FAILED_CONFIGURING;
+        return;
+    }
+
+    bufRets.resize(bufReqs.size());
+
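+    // Each stream's request is handled independently; track whether all, some, or none of
+    // them succeed so an aggregate BufferRequestStatus can be reported at the end.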
+    bool allReqsSucceeds = true;
+    bool oneReqSucceeds = false;
+    for (size_t i = 0; i < bufReqs.size(); i++) {
+        const auto& bufReq = bufReqs[i];
+        auto& bufRet = bufRets[i];
+        int32_t streamId = bufReq.streamId;
+        sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
+        if (outputStream == nullptr) {
+            ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
+            std::vector<StreamBufferRet> emptyBufRets;
+            *status = BufferRequestStatus::FAILED_CONFIGURING;
+            states.reqBufferIntf.endRequestBuffer();
+            return;
+        }
+
+        bufRet.streamId = streamId;
+        if (outputStream->isAbandoned()) {
+            bufRet.val.set<Tag::error>(StreamBufferRequestError::STREAM_DISCONNECTED);
+            allReqsSucceeds = false;
+            continue;
+        }
+
+        size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
+        uint32_t numBuffersRequested = bufReq.numBuffersRequested;
+        size_t totalHandout = handOutBufferCount + numBuffersRequested;
+        uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
+        if (totalHandout > maxBuffers) {
+            // Not able to allocate enough buffers. Exit early for this stream.
+            ALOGE("%s: too many buffers requested for stream %d: at HAL: %zu + requesting: %d"
+                    " > max: %d", __FUNCTION__, streamId, handOutBufferCount,
+                    numBuffersRequested, maxBuffers);
+            bufRet.val.set<Tag::error>(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
+            allReqsSucceeds = false;
+            continue;
+        }
+
+        std::vector<StreamBuffer> tmpRetBuffers(numBuffersRequested);
+        bool currentReqSucceeds = true;
+        std::vector<camera_stream_buffer_t> streamBuffers(numBuffersRequested);
+        std::vector<buffer_handle_t> newBuffers;
+        size_t numAllocatedBuffers = 0;
+        size_t numPushedInflightBuffers = 0;
+        for (size_t b = 0; b < numBuffersRequested; b++) {
+            camera_stream_buffer_t& sb = streamBuffers[b];
+            // Since this method can run concurrently with the request thread, we need to
+            // update the wait duration every time we call getBuffer.
+            nsecs_t waitDuration =  states.reqBufferIntf.getWaitDuration();
+            status_t res = outputStream->getBuffer(&sb, waitDuration);
+            if (res != OK) {
+                if (res == NO_INIT || res == DEAD_OBJECT) {
+                    ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
+                            __FUNCTION__, streamId, strerror(-res), res);
+                    bufRet.val.set<Tag::error>(StreamBufferRequestError::STREAM_DISCONNECTED);
+                    states.sessionStatsBuilder.stopCounter(streamId);
+                } else {
+                    ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
+                            __FUNCTION__, streamId, strerror(-res), res);
+                    if (res == TIMED_OUT || res == NO_MEMORY) {
+                        bufRet.val.set<Tag::error>(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
+                    } else if (res == INVALID_OPERATION) {
+                        bufRet.val.set<Tag::error>(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
+                    } else {
+                        bufRet.val.set<Tag::error>(StreamBufferRequestError::UNKNOWN_ERROR);
+                    }
+                }
+                currentReqSucceeds = false;
+                break;
+            }
+            numAllocatedBuffers++;
+
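+            // Look up (or create) the HAL-side buffer ID for this handle; only buffers the
+            // HAL has not seen before carry a native handle across the AIDL boundary.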
+            buffer_handle_t *buffer = sb.buffer;
+            auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
+            bool isNewBuffer = pair.first;
+            uint64_t bufferId = pair.second;
+            StreamBuffer& hBuf = tmpRetBuffers[b];
+
+            hBuf.streamId = streamId;
+            hBuf.bufferId = bufferId;
+
+            hBuf.buffer = (isNewBuffer) ? camera3::dupToAidlIfNotNull(*buffer) :
+                    aidl::android::hardware::common::NativeHandle();
+            hBuf.status = BufferStatus::OK;
+            hBuf.releaseFence =  aidl::android::hardware::common::NativeHandle();
+            if (isNewBuffer) {
+                newBuffers.push_back(*buffer);
+            }
+
+            native_handle_t *acquireFence = nullptr;
+            if (sb.acquire_fence != -1) {
+                acquireFence = native_handle_create(1,0);
+                acquireFence->data[0] = sb.acquire_fence;
+            }
+            // makeToAidl passes ownership of the fence fd to the AIDL NativeHandle it
+            // creates. Ownership is transferred: see system/window.h : dequeueBuffer.
+            hBuf.acquireFence = makeToAidlIfNotNull(acquireFence);
+            if (acquireFence != nullptr) {
+                native_handle_delete(acquireFence);
+            }
+            hBuf.releaseFence =  aidl::android::hardware::common::NativeHandle();
+
+            res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
+            if (res != OK) {
+                ALOGE("%s: Can't get register request buffers for stream %d: %s (%d)",
+                        __FUNCTION__, streamId, strerror(-res), res);
+                bufRet.val.set<Tag::error>(StreamBufferRequestError::UNKNOWN_ERROR);
+                currentReqSucceeds = false;
+                break;
+            }
+            numPushedInflightBuffers++;
+        }
+        if (currentReqSucceeds) {
+            bufRet.val.set<Tag::buffers>(std::move(tmpRetBuffers));
+            oneReqSucceeds = true;
+        } else {
+            allReqsSucceeds = false;
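+            // Unwind this stream's partial work: pop the buffers already registered as in
+            // flight, mark the acquired buffers as errored and return them to the stream,
+            // and drop any newly cached buffer handles.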
+            for (size_t b = 0; b < numPushedInflightBuffers; b++) {
+                StreamBuffer& hBuf = tmpRetBuffers[b];
+                buffer_handle_t* buffer;
+                status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
+                        hBuf.bufferId, &buffer);
+                if (res != OK) {
+                    SET_ERR("%s: popInflightRequestBuffer failed for stream %d: %s (%d)",
+                            __FUNCTION__, streamId, strerror(-res), res);
+                }
+            }
+            for (size_t b = 0; b < numAllocatedBuffers; b++) {
+                camera_stream_buffer_t& sb = streamBuffers[b];
+                sb.acquire_fence = -1;
+                sb.status = CAMERA_BUFFER_STATUS_ERROR;
+            }
+            returnOutputBuffers(states.useHalBufManager, nullptr,
+                    streamBuffers.data(), numAllocatedBuffers, 0,
+                    0, false,
+                    0, states.sessionStatsBuilder);
+            for (auto buf : newBuffers) {
+                states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
+            }
+        }
+    }
+
+    *status = allReqsSucceeds ? BufferRequestStatus::OK :
+            oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
+                             BufferRequestStatus::FAILED_UNKNOWN;
+    // Transfer ownership of buffer fds to outBuffers
+    *outBuffers = std::move(bufRets);
+
+    states.reqBufferIntf.endRequestBuffer();
+}
+
+void returnStreamBuffers(ReturnBufferStates& states,
+        const std::vector<aidl::android::hardware::camera::device::StreamBuffer>& buffers) {
+    returnStreamBuffersT(states, buffers);
+}
+
+} // camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
new file mode 100644
index 0000000..e795624
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_AIDL_CAMERA3_OUTPUT_UTILS_H
+#define ANDROID_SERVERS_AIDL_CAMERA3_OUTPUT_UTILS_H
+
+#include <memory>
+#include <mutex>
+
+#include <cutils/native_handle.h>
+
+#include <aidlcommonsupport/NativeHandle.h>
+#include <fmq/AidlMessageQueue.h>
+
+#include <common/CameraDeviceBase.h>
+
+#include <aidl/android/hardware/camera/device/ICameraDevice.h>
+#include <aidl/android/hardware/camera/device/ICameraDeviceCallback.h>
+#include "device3/BufferUtils.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3Stream.h"
+#include "device3/Camera3OutputStreamInterface.h"
+#include "device3/Camera3OutputUtils.h"
+#include "utils/SessionStatsBuilder.h"
+#include "utils/TagMonitor.h"
+
+namespace android {
+
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+
+using AidlResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+namespace camera3 {
+    inline aidl::android::hardware::common::NativeHandle dupToAidlIfNotNull(
+            const native_handle_t *nh) {
+        if (nh == nullptr) {
+            return aidl::android::hardware::common::NativeHandle();
+        }
+        return dupToAidl(nh);
+    }
+
+    inline aidl::android::hardware::common::NativeHandle makeToAidlIfNotNull(
+            const native_handle_t *nh) {
+        if (nh == nullptr) {
+            return aidl::android::hardware::common::NativeHandle();
+        }
+        return makeToAidl(nh);
+    }
+
+    /**
+     * Helper methods shared between AidlCamera3Device/AidlCamera3OfflineSession for HAL callbacks
+     */
+
+    // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
+    // callbacks
+    struct AidlCaptureOutputStates : public CaptureOutputStates {
+        std::unique_ptr<AidlResultMetadataQueue>& fmq;
+    };
+
+    // Handle one capture result. Assume callers hold the lock to serialize all
+    // processCaptureResult calls
+    void processOneCaptureResultLocked(
+            AidlCaptureOutputStates& states,
+            const aidl::android::hardware::camera::device::CaptureResult& result,
+            const std::vector<aidl::android::hardware::camera::device::PhysicalCameraMetadata>
+                    &physicalCameraMetadata);
+
+    void notify(CaptureOutputStates& states,
+        const aidl::android::hardware::camera::device::NotifyMsg& msg,
+        bool hasReadoutTime, uint64_t readoutTime);
+
+    void notify(CaptureOutputStates& states,
+        const aidl::android::hardware::camera::device::NotifyMsg& msg);
+
+    void requestStreamBuffers(RequestBufferStates& states,
+        const std::vector<aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
+        std::vector<::aidl::android::hardware::camera::device::StreamBufferRet>* out_buffers,
+        ::aidl::android::hardware::camera::device::BufferRequestStatus* _aidl_return);
+
+    void returnStreamBuffers(ReturnBufferStates& states,
+        const std::vector<aidl::android::hardware::camera::device::StreamBuffer>& buffers);
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
new file mode 100644
index 0000000..4bb426c
--- /dev/null
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -0,0 +1,1856 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HidlCamera3-Device"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0  // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macro for transient errors
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+            ##__VA_ARGS__)
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) setErrorState(   \
+    "%s: " fmt, __FUNCTION__,              \
+    ##__VA_ARGS__)
+#define SET_ERR_L(fmt, ...) setErrorStateLocked( \
+    "%s: " fmt, __FUNCTION__,                    \
+    ##__VA_ARGS__)
+
+
+#include <inttypes.h>
+
+#include <utility>
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Timers.h>
+#include <cutils/properties.h>
+
+#include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+
+#include "device3/hidl/HidlCamera3OutputUtils.h"
+#include "device3/hidl/HidlCamera3OfflineSession.h"
+#include "utils/SessionConfigurationUtilsHidl.h"
+#include "utils/TraceHFR.h"
+
+#include "../../common/hidl/HidlProviderInfo.h"
+#include "HidlCamera3Device.h"
+
+#include <algorithm>
+#include <tuple>
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+using namespace android::hardware::camera::device::V3_2;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
+
+namespace android {
+
+hardware::graphics::common::V1_0::PixelFormat HidlCamera3Device::mapToPixelFormat(
+        int frameworkFormat) {
+    return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
+}
+
+DataspaceFlags HidlCamera3Device::mapToHidlDataspace(
+        android_dataspace dataSpace) {
+    return dataSpace;
+}
+
+BufferUsageFlags HidlCamera3Device::mapToConsumerUsage(
+        uint64_t usage) {
+    return usage;
+}
+
+StreamRotation HidlCamera3Device::mapToStreamRotation(camera_stream_rotation_t rotation) {
+    switch (rotation) {
+        case CAMERA_STREAM_ROTATION_0:
+            return StreamRotation::ROTATION_0;
+        case CAMERA_STREAM_ROTATION_90:
+            return StreamRotation::ROTATION_90;
+        case CAMERA_STREAM_ROTATION_180:
+            return StreamRotation::ROTATION_180;
+        case CAMERA_STREAM_ROTATION_270:
+            return StreamRotation::ROTATION_270;
+    }
+    ALOGE("%s: Unknown stream rotation %d", __FUNCTION__, rotation);
+    return StreamRotation::ROTATION_0;
+}
+
+status_t HidlCamera3Device::mapToStreamConfigurationMode(
+        camera_stream_configuration_mode_t operationMode, StreamConfigurationMode *mode) {
+    if (mode == nullptr) return BAD_VALUE;
+    if (operationMode < CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START) {
+        switch(operationMode) {
+            case CAMERA_STREAM_CONFIGURATION_NORMAL_MODE:
+                *mode = StreamConfigurationMode::NORMAL_MODE;
+                break;
+            case CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
+                *mode = StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE;
+                break;
+            default:
+                ALOGE("%s: Unknown stream configuration mode %d", __FUNCTION__, operationMode);
+                return BAD_VALUE;
+        }
+    } else {
+        *mode = static_cast<StreamConfigurationMode>(operationMode);
+    }
+    return OK;
+}
+
+int HidlCamera3Device::mapToFrameworkFormat(
+        hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
+    return static_cast<uint32_t>(pixelFormat);
+}
+
+android_dataspace HidlCamera3Device::mapToFrameworkDataspace(
+        DataspaceFlags dataSpace) {
+    return static_cast<android_dataspace>(dataSpace);
+}
+
+uint64_t HidlCamera3Device::mapConsumerToFrameworkUsage(
+        BufferUsageFlags usage) {
+    return usage;
+}
+
+uint64_t HidlCamera3Device::mapProducerToFrameworkUsage(
+        BufferUsageFlags usage) {
+    return usage;
+}
+
+status_t HidlCamera3Device::initialize(sp<CameraProviderManager> manager,
+        const String8& monitorTags) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    ALOGV("%s: Initializing HIDL device for camera %s", __FUNCTION__, mId.string());
+    if (mStatus != STATUS_UNINITIALIZED) {
+        CLOGE("Already initialized!");
+        return INVALID_OPERATION;
+    }
+    if (manager == nullptr) return INVALID_OPERATION;
+
+    sp<ICameraDeviceSession> session;
+    ATRACE_BEGIN("CameraHal::openSession");
+    status_t res = manager->openHidlSession(mId.string(), this,
+            /*out*/ &session);
+    ATRACE_END();
+    if (res != OK) {
+        SET_ERR_L("Could not open camera session: %s (%d)", strerror(-res), res);
+        return res;
+    }
+
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    if (res != OK) {
+        SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
+        session->close();
+        return res;
+    }
+    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
+
+    std::vector<std::string> physicalCameraIds;
+    bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
+    if (isLogical) {
+        for (auto& physicalId : physicalCameraIds) {
+            // Do not override characteristics for physical cameras
+            res = manager->getCameraCharacteristics(
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+            if (res != OK) {
+                SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
+                        physicalId.c_str(), strerror(-res), res);
+                session->close();
+                return res;
+            }
+
+            bool usePrecorrectArray =
+                    DistortionMapper::isDistortionSupported(mPhysicalDeviceInfoMap[physicalId]);
+            if (usePrecorrectArray) {
+                res = mDistortionMappers[physicalId].setupStaticInfo(
+                        mPhysicalDeviceInfoMap[physicalId]);
+                if (res != OK) {
+                    SET_ERR_L("Unable to read camera %s's calibration fields for distortion "
+                            "correction", physicalId.c_str());
+                    session->close();
+                    return res;
+                }
+            }
+
+            mZoomRatioMappers[physicalId] = ZoomRatioMapper(
+                    &mPhysicalDeviceInfoMap[physicalId],
+                    mSupportNativeZoomRatio, usePrecorrectArray);
+
+            if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+                    mPhysicalDeviceInfoMap[physicalId])) {
+                mUHRCropAndMeteringRegionMappers[physicalId] =
+                        UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
+                                usePrecorrectArray);
+            }
+        }
+    }
+
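+    // Set up the fast message queues used to exchange capture request and result
+    // metadata with the HAL, when the HAL provides valid queue descriptors.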
+    std::shared_ptr<RequestMetadataQueue> queue;
+    auto requestQueueRet = session->getCaptureRequestMetadataQueue(
+        [&queue](const auto& descriptor) {
+            queue = std::make_shared<RequestMetadataQueue>(descriptor);
+            if (!queue->isValid() || queue->availableToWrite() <= 0) {
+                ALOGE("HAL returned an empty request metadata fmq; not using it");
+                queue = nullptr;
+                // Don't use the queue from here onwards.
+            }
+        });
+    if (!requestQueueRet.isOk()) {
+        ALOGE("Transaction error when getting request metadata fmq: %s; not using it",
+                requestQueueRet.description().c_str());
+        return DEAD_OBJECT;
+    }
+
+    std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
+    auto resultQueueRet = session->getCaptureResultMetadataQueue(
+        [&resQueue](const auto& descriptor) {
+            resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+            if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+                ALOGE("HAL returned an empty result metadata fmq; not using it");
+                resQueue = nullptr;
+                // Don't use the resQueue from here onwards.
+            }
+        });
+    if (!resultQueueRet.isOk()) {
+        ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+                resultQueueRet.description().c_str());
+        return DEAD_OBJECT;
+    }
+    IF_ALOGV() {
+        session->interfaceChain([](
+            ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) {
+                ALOGV("Session interface chain:");
+                for (const auto& iface : interfaceChain) {
+                    ALOGV("  %s", iface.c_str());
+                }
+            });
+    }
+
+    camera_metadata_entry bufMgrMode =
+            mDeviceInfo.find(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION);
+    if (bufMgrMode.count > 0) {
+         mUseHalBufManager = (bufMgrMode.data.u8[0] ==
+            ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
+    }
+
+    camera_metadata_entry_t capabilities = mDeviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    for (size_t i = 0; i < capabilities.count; i++) {
+        uint8_t capability = capabilities.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING) {
+            mSupportOfflineProcessing = true;
+        }
+    }
+
+    mInterface = new HidlHalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
+
+    std::string providerType;
+    mVendorTagId = manager->getProviderTagIdLocked(mId.string());
+    mTagMonitor.initialize(mVendorTagId);
+    if (!monitorTags.isEmpty()) {
+        mTagMonitor.parseTagsToMonitor(String8(monitorTags));
+    }
+
+    // Metadata tags need fixup for monochrome camera devices with a device
+    // version lower than 3.5.
+    hardware::hidl_version maxVersion{0,0};
+    IPCTransport transport = IPCTransport::HIDL;
+    res = manager->getHighestSupportedVersion(mId.string(), &maxVersion, &transport);
+    if (res != OK) {
+        ALOGE("%s: Error in getting camera device version id: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    int deviceVersion = HARDWARE_DEVICE_API_VERSION(
+            maxVersion.get_major(), maxVersion.get_minor());
+
+    bool isMonochrome = false;
+    for (size_t i = 0; i < capabilities.count; i++) {
+        uint8_t capability = capabilities.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
+            isMonochrome = true;
+        }
+    }
+    mNeedFixupMonochromeTags = (isMonochrome && deviceVersion < CAMERA_DEVICE_API_VERSION_3_5);
+
+    return initializeCommonLocked();
+}
+
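+// HAL buffer management callbacks: forward the HAL's buffer requests and buffer
+// returns to the shared camera3 output utilities.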
+hardware::Return<void> HidlCamera3Device::requestStreamBuffers(
+        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+        requestStreamBuffers_cb _hidl_cb) {
+    RequestBufferStates states {
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        *this, *mInterface, *this};
+    camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3Device::returnStreamBuffers(
+        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+    ReturnBufferStates states {
+        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, *mInterface};
+    camera3::returnStreamBuffers(states, buffers);
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3Device::processCaptureResult_3_4(
+        const hardware::hidl_vec<
+                hardware::camera::device::V3_4::CaptureResult>& results) {
+    // Ideally we should grab mLock, but that can lead to deadlock, and it's
+    // not critical to have an up-to-date value of mStatus for this warning
+    // print, so we skip the lock here.
+    if (mStatus == STATUS_ERROR) {
+        // Per API contract, HAL should act as closed after device error
+        // But mStatus can be set to error by framework as well, so just log
+        // a warning here.
+        ALOGW("%s: received capture result in error state.", __FUNCTION__);
+    }
+
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
+    if (mProcessCaptureResultLock.tryLock() != OK) {
+        // This should never happen; it indicates a wrong client implementation
+        // that doesn't follow the contract. But, we can be tolerant here.
+        ALOGE("%s: callback overlapped! waiting 1s...",
+                __FUNCTION__);
+        if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
+            ALOGE("%s: cannot acquire lock in 1s, dropping results",
+                    __FUNCTION__);
+            // really don't know what to do, so bail out.
+            return hardware::Void();
+        }
+    }
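+    // Gather references to the device state needed by the shared HIDL
+    // output-processing helpers for this batch of results.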
+    HidlCaptureOutputStates states {
+       {
+        mId,
+        mInFlightLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+    };
+
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
+    }
+    mProcessCaptureResultLock.unlock();
+    return hardware::Void();
+}
+
+// Only one processCaptureResult should be called at a time, so
+// the locks won't block. The locks are present here simply to enforce this.
+hardware::Return<void> HidlCamera3Device::processCaptureResult(
+        const hardware::hidl_vec<
+                hardware::camera::device::V3_2::CaptureResult>& results) {
+    hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+
+    // Ideally we should grab mLock, but that can lead to deadlock, and it's
+    // not critical to have an up-to-date value of mStatus for this warning
+    // print, so we skip the lock here.
+    if (mStatus == STATUS_ERROR) {
+        // Per API contract, HAL should act as closed after device error
+        // But mStatus can be set to error by framework as well, so just log
+        // a warning here.
+        ALOGW("%s: received capture result in error state.", __FUNCTION__);
+    }
+
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
+    if (mProcessCaptureResultLock.tryLock() != OK) {
+        // This should never happen; it indicates a wrong client implementation
+        // that doesn't follow the contract. But, we can be tolerant here.
+        ALOGE("%s: callback overlapped! waiting 1s...",
+                __FUNCTION__);
+        if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
+            ALOGE("%s: cannot acquire lock in 1s, dropping results",
+                    __FUNCTION__);
+            // really don't know what to do, so bail out.
+            return hardware::Void();
+        }
+    }
+
+    HidlCaptureOutputStates states {
+      {mId,
+        mInFlightLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+    };
+
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result, noPhysMetadata);
+    }
+    mProcessCaptureResultLock.unlock();
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3Device::notify(
+        const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
+    return notifyHelper<hardware::camera::device::V3_2::NotifyMsg>(msgs);
+}
+
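+// Shared notify() implementation, templated over the HIDL NotifyMsg version.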
+template<typename NotifyMsgType>
+hardware::Return<void> HidlCamera3Device::notifyHelper(
+        const hardware::hidl_vec<NotifyMsgType>& msgs) {
+    // Ideally we should grab mLock, but that can lead to deadlock, and it's
+    // not critical to have an up-to-date value of mStatus for this warning
+    // print, so we skip the lock here.
+    if (mStatus == STATUS_ERROR) {
+        // Per API contract, HAL should act as closed after device error
+        // But mStatus can be set to error by framework as well, so just log
+        // a warning here.
+        ALOGW("%s: received notify message in error state.", __FUNCTION__);
+    }
+
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> l(mOutputLock);
+        listener = mListener.promote();
+    }
+
+    HidlCaptureOutputStates states {
+      {mId,
+        mInFlightLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
+    };
+    for (const auto& msg : msgs) {
+        camera3::notify(states, msg);
+    }
+    return hardware::Void();
+}
+
+status_t HidlCamera3Device::switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/ sp<CameraOfflineSessionBase>* session) {
+    ATRACE_CALL();
+    if (session == nullptr) {
+        ALOGE("%s: session must not be null", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock il(mInterfaceLock);
+
+    bool hasInputStream = mInputStream != nullptr;
+    int32_t inputStreamId = hasInputStream ? mInputStream->getId() : -1;
+    bool inputStreamSupportsOffline = hasInputStream ?
+            mInputStream->getOfflineProcessingSupport() : false;
+    auto outputStreamIds = mOutputStreams.getStreamIds();
+    auto streamIds = outputStreamIds;
+    if (hasInputStream) {
+        streamIds.push_back(mInputStream->getId());
+    }
+
+    // Check all streams in streamsToKeep supports offline mode
+    for (auto id : streamsToKeep) {
+        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+            ALOGE("%s: Unknown stream ID %d", __FUNCTION__, id);
+            return BAD_VALUE;
+        } else if (id == inputStreamId) {
+            if (!inputStreamSupportsOffline) {
+                ALOGE("%s: input stream %d cannot be switched to offline",
+                        __FUNCTION__, id);
+                return BAD_VALUE;
+            }
+        } else {
+            sp<camera3::Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
+            if (!stream->getOfflineProcessingSupport()) {
+                ALOGE("%s: output stream %d cannot be switched to offline",
+                        __FUNCTION__, id);
+                return BAD_VALUE;
+            }
+        }
+    }
+    // TODO: block surface sharing and surface group streams until we can support them
+
+    // Stop repeating request, wait until all remaining requests are submitted, then call into
+    // HAL switchToOffline
+    hardware::camera::device::V3_6::CameraOfflineSessionInfo offlineSessionInfo;
+    sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession;
+    camera3::BufferRecords bufferRecords;
+    status_t ret = static_cast<HidlRequestThread *>(mRequestThread.get())->switchToOffline(
+            streamsToKeep, &offlineSessionInfo, &offlineSession, &bufferRecords);
+
+    if (ret != OK) {
+        SET_ERR("Switch to offline failed: %s (%d)", strerror(-ret), ret);
+        return ret;
+    }
+
+    bool succ = mRequestBufferSM.onSwitchToOfflineSuccess();
+    if (!succ) {
+        SET_ERR("HAL must not be calling requestStreamBuffers at this point");
+        // TODO: block ALL callbacks from HAL till app configured new streams?
+        return UNKNOWN_ERROR;
+    }
+
+    // Verify offlineSessionInfo
+    std::vector<int32_t> offlineStreamIds;
+    offlineStreamIds.reserve(offlineSessionInfo.offlineStreams.size());
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        // verify stream IDs
+        int32_t id = offlineStream.id;
+        if (std::find(streamIds.begin(), streamIds.end(), id) == streamIds.end()) {
+            SET_ERR("stream ID %d not found!", id);
+            return UNKNOWN_ERROR;
+        }
+
+        // When not using HAL buf manager, only allow streams requested by app to be preserved
+        if (!mUseHalBufManager) {
+            if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
+                SET_ERR("stream ID %d must not be switched to offline!", id);
+                return UNKNOWN_ERROR;
+            }
+        }
+
+        offlineStreamIds.push_back(id);
+        sp<Camera3StreamInterface> stream = (id == inputStreamId) ?
+                static_cast<sp<Camera3StreamInterface>>(mInputStream) :
+                static_cast<sp<Camera3StreamInterface>>(mOutputStreams.get(id));
+        // Verify number of outstanding buffers
+        if (stream->getOutstandingBuffersCount() != offlineStream.numOutstandingBuffers) {
+            SET_ERR("Offline stream %d # of remaining buffer mismatch: (%zu,%d) (service/HAL)",
+                    id, stream->getOutstandingBuffersCount(), offlineStream.numOutstandingBuffers);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Verify all streams to be deleted don't have any outstanding buffers
+    if (hasInputStream && std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+                inputStreamId) == offlineStreamIds.end()) {
+        if (mInputStream->hasOutstandingBuffers()) {
+            SET_ERR("Input stream %d still has %zu outstanding buffers!",
+                    inputStreamId, mInputStream->getOutstandingBuffersCount());
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    for (const auto& outStreamId : outputStreamIds) {
+        if (std::find(offlineStreamIds.begin(), offlineStreamIds.end(),
+                outStreamId) == offlineStreamIds.end()) {
+            auto outStream = mOutputStreams.get(outStreamId);
+            if (outStream->hasOutstandingBuffers()) {
+                SET_ERR("Output stream %d still has %zu outstanding buffers!",
+                        outStreamId, outStream->getOutstandingBuffersCount());
+                return UNKNOWN_ERROR;
+            }
+        }
+    }
+
+    InFlightRequestMap offlineReqs;
+    // Verify inflight requests and their pending buffers
+    {
+        std::lock_guard<std::mutex> l(mInFlightLock);
+        for (auto offlineReq : offlineSessionInfo.offlineRequests) {
+            int idx = mInFlightMap.indexOfKey(offlineReq.frameNumber);
+            if (idx == NAME_NOT_FOUND) {
+                SET_ERR("Offline request frame number %d not found!", offlineReq.frameNumber);
+                return UNKNOWN_ERROR;
+            }
+
+            const auto& inflightReq = mInFlightMap.valueAt(idx);
+            // TODO: check specific stream IDs
+            size_t numBuffersLeft = static_cast<size_t>(inflightReq.numBuffersLeft);
+            if (numBuffersLeft != offlineReq.pendingStreams.size()) {
+                SET_ERR("Offline request # of remaining buffers mismatch: (%d,%zu) (service/HAL)",
+                        inflightReq.numBuffersLeft, offlineReq.pendingStreams.size());
+                return UNKNOWN_ERROR;
+            }
+            offlineReqs.add(offlineReq.frameNumber, inflightReq);
+        }
+    }
+
+    // Create Camera3OfflineSession and transfer object ownership
+    //   (streams, inflight requests, buffer caches)
+    camera3::StreamSet offlineStreamSet;
+    sp<camera3::Camera3Stream> inputStream;
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        int32_t id = offlineStream.id;
+        if (mInputStream != nullptr && id == mInputStream->getId()) {
+            inputStream = mInputStream;
+        } else {
+            offlineStreamSet.add(id, mOutputStreams.get(id));
+        }
+    }
+
+    // TODO: check if we need to lock before copying states
+    //       though technically no other thread should be talking to Camera3Device at this point
+    Camera3OfflineStates offlineStates(
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
+            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
+            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
+            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
+            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
+            mZoomRatioMappers, mRotateAndCropMappers);
+
+    *session = new HidlCamera3OfflineSession(mId, inputStream, offlineStreamSet,
+            std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+
+    // Delete all streams that have been transferred to the offline session
+    Mutex::Autolock l(mLock);
+    for (auto offlineStream : offlineSessionInfo.offlineStreams) {
+        int32_t id = offlineStream.id;
+        if (mInputStream != nullptr && id == mInputStream->getId()) {
+            mInputStream.clear();
+        } else {
+            mOutputStreams.remove(id);
+        }
+    }
+
+    // disconnect all other streams and switch to UNCONFIGURED state
+    if (mInputStream != nullptr) {
+        ret = mInputStream->disconnect();
+        if (ret != OK) {
+            SET_ERR_L("disconnect input stream failed!");
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    for (auto streamId : mOutputStreams.getStreamIds()) {
+        sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
+        ret = stream->disconnect();
+        if (ret != OK) {
+            SET_ERR_L("disconnect output stream %d failed!", streamId);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    mInputStream.clear();
+    mOutputStreams.clear();
+    mNeedConfig = true;
+    internalUpdateStatusLocked(STATUS_UNCONFIGURED);
+    mOperatingMode = NO_MODE;
+    mIsConstrainedHighSpeedConfiguration = false;
+    mRequestThread->clearPreviousRequest();
+
+    return OK;
+    // TO be done by CameraDeviceClient/Camera3OfflineSession
+    // register the offline client to camera service
+    // Setup result passthing threads etc
+    // Initialize offline session so HAL can start sending callback to it (result Fmq)
+    // TODO: check how many onIdle callback will be sent
+    // Java side to make sure the CameraCaptureSession is properly closed
+}
+
+sp<Camera3Device::RequestThread> HidlCamera3Device::createNewRequestThread(
+                wp<Camera3Device> parent, sp<camera3::StatusTracker> statusTracker,
+                sp<Camera3Device::HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute) {
+        return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
+                useHalBufManager, supportCameraMute);
+}
+
+sp<Camera3Device::Camera3DeviceInjectionMethods>
+HidlCamera3Device::createCamera3DeviceInjectionMethods(wp<Camera3Device> parent) {
+    return new HidlCamera3DeviceInjectionMethods(parent);
+}
+
+status_t HidlCamera3Device::injectionCameraInitialize(const String8 &injectedCamId,
+            sp<CameraProviderManager> manager) {
+        return (static_cast<HidlCamera3DeviceInjectionMethods *>(
+                mInjectionMethods.get()))->injectionInitialize(injectedCamId, manager, this);
+}
+
+
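+// HidlHalInterface wraps the versioned ICameraDeviceSession interfaces behind the
+// common HalInterface API.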
+HidlCamera3Device::HidlHalInterface::HidlHalInterface(
+            sp<device::V3_2::ICameraDeviceSession> &session,
+            std::shared_ptr<RequestMetadataQueue> queue,
+            bool useHalBufManager, bool supportOfflineProcessing) :
+        HalInterface(useHalBufManager, supportOfflineProcessing),
+        mHidlSession(session),
+        mRequestMetadataQueue(queue) {
+    // Check with hardware service manager if we can downcast these interfaces
+    // Somewhat expensive, so cache the results at startup
+    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_7.isOk()) {
+        mHidlSession_3_7 = castResult_3_7;
+    }
+    auto castResult_3_6 = device::V3_6::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_6.isOk()) {
+        mHidlSession_3_6 = castResult_3_6;
+    }
+    auto castResult_3_5 = device::V3_5::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_5.isOk()) {
+        mHidlSession_3_5 = castResult_3_5;
+    }
+    auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_4.isOk()) {
+        mHidlSession_3_4 = castResult_3_4;
+    }
+    auto castResult_3_3 = device::V3_3::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_3.isOk()) {
+        mHidlSession_3_3 = castResult_3_3;
+    }
+}
+
+bool HidlCamera3Device::HidlHalInterface::valid() {
+    return (mHidlSession != nullptr);
+}
+
+void HidlCamera3Device::HidlHalInterface::clear() {
+    mHidlSession_3_7.clear();
+    mHidlSession_3_6.clear();
+    mHidlSession_3_5.clear();
+    mHidlSession_3_4.clear();
+    mHidlSession_3_3.clear();
+    mHidlSession.clear();
+}
+
+status_t HidlCamera3Device::HidlHalInterface::constructDefaultRequestSettings(
+        camera_request_template_t templateId,
+        /*out*/ camera_metadata_t **requestTemplate) {
+    ATRACE_NAME("CameraHidlHal::constructDefaultRequestSettings");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    common::V1_0::Status status;
+
+    auto requestCallback = [&status, &requestTemplate]
+            (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
+            status = s;
+            if (status == common::V1_0::Status::OK) {
+                const camera_metadata *r =
+                        reinterpret_cast<const camera_metadata_t*>(request.data());
+                size_t expectedSize = request.size();
+                int ret = validate_camera_metadata_structure(r, &expectedSize);
+                if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
+                    *requestTemplate = clone_camera_metadata(r);
+                    if (*requestTemplate == nullptr) {
+                        ALOGE("%s: Unable to clone camera metadata received from HAL",
+                                __FUNCTION__);
+                        status = common::V1_0::Status::INTERNAL_ERROR;
+                    }
+                } else {
+                    ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+                    status = common::V1_0::Status::INTERNAL_ERROR;
+                }
+            }
+        };
+    hardware::Return<void> err;
+    RequestTemplate id;
+    switch (templateId) {
+        case CAMERA_TEMPLATE_PREVIEW:
+            id = RequestTemplate::PREVIEW;
+            break;
+        case CAMERA_TEMPLATE_STILL_CAPTURE:
+            id = RequestTemplate::STILL_CAPTURE;
+            break;
+        case CAMERA_TEMPLATE_VIDEO_RECORD:
+            id = RequestTemplate::VIDEO_RECORD;
+            break;
+        case CAMERA_TEMPLATE_VIDEO_SNAPSHOT:
+            id = RequestTemplate::VIDEO_SNAPSHOT;
+            break;
+        case CAMERA_TEMPLATE_ZERO_SHUTTER_LAG:
+            id = RequestTemplate::ZERO_SHUTTER_LAG;
+            break;
+        case CAMERA_TEMPLATE_MANUAL:
+            id = RequestTemplate::MANUAL;
+            break;
+        default:
+            // Unknown template ID, or this HAL is too old to support it
+            return BAD_VALUE;
+    }
+    err = mHidlSession->constructDefaultRequestSettings(id, requestCallback);
+
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        res = DEAD_OBJECT;
+    } else {
+        res = HidlProviderInfo::mapToStatusT(status);
+    }
+
+    return res;
+}
+
+bool HidlCamera3Device::HidlHalInterface::isReconfigurationRequired(
+        CameraMetadata& oldSessionParams, CameraMetadata& newSessionParams) {
+    // We do reconfiguration by default.
+    bool ret = true;
+    if ((mHidlSession_3_5 != nullptr) && mIsReconfigurationQuerySupported) {
+        android::hardware::hidl_vec<uint8_t> oldParams, newParams;
+        camera_metadata_t* oldSessionMeta = const_cast<camera_metadata_t*>(
+                oldSessionParams.getAndLock());
+        camera_metadata_t* newSessionMeta = const_cast<camera_metadata_t*>(
+                newSessionParams.getAndLock());
+        oldParams.setToExternal(reinterpret_cast<uint8_t*>(oldSessionMeta),
+                get_camera_metadata_size(oldSessionMeta));
+        newParams.setToExternal(reinterpret_cast<uint8_t*>(newSessionMeta),
+                get_camera_metadata_size(newSessionMeta));
+        hardware::camera::common::V1_0::Status callStatus;
+        bool required;
+        auto hidlCb = [&callStatus, &required] (hardware::camera::common::V1_0::Status s,
+                bool requiredFlag) {
+            callStatus = s;
+            required = requiredFlag;
+        };
+        auto err = mHidlSession_3_5->isReconfigurationRequired(oldParams, newParams, hidlCb);
+        oldSessionParams.unlock(oldSessionMeta);
+        newSessionParams.unlock(newSessionMeta);
+        if (err.isOk()) {
+            switch (callStatus) {
+                case hardware::camera::common::V1_0::Status::OK:
+                    ret = required;
+                    break;
+                case hardware::camera::common::V1_0::Status::METHOD_NOT_SUPPORTED:
+                    mIsReconfigurationQuerySupported = false;
+                    ret = true;
+                    break;
+                default:
+                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
+                    ret = true;
+            }
+        } else {
+            ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, err.description().c_str());
+            ret = true;
+        }
+    }
+
+    return ret;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::configureStreams(
+        const camera_metadata_t *sessionParams,
+        camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+    ATRACE_NAME("CameraHal::configureStreams");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    if (config->input_is_multi_resolution && mHidlSession_3_7 == nullptr) {
+        ALOGE("%s: Camera device doesn't support multi-resolution input stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // Convert stream config to HIDL
+    std::set<int> activeStreams;
+    device::V3_2::StreamConfiguration requestedConfiguration3_2;
+    device::V3_4::StreamConfiguration requestedConfiguration3_4;
+    device::V3_7::StreamConfiguration requestedConfiguration3_7;
+    requestedConfiguration3_2.streams.resize(config->num_streams);
+    requestedConfiguration3_4.streams.resize(config->num_streams);
+    requestedConfiguration3_7.streams.resize(config->num_streams);
+    for (size_t i = 0; i < config->num_streams; i++) {
+        device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
+        device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
+        device::V3_7::Stream &dst3_7 = requestedConfiguration3_7.streams[i];
+        camera3::camera_stream_t *src = config->streams[i];
+
+        Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        cam3stream->setBufferFreedListener(this);
+        int streamId = cam3stream->getId();
+        StreamType streamType;
+        switch (src->stream_type) {
+            case CAMERA_STREAM_OUTPUT:
+                streamType = StreamType::OUTPUT;
+                break;
+            case CAMERA_STREAM_INPUT:
+                streamType = StreamType::INPUT;
+                break;
+            default:
+                ALOGE("%s: Stream %d: Unsupported stream type %d",
+                        __FUNCTION__, streamId, config->streams[i]->stream_type);
+                return BAD_VALUE;
+        }
+        dst3_2.id = streamId;
+        dst3_2.streamType = streamType;
+        dst3_2.width = src->width;
+        dst3_2.height = src->height;
+        dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
+        dst3_2.rotation = mapToStreamRotation((camera_stream_rotation_t) src->rotation);
+        // For HidlSession version 3.5 or newer, the format and dataSpace sent
+        // to HAL are original, not the overridden ones.
+        if (mHidlSession_3_5 != nullptr) {
+            dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden() ?
+                    cam3stream->getOriginalFormat() : src->format);
+            dst3_2.dataSpace = mapToHidlDataspace(cam3stream->isDataSpaceOverridden() ?
+                    cam3stream->getOriginalDataSpace() : src->data_space);
+        } else {
+            dst3_2.format = mapToPixelFormat(src->format);
+            dst3_2.dataSpace = mapToHidlDataspace(src->data_space);
+        }
+        dst3_4.v3_2 = dst3_2;
+        dst3_4.bufferSize = bufferSizes[i];
+        if (src->physical_camera_id != nullptr) {
+            dst3_4.physicalCameraId = src->physical_camera_id;
+        }
+        dst3_7.v3_4 = dst3_4;
+        dst3_7.groupId = cam3stream->getHalStreamGroupId();
+        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst3_7.sensorPixelModesUsed[j++] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+        }
+        if (src->dynamic_range_profile !=
+                    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+            ALOGE("%s: Camera device doesn't support non-standard dynamic range profiles: %" PRIx64,
+                    __FUNCTION__, src->dynamic_range_profile);
+            return BAD_VALUE;
+        }
+        if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+            ALOGE("%s: Camera device doesn't support non-default stream use case %" PRId64 "!",
+                    __FUNCTION__, src->use_case);
+            return BAD_VALUE;
+        }
+        activeStreams.insert(streamId);
+        // Create Buffer ID map if necessary
+        mBufferRecords.tryCreateBufferCache(streamId);
+    }
+    // remove BufferIdMap for deleted streams
+    mBufferRecords.removeInactiveBufferCaches(activeStreams);
+
+    StreamConfigurationMode operationMode;
+    res = mapToStreamConfigurationMode(
+            (camera_stream_configuration_mode_t) config->operation_mode,
+            /*out*/ &operationMode);
+    if (res != OK) {
+        return res;
+    }
+    requestedConfiguration3_2.operationMode = operationMode;
+    requestedConfiguration3_4.operationMode = operationMode;
+    requestedConfiguration3_7.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
+    requestedConfiguration3_4.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            sessionParamSize);
+    requestedConfiguration3_7.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            sessionParamSize);
+
+    // Invoke configureStreams
+    device::V3_3::HalStreamConfiguration finalConfiguration;
+    device::V3_4::HalStreamConfiguration finalConfiguration3_4;
+    device::V3_6::HalStreamConfiguration finalConfiguration3_6;
+    common::V1_0::Status status;
+
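+    // Callbacks and post-processing helpers that capture the HAL's response and
+    // normalize it into the common v3.3 finalConfiguration.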
+    auto configStream34Cb = [&status, &finalConfiguration3_4]
+            (common::V1_0::Status s, const device::V3_4::HalStreamConfiguration& halConfiguration) {
+                finalConfiguration3_4 = halConfiguration;
+                status = s;
+            };
+
+    auto configStream36Cb = [&status, &finalConfiguration3_6]
+            (common::V1_0::Status s, const device::V3_6::HalStreamConfiguration& halConfiguration) {
+                finalConfiguration3_6 = halConfiguration;
+                status = s;
+            };
+
+    auto postprocConfigStream34 = [&finalConfiguration, &finalConfiguration3_4]
+            (hardware::Return<void>& err) -> status_t {
+                if (!err.isOk()) {
+                    ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+                    return DEAD_OBJECT;
+                }
+                finalConfiguration.streams.resize(finalConfiguration3_4.streams.size());
+                for (size_t i = 0; i < finalConfiguration3_4.streams.size(); i++) {
+                    finalConfiguration.streams[i] = finalConfiguration3_4.streams[i].v3_3;
+                }
+                return OK;
+            };
+
+    auto postprocConfigStream36 = [&finalConfiguration, &finalConfiguration3_6]
+            (hardware::Return<void>& err) -> status_t {
+                if (!err.isOk()) {
+                    ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+                    return DEAD_OBJECT;
+                }
+                finalConfiguration.streams.resize(finalConfiguration3_6.streams.size());
+                for (size_t i = 0; i < finalConfiguration3_6.streams.size(); i++) {
+                    finalConfiguration.streams[i] = finalConfiguration3_6.streams[i].v3_4.v3_3;
+                }
+                return OK;
+            };
+
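+    // Dispatch to the newest configureStreams entry point the HAL session supports.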
+    if (mHidlSession_3_7 != nullptr) {
+        ALOGV("%s: v3.7 device found", __FUNCTION__);
+        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
+        requestedConfiguration3_7.multiResolutionInputImage = config->input_is_multi_resolution;
+        auto err = mHidlSession_3_7->configureStreams_3_7(
+                requestedConfiguration3_7, configStream36Cb);
+        res = postprocConfigStream36(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_6 != nullptr) {
+        ALOGV("%s: v3.6 device found", __FUNCTION__);
+        device::V3_5::StreamConfiguration requestedConfiguration3_5;
+        requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
+        requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
+        auto err = mHidlSession_3_6->configureStreams_3_6(
+                requestedConfiguration3_5, configStream36Cb);
+        res = postprocConfigStream36(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_5 != nullptr) {
+        ALOGV("%s: v3.5 device found", __FUNCTION__);
+        device::V3_5::StreamConfiguration requestedConfiguration3_5;
+        requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
+        requestedConfiguration3_5.streamConfigCounter = mNextStreamConfigCounter++;
+        auto err = mHidlSession_3_5->configureStreams_3_5(
+                requestedConfiguration3_5, configStream34Cb);
+        res = postprocConfigStream34(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_4 != nullptr) {
+        // HAL session is at least v3.4; use the v3.4 call
+        ALOGV("%s: v3.4 device found", __FUNCTION__);
+        auto err = mHidlSession_3_4->configureStreams_3_4(
+                requestedConfiguration3_4, configStream34Cb);
+        res = postprocConfigStream34(err);
+        if (res != OK) {
+            return res;
+        }
+    } else if (mHidlSession_3_3 != nullptr) {
+        // HAL session is at least v3.3; use the v3.3 call
+        ALOGV("%s: v3.3 device found", __FUNCTION__);
+        auto err = mHidlSession_3_3->configureStreams_3_3(requestedConfiguration3_2,
+            [&status, &finalConfiguration]
+            (common::V1_0::Status s, const device::V3_3::HalStreamConfiguration& halConfiguration) {
+                finalConfiguration = halConfiguration;
+                status = s;
+            });
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+            return DEAD_OBJECT;
+        }
+    } else {
+        // HAL session is v3.2 only; use the v3.2 call and construct a v3.3 HalStreamConfiguration
+        ALOGV("%s: v3.2 device found", __FUNCTION__);
+        HalStreamConfiguration finalConfiguration_3_2;
+        auto err = mHidlSession->configureStreams(requestedConfiguration3_2,
+                [&status, &finalConfiguration_3_2]
+                (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
+                    finalConfiguration_3_2 = halConfiguration;
+                    status = s;
+                });
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+            return DEAD_OBJECT;
+        }
+        finalConfiguration.streams.resize(finalConfiguration_3_2.streams.size());
+        for (size_t i = 0; i < finalConfiguration_3_2.streams.size(); i++) {
+            finalConfiguration.streams[i].v3_2 = finalConfiguration_3_2.streams[i];
+            finalConfiguration.streams[i].overrideDataSpace =
+                    requestedConfiguration3_2.streams[i].dataSpace;
+        }
+    }
+
+    if (status != common::V1_0::Status::OK ) {
+        return HidlProviderInfo::mapToStatusT(status);
+    }
+
+    // And convert output stream configuration from HIDL
+
+    for (size_t i = 0; i < config->num_streams; i++) {
+        camera3::camera_stream_t *dst = config->streams[i];
+        int streamId = Camera3Stream::cast(dst)->getId();
+
+        // Start scan at i, with the assumption that the stream order matches
+        size_t realIdx = i;
+        bool found = false;
+        size_t halStreamCount = finalConfiguration.streams.size();
+        for (size_t idx = 0; idx < halStreamCount; idx++) {
+            if (finalConfiguration.streams[realIdx].v3_2.id == streamId) {
+                found = true;
+                break;
+            }
+            realIdx = (realIdx >= halStreamCount - 1) ? 0 : realIdx + 1;
+        }
+        if (!found) {
+            ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+                    __FUNCTION__, streamId);
+            return INVALID_OPERATION;
+        }
+        device::V3_3::HalStream &src = finalConfiguration.streams[realIdx];
+        device::V3_6::HalStream &src_36 = finalConfiguration3_6.streams[realIdx];
+
+        Camera3Stream* dstStream = Camera3Stream::cast(dst);
+        int overrideFormat = mapToFrameworkFormat(src.v3_2.overrideFormat);
+        android_dataspace overrideDataSpace = mapToFrameworkDataspace(src.overrideDataSpace);
+
+        if (mHidlSession_3_6 != nullptr) {
+            dstStream->setOfflineProcessingSupport(src_36.supportOffline);
+        }
+
+        if (dstStream->getOriginalFormat() != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            dstStream->setFormatOverride(false);
+            dstStream->setDataSpaceOverride(false);
+            if (dst->format != overrideFormat) {
+                ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
+                        streamId, dst->format);
+            }
+            if (dst->data_space != overrideDataSpace) {
+                ALOGE("%s: Stream %d: DataSpace override not allowed for format 0x%x", __FUNCTION__,
+                        streamId, dst->format);
+            }
+        } else {
+            bool needFormatOverride =
+                    requestedConfiguration3_2.streams[i].format != src.v3_2.overrideFormat;
+            bool needDataspaceOverride =
+                    requestedConfiguration3_2.streams[i].dataSpace != src.overrideDataSpace;
+            // Override allowed with IMPLEMENTATION_DEFINED
+            dstStream->setFormatOverride(needFormatOverride);
+            dstStream->setDataSpaceOverride(needDataspaceOverride);
+            dst->format = overrideFormat;
+            dst->data_space = overrideDataSpace;
+        }
+
+        if (dst->stream_type == CAMERA_STREAM_INPUT) {
+            if (src.v3_2.producerUsage != 0) {
+                ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
+                        __FUNCTION__, streamId);
+                return INVALID_OPERATION;
+            }
+            dstStream->setUsage(
+                    mapConsumerToFrameworkUsage(src.v3_2.consumerUsage));
+        } else {
+            // OUTPUT
+            if (src.v3_2.consumerUsage != 0) {
+                ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
+                        __FUNCTION__, streamId);
+                return INVALID_OPERATION;
+            }
+            dstStream->setUsage(
+                    mapProducerToFrameworkUsage(src.v3_2.producerUsage));
+        }
+        dst->max_buffers = src.v3_2.maxBuffers;
+    }
+
+    return res;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::configureInjectedStreams(
+        const camera_metadata_t* sessionParams, camera_stream_configuration* config,
+        const std::vector<uint32_t>& bufferSizes,
+        const CameraMetadata& cameraCharacteristics) {
+    ATRACE_NAME("InjectionCameraHal::configureStreams");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    if (config->input_is_multi_resolution) {
+        ALOGE("%s: Injection camera device doesn't support multi-resolution input "
+                "stream", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // Convert stream config to HIDL
+    std::set<int> activeStreams;
+    device::V3_2::StreamConfiguration requestedConfiguration3_2;
+    device::V3_4::StreamConfiguration requestedConfiguration3_4;
+    device::V3_7::StreamConfiguration requestedConfiguration3_7;
+    requestedConfiguration3_2.streams.resize(config->num_streams);
+    requestedConfiguration3_4.streams.resize(config->num_streams);
+    requestedConfiguration3_7.streams.resize(config->num_streams);
+    for (size_t i = 0; i < config->num_streams; i++) {
+        device::V3_2::Stream& dst3_2 = requestedConfiguration3_2.streams[i];
+        device::V3_4::Stream& dst3_4 = requestedConfiguration3_4.streams[i];
+        device::V3_7::Stream& dst3_7 = requestedConfiguration3_7.streams[i];
+        camera3::camera_stream_t* src = config->streams[i];
+
+        Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        cam3stream->setBufferFreedListener(this);
+        int streamId = cam3stream->getId();
+        StreamType streamType;
+        switch (src->stream_type) {
+            case CAMERA_STREAM_OUTPUT:
+                streamType = StreamType::OUTPUT;
+                break;
+            case CAMERA_STREAM_INPUT:
+                streamType = StreamType::INPUT;
+                break;
+            default:
+                ALOGE("%s: Stream %d: Unsupported stream type %d", __FUNCTION__,
+                        streamId, config->streams[i]->stream_type);
+            return BAD_VALUE;
+        }
+        dst3_2.id = streamId;
+        dst3_2.streamType = streamType;
+        dst3_2.width = src->width;
+        dst3_2.height = src->height;
+        dst3_2.usage = mapToConsumerUsage(cam3stream->getUsage());
+        dst3_2.rotation =
+                mapToStreamRotation((camera_stream_rotation_t)src->rotation);
+        // For HidlSession version 3.5 or newer, the format and dataSpace sent
+        // to HAL are original, not the overridden ones.
+        if (mHidlSession_3_5 != nullptr) {
+            dst3_2.format = mapToPixelFormat(cam3stream->isFormatOverridden()
+                                            ? cam3stream->getOriginalFormat()
+                                            : src->format);
+            dst3_2.dataSpace =
+                    mapToHidlDataspace(cam3stream->isDataSpaceOverridden()
+                                    ? cam3stream->getOriginalDataSpace()
+                                    : src->data_space);
+        } else {
+            dst3_2.format = mapToPixelFormat(src->format);
+            dst3_2.dataSpace = mapToHidlDataspace(src->data_space);
+        }
+        dst3_4.v3_2 = dst3_2;
+        dst3_4.bufferSize = bufferSizes[i];
+        if (src->physical_camera_id != nullptr) {
+            dst3_4.physicalCameraId = src->physical_camera_id;
+        }
+        dst3_7.v3_4 = dst3_4;
+        dst3_7.groupId = cam3stream->getHalStreamGroupId();
+        dst3_7.sensorPixelModesUsed.resize(src->sensor_pixel_modes_used.size());
+        size_t j = 0;
+        for (int mode : src->sensor_pixel_modes_used) {
+            dst3_7.sensorPixelModesUsed[j++] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+        }
+        activeStreams.insert(streamId);
+        // Create Buffer ID map if necessary
+        mBufferRecords.tryCreateBufferCache(streamId);
+    }
+    // remove BufferIdMap for deleted streams
+    mBufferRecords.removeInactiveBufferCaches(activeStreams);
+
+    StreamConfigurationMode operationMode;
+    res = mapToStreamConfigurationMode(
+            (camera_stream_configuration_mode_t)config->operation_mode,
+            /*out*/ &operationMode);
+    if (res != OK) {
+        return res;
+    }
+    requestedConfiguration3_7.operationMode = operationMode;
+    size_t sessionParamSize = get_camera_metadata_size(sessionParams);
+    requestedConfiguration3_7.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            sessionParamSize);
+
+    // See which version of HAL we have
+    if (mHidlSession_3_7 != nullptr) {
+        requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
+        requestedConfiguration3_7.multiResolutionInputImage =
+                config->input_is_multi_resolution;
+
+        const camera_metadata_t* rawMetadata = cameraCharacteristics.getAndLock();
+        ::android::hardware::camera::device::V3_2::CameraMetadata hidlChars = {};
+        hidlChars.setToExternal(
+                reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(rawMetadata)),
+                get_camera_metadata_size(rawMetadata));
+        cameraCharacteristics.unlock(rawMetadata);
+
+        sp<hardware::camera::device::V3_7::ICameraInjectionSession>
+                hidlInjectionSession_3_7;
+        auto castInjectionResult_3_7 =
+                device::V3_7::ICameraInjectionSession::castFrom(mHidlSession_3_7);
+        if (castInjectionResult_3_7.isOk()) {
+            hidlInjectionSession_3_7 = castInjectionResult_3_7;
+        } else {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__,
+                    castInjectionResult_3_7.description().c_str());
+            return DEAD_OBJECT;
+        }
+
+        auto err = hidlInjectionSession_3_7->configureInjectionStreams(
+                requestedConfiguration3_7, hidlChars);
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__,
+                    err.description().c_str());
+            return DEAD_OBJECT;
+        }
+    } else {
+        ALOGE("%s: mHidlSession_3_7 does not exist; injection sessions require "
+                "device version 3.7 or newer", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    return res;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::wrapAsHidlRequest(camera_capture_request_t* request,
+        /*out*/device::V3_2::CaptureRequest* captureRequest,
+        /*out*/std::vector<native_handle_t*>* handlesCreated,
+        /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers) {
+    ATRACE_CALL();
+    if (captureRequest == nullptr || handlesCreated == nullptr || inflightBuffers == nullptr) {
+        ALOGE("%s: captureRequest (%p), handlesCreated (%p), and inflightBuffers (%p) "
+                "must not be null", __FUNCTION__, captureRequest, handlesCreated, inflightBuffers);
+        return BAD_VALUE;
+    }
+
+    captureRequest->frameNumber = request->frame_number;
+
+    captureRequest->fmqSettingsSize = 0;
+
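+    // Translate the input buffer (if present) and all output buffers into their
+    // HIDL representations, recording framework-provided buffers as in flight.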
+    {
+        if (request->input_buffer != nullptr) {
+            int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
+            buffer_handle_t buf = *(request->input_buffer->buffer);
+            auto pair = getBufferId(buf, streamId);
+            bool isNewBuffer = pair.first;
+            uint64_t bufferId = pair.second;
+            captureRequest->inputBuffer.streamId = streamId;
+            captureRequest->inputBuffer.bufferId = bufferId;
+            captureRequest->inputBuffer.buffer = (isNewBuffer) ? buf : nullptr;
+            captureRequest->inputBuffer.status = BufferStatus::OK;
+            native_handle_t *acquireFence = nullptr;
+            if (request->input_buffer->acquire_fence != -1) {
+                acquireFence = native_handle_create(1,0);
+                acquireFence->data[0] = request->input_buffer->acquire_fence;
+                handlesCreated->push_back(acquireFence);
+            }
+            captureRequest->inputBuffer.acquireFence = acquireFence;
+            captureRequest->inputBuffer.releaseFence = nullptr;
+
+            mBufferRecords.pushInflightBuffer(captureRequest->frameNumber, streamId,
+                    request->input_buffer->buffer);
+            inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
+        } else {
+            captureRequest->inputBuffer.streamId = -1;
+            captureRequest->inputBuffer.bufferId = BUFFER_ID_NO_BUFFER;
+        }
+
+        captureRequest->outputBuffers.resize(request->num_output_buffers);
+        for (size_t i = 0; i < request->num_output_buffers; i++) {
+            const camera_stream_buffer_t *src = request->output_buffers + i;
+            StreamBuffer &dst = captureRequest->outputBuffers[i];
+            int32_t streamId = Camera3Stream::cast(src->stream)->getId();
+            if (src->buffer != nullptr) {
+                buffer_handle_t buf = *(src->buffer);
+                auto pair = getBufferId(buf, streamId);
+                bool isNewBuffer = pair.first;
+                dst.bufferId = pair.second;
+                dst.buffer = isNewBuffer ? buf : nullptr;
+                native_handle_t *acquireFence = nullptr;
+                if (src->acquire_fence != -1) {
+                    acquireFence = native_handle_create(1, 0);
+                    acquireFence->data[0] = src->acquire_fence;
+                    handlesCreated->push_back(acquireFence);
+                }
+                dst.acquireFence = acquireFence;
+            } else if (mUseHalBufManager) {
+                // HAL buffer management path
+                dst.bufferId = BUFFER_ID_NO_BUFFER;
+                dst.buffer = nullptr;
+                dst.acquireFence = nullptr;
+            } else {
+                ALOGE("%s: cannot send a null buffer in capture request!", __FUNCTION__);
+                return BAD_VALUE;
+            }
+            dst.streamId = streamId;
+            dst.status = BufferStatus::OK;
+            dst.releaseFence = nullptr;
+
+            // With the HAL buffer manager the output buffers are empty at this point, so
+            // there is nothing to track as in-flight yet.
+            if (!mUseHalBufManager) {
+                mBufferRecords.pushInflightBuffer(
+                        captureRequest->frameNumber, streamId, src->buffer);
+                inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
+            }
+        }
+    }
+    return OK;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::flush() {
+    ATRACE_NAME("CameraHal::flush");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    auto err = mHidlSession->flush();
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        res = DEAD_OBJECT;
+    } else {
+        res = HidlProviderInfo::mapToStatusT(err);
+    }
+
+    return res;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::dump(int /*fd*/) {
+    ATRACE_NAME("CameraHal::dump");
+    if (!valid()) return INVALID_OPERATION;
+
+    // Handled by CameraProviderManager::dump
+
+    return OK;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::repeatingRequestEnd(uint32_t /*frameNumber*/,
+        const std::vector<int32_t> &/*streamIds*/) {
+    ATRACE_NAME("CameraHal::repeatingRequestEnd");
+    return INVALID_OPERATION;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::close() {
+    ATRACE_NAME("CameraHal::close()");
+    if (!valid()) return INVALID_OPERATION;
+    status_t res = OK;
+
+    auto err = mHidlSession->close();
+    // Interface will be dead shortly anyway, so don't log errors
+    if (!err.isOk()) {
+        res = DEAD_OBJECT;
+    }
+
+    return res;
+}
+
+void HidlCamera3Device::HidlHalInterface::signalPipelineDrain(const std::vector<int>& streamIds) {
+    ATRACE_NAME("CameraHal::signalPipelineDrain");
+    if (!valid() || mHidlSession_3_5 == nullptr) {
+        ALOGE("%s called on invalid camera!", __FUNCTION__);
+        return;
+    }
+
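+    // signalStreamFlush() takes the counter of the stream configuration being drained;
+    // mNextStreamConfigCounter already points at the next configuration, so pass the
+    // previous value.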
+    auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter - 1);
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        return;
+    }
+}
+
+status_t HidlCamera3Device::HidlHalInterface::processBatchCaptureRequests(
+        std::vector<camera_capture_request_t*>& requests,/*out*/uint32_t* numRequestProcessed) {
+    ATRACE_NAME("CameraHal::processBatchCaptureRequests");
+    if (!valid()) return INVALID_OPERATION;
+
+    sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
+    sp<device::V3_7::ICameraDeviceSession> hidlSession_3_7;
+    auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_7.isOk()) {
+        hidlSession_3_7 = castResult_3_7;
+    }
+    auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_4.isOk()) {
+        hidlSession_3_4 = castResult_3_4;
+    }
+
+    hardware::hidl_vec<device::V3_2::CaptureRequest> captureRequests;
+    hardware::hidl_vec<device::V3_4::CaptureRequest> captureRequests_3_4;
+    hardware::hidl_vec<device::V3_7::CaptureRequest> captureRequests_3_7;
+    size_t batchSize = requests.size();
+    if (hidlSession_3_7 != nullptr) {
+        captureRequests_3_7.resize(batchSize);
+    } else if (hidlSession_3_4 != nullptr) {
+        captureRequests_3_4.resize(batchSize);
+    } else {
+        captureRequests.resize(batchSize);
+    }
+    std::vector<native_handle_t*> handlesCreated;
+    std::vector<std::pair<int32_t, int32_t>> inflightBuffers;
+
+    status_t res = OK;
+    for (size_t i = 0; i < batchSize; i++) {
+        if (hidlSession_3_7 != nullptr) {
+            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_7[i].v3_4.v3_2,
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
+        } else if (hidlSession_3_4 != nullptr) {
+            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
+        } else {
+            res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests[i],
+                    /*out*/&handlesCreated, /*out*/&inflightBuffers);
+        }
+        if (res != OK) {
+            mBufferRecords.popInflightBuffers(inflightBuffers);
+            cleanupNativeHandles(&handlesCreated);
+            return res;
+        }
+    }
+
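+    // Tell the HAL about buffers freed since the previous submission so it can drop its
+    // cached entries along with this batch.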
+    std::vector<device::V3_2::BufferCache> cachesToRemove;
+    {
+        std::lock_guard<std::mutex> lock(mFreedBuffersLock);
+        for (auto& pair : mFreedBuffers) {
+            // The stream might have been removed since onBufferFreed
+            if (mBufferRecords.isStreamCached(pair.first)) {
+                cachesToRemove.push_back({pair.first, pair.second});
+            }
+        }
+        mFreedBuffers.clear();
+    }
+
+    common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
+    *numRequestProcessed = 0;
+
+    // Write metadata to FMQ.
+    for (size_t i = 0; i < batchSize; i++) {
+        camera_capture_request_t* request = requests[i];
+        device::V3_2::CaptureRequest* captureRequest;
+        if (hidlSession_3_7 != nullptr) {
+            captureRequest = &captureRequests_3_7[i].v3_4.v3_2;
+        } else if (hidlSession_3_4 != nullptr) {
+            captureRequest = &captureRequests_3_4[i].v3_2;
+        } else {
+            captureRequest = &captureRequests[i];
+        }
+
+        if (request->settings != nullptr) {
+            size_t settingsSize = get_camera_metadata_size(request->settings);
+            if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
+                    reinterpret_cast<const uint8_t*>(request->settings), settingsSize)) {
+                captureRequest->settings.resize(0);
+                captureRequest->fmqSettingsSize = settingsSize;
+            } else {
+                if (mRequestMetadataQueue != nullptr) {
+                    ALOGW("%s: couldn't use fmq; falling back to hwbinder", __FUNCTION__);
+                }
+                captureRequest->settings.setToExternal(
+                        reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
+                                request->settings)),
+                        get_camera_metadata_size(request->settings));
+                captureRequest->fmqSettingsSize = 0u;
+            }
+        } else {
+            // A null request settings maps to a size-0 CameraMetadata
+            captureRequest->settings.resize(0);
+            captureRequest->fmqSettingsSize = 0u;
+        }
+
+        // hidl session 3.7 specific handling.
+        if (hidlSession_3_7 != nullptr) {
+            captureRequests_3_7[i].inputWidth = request->input_width;
+            captureRequests_3_7[i].inputHeight = request->input_height;
+        }
+
+        // hidl session 3.7 and 3.4 specific handling.
+        if (hidlSession_3_7 != nullptr || hidlSession_3_4 != nullptr) {
+            hardware::hidl_vec<device::V3_4::PhysicalCameraSetting>& physicalCameraSettings =
+                    (hidlSession_3_7 != nullptr) ?
+                    captureRequests_3_7[i].v3_4.physicalCameraSettings :
+                    captureRequests_3_4[i].physicalCameraSettings;
+            physicalCameraSettings.resize(request->num_physcam_settings);
+            for (size_t j = 0; j < request->num_physcam_settings; j++) {
+                if (request->physcam_settings != nullptr) {
+                    size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
+                    if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
+                                reinterpret_cast<const uint8_t*>(request->physcam_settings[j]),
+                                settingsSize)) {
+                        physicalCameraSettings[j].settings.resize(0);
+                        physicalCameraSettings[j].fmqSettingsSize = settingsSize;
+                    } else {
+                        if (mRequestMetadataQueue != nullptr) {
+                            ALOGW("%s: couldn't use fmq; falling back to hwbinder", __FUNCTION__);
+                        }
+                        physicalCameraSettings[j].settings.setToExternal(
+                                reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
+                                        request->physcam_settings[j])),
+                                get_camera_metadata_size(request->physcam_settings[j]));
+                        physicalCameraSettings[j].fmqSettingsSize = 0u;
+                    }
+                } else {
+                    physicalCameraSettings[j].fmqSettingsSize = 0u;
+                    physicalCameraSettings[j].settings.resize(0);
+                }
+                physicalCameraSettings[j].physicalCameraId = request->physcam_id[j];
+            }
+        }
+    }
+
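+    // The HIDL processCaptureRequest* calls are synchronous; the callback reports the HAL
+    // status and how many requests in the batch were accepted.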
+    hardware::details::return_status err;
+    auto resultCallback =
+        [&status, &numRequestProcessed] (auto s, uint32_t n) {
+                status = s;
+                *numRequestProcessed = n;
+        };
+    if (hidlSession_3_7 != nullptr) {
+        err = hidlSession_3_7->processCaptureRequest_3_7(captureRequests_3_7, cachesToRemove,
+                                                         resultCallback);
+    } else if (hidlSession_3_4 != nullptr) {
+        err = hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
+                                                         resultCallback);
+    } else {
+        err = mHidlSession->processCaptureRequest(captureRequests, cachesToRemove,
+                                                  resultCallback);
+    }
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        status = common::V1_0::Status::CAMERA_DISCONNECTED;
+    }
+
+    if (status == common::V1_0::Status::OK && *numRequestProcessed != batchSize) {
+        ALOGE("%s: processCaptureRequest returns OK but processed %d/%zu requests",
+                __FUNCTION__, *numRequestProcessed, batchSize);
+        status = common::V1_0::Status::INTERNAL_ERROR;
+    }
+
+    res = HidlProviderInfo::mapToStatusT(status);
+    if (res == OK) {
+        if (mHidlSession->isRemote()) {
+            // Only close acquire fence FDs when the HIDL transaction succeeds (so the FDs have been
+            // sent to camera HAL processes)
+            cleanupNativeHandles(&handlesCreated, /*closeFd*/true);
+        } else {
+            // In passthrough mode the FDs are now owned by HAL
+            cleanupNativeHandles(&handlesCreated);
+        }
+    } else {
+        mBufferRecords.popInflightBuffers(inflightBuffers);
+        cleanupNativeHandles(&handlesCreated);
+    }
+    return res;
+}
+
+status_t HidlCamera3Device::HidlHalInterface::switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
+        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords) {
+    ATRACE_NAME("CameraHal::switchToOffline");
+    if (!valid() || mHidlSession_3_6 == nullptr) {
+        ALOGE("%s called on invalid camera!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    if (offlineSessionInfo == nullptr || offlineSession == nullptr || bufferRecords == nullptr) {
+        ALOGE("%s: output arguments must not be null!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
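+    // switchToOffline() reports its status, the offline session info, and the offline
+    // session object through a synchronous callback.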
+    common::V1_0::Status status = common::V1_0::Status::INTERNAL_ERROR;
+    auto resultCallback =
+        [&status, &offlineSessionInfo, &offlineSession] (auto s, auto info, auto session) {
+                status = s;
+                *offlineSessionInfo = info;
+                *offlineSession = session;
+        };
+    auto err = mHidlSession_3_6->switchToOffline(streamsToKeep, resultCallback);
+
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+        return DEAD_OBJECT;
+    }
+
+    status_t ret = HidlProviderInfo::mapToStatusT(status);
+    if (ret != OK) {
+        return ret;
+    }
+
+    return verifyBufferCaches(offlineSessionInfo, bufferRecords);
+}
+
+HidlCamera3Device::HidlRequestThread::HidlRequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
+                sp<HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute) :
+          RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
+                  supportCameraMute) {}
+
+status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
+        const std::vector<int32_t>& streamsToKeep,
+        /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
+        /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+        /*out*/camera3::BufferRecords* bufferRecords) {
+    Mutex::Autolock l(mRequestLock);
+    clearRepeatingRequestsLocked(/*lastFrameNumber*/nullptr);
+
+    // Wait until request thread is fully stopped
+    // TBD: check if request thread is being paused by other APIs (shouldn't be)
+
+    // We could also check for mRepeatingRequests.empty(), but the API interface
+    // is serialized by Camera3Device::mInterfaceLock so no one should be able to submit any
+    // new requests during the call; hence skip that check.
+    bool queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
+    while (!queueEmpty) {
+        status_t res = mRequestSubmittedSignal.waitRelative(mRequestLock, kRequestSubmitTimeout);
+        if (res == TIMED_OUT) {
+            ALOGE("%s: request thread failed to submit one request within timeout!", __FUNCTION__);
+            return res;
+        } else if (res != OK) {
+            ALOGE("%s: request thread failed to submit a request: %s (%d)!",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+        queueEmpty = mNextRequests.empty() && mRequestQueue.empty();
+    }
+    return (static_cast<HidlHalInterface *>(mInterface.get()))->switchToOffline(
+            streamsToKeep, offlineSessionInfo, offlineSession, bufferRecords);
+}
+
+status_t HidlCamera3Device::HidlCamera3DeviceInjectionMethods::injectionInitialize(
+        const String8& injectedCamId, sp<CameraProviderManager> manager,
+        const sp<android::hardware::camera::device::V3_2::ICameraDeviceCallback>&
+                callback) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mInjectionLock);
+
+    if (manager == nullptr) {
+        ALOGE("%s: manager does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    mInjectedCamId = injectedCamId;
+    sp<ICameraDeviceSession> session;
+    ATRACE_BEGIN("Injection CameraHal::openSession");
+    status_t res = manager->openHidlSession(injectedCamId.string(), callback,
+                                          /*out*/ &session);
+    ATRACE_END();
+    if (res != OK) {
+        ALOGE("Injection camera could not open camera session: %s (%d)",
+                strerror(-res), res);
+        return res;
+    }
+
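+    // Fetch the request and result metadata FMQs from the injected session before wrapping
+    // it in a HidlHalInterface.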
+    std::shared_ptr<RequestMetadataQueue> queue;
+    auto requestQueueRet =
+        session->getCaptureRequestMetadataQueue([&queue](const auto& descriptor) {
+            queue = std::make_shared<RequestMetadataQueue>(descriptor);
+            if (!queue->isValid() || queue->availableToWrite() <= 0) {
+                ALOGE("Injection camera HAL returns empty request metadata fmq, not "
+                        "use it");
+                queue = nullptr;
+                // don't use the queue onwards.
+            }
+        });
+    if (!requestQueueRet.isOk()) {
+        ALOGE("Injection camera transaction error when getting request metadata fmq: "
+                "%s, not use it", requestQueueRet.description().c_str());
+        return DEAD_OBJECT;
+    }
+
+    std::unique_ptr<ResultMetadataQueue>& resQueue = mInjectionResultMetadataQueue;
+    auto resultQueueRet = session->getCaptureResultMetadataQueue(
+        [&resQueue](const auto& descriptor) {
+            resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+            if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+                ALOGE("Injection camera HAL returns empty result metadata fmq, not use "
+                        "it");
+                resQueue = nullptr;
+                // Don't use the resQueue onwards.
+            }
+        });
+    if (!resultQueueRet.isOk()) {
+        ALOGE("Injection camera transaction error when getting result metadata queue "
+                "from camera session: %s", resultQueueRet.description().c_str());
+        return DEAD_OBJECT;
+    }
+    IF_ALOGV() {
+        session->interfaceChain(
+                [](::android::hardware::hidl_vec<::android::hardware::hidl_string>
+                        interfaceChain) {
+                        ALOGV("Injection camera session interface chain:");
+                        for (const auto& iface : interfaceChain) {
+                            ALOGV("  %s", iface.c_str());
+                        }
+                });
+    }
+
+    ALOGV("%s: Injection camera interface = new HalInterface()", __FUNCTION__);
+
+    mInjectedCamHalInterface =
+            new HidlHalInterface(session, queue, parent->mUseHalBufManager,
+                       parent->mSupportOfflineProcessing);
+    if (mInjectedCamHalInterface == nullptr) {
+        ALOGE("%s: mInjectedCamHalInterface does not exist!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    return OK;
+}
+
+status_t HidlCamera3Device::HidlCamera3DeviceInjectionMethods::replaceHalInterface(
+        sp<HalInterface> newHalInterface, bool keepBackup) {
+    Mutex::Autolock lock(mInjectionLock);
+    if (newHalInterface.get() == nullptr) {
+        ALOGE("%s: The newHalInterface does not exist, to stop replacing.",
+                __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    sp<Camera3Device> parent = mParent.promote();
+    if (parent == nullptr) {
+        ALOGE("%s: parent does not exist!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (newHalInterface->getTransportType() != IPCTransport::HIDL) {
+        ALOGE("%s Replacing HIDL HalInterface with another transport unsupported", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
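+    // While injection is active (keepBackup == true), stash the original interface and
+    // result FMQ; when injection ends, restore the backed-up result FMQ.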
+    HidlCamera3Device *hidlParent = static_cast<HidlCamera3Device *>(parent.get());
+    if (keepBackup) {
+        if (mBackupHalInterface == nullptr) {
+            mBackupHalInterface = parent->mInterface;
+        }
+        if (mBackupResultMetadataQueue == nullptr) {
+            mBackupResultMetadataQueue = std::move(hidlParent->mResultMetadataQueue);
+            hidlParent->mResultMetadataQueue = std::move(mInjectionResultMetadataQueue);
+        }
+    } else {
+        mBackupHalInterface = nullptr;
+        hidlParent->mResultMetadataQueue = std::move(mBackupResultMetadataQueue);
+        mBackupResultMetadataQueue = nullptr;
+    }
+    parent->mInterface = newHalInterface;
+
+    return OK;
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
new file mode 100644
index 0000000..2e98fe0
--- /dev/null
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -0,0 +1,237 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_HIDLCAMERA3DEVICE_H
+#define ANDROID_SERVERS_HIDLCAMERA3DEVICE_H
+
+#include "../Camera3Device.h"
+#include "HidlCamera3OutputUtils.h"
+
+namespace android {
+
+
+/**
+ * CameraDevice for HIDL HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 or higher.
+ */
+class HidlCamera3Device :
+            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback,
+            public Camera3Device {
+  public:
+
+    explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass,
+          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, legacyClient) { }
+
+    virtual ~HidlCamera3Device() {}
+
+    /**
+     * Helper functions to map between framework and HIDL values
+     */
+    static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
+    static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
+            android_dataspace dataSpace);
+    static hardware::camera::device::V3_2::BufferUsageFlags mapToConsumerUsage(uint64_t usage);
+    static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
+            camera_stream_rotation_t rotation);
+    // Returns a negative error code if the passed-in operation mode is not valid.
+    static status_t mapToStreamConfigurationMode(camera_stream_configuration_mode_t operationMode,
+            /*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
+    static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
+    static android_dataspace mapToFrameworkDataspace(
+            hardware::camera::device::V3_2::DataspaceFlags);
+    static uint64_t mapConsumerToFrameworkUsage(
+            hardware::camera::device::V3_2::BufferUsageFlags usage);
+    static uint64_t mapProducerToFrameworkUsage(
+            hardware::camera::device::V3_2::BufferUsageFlags usage);
+
+    status_t initialize(sp<CameraProviderManager> manager, const String8& monitorTags) override;
+
+    /**
+     * Implementation of android::hardware::camera::device::V3_5::ICameraDeviceCallback
+     */
+
+    hardware::Return<void> processCaptureResult_3_4(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_4::CaptureResult>& results) override;
+    hardware::Return<void> processCaptureResult(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_2::CaptureResult>& results) override;
+    hardware::Return<void> notify(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_2::NotifyMsg>& msgs) override;
+
+    hardware::Return<void> requestStreamBuffers(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+            requestStreamBuffers_cb _hidl_cb) override;
+
+    hardware::Return<void> returnStreamBuffers(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
+
+    // Handle one notify message
+    void notify(const hardware::camera::device::V3_2::NotifyMsg& msg);
+
+    status_t switchToOffline(const std::vector<int32_t>& streamsToKeep,
+            /*out*/ sp<CameraOfflineSessionBase>* session) override;
+
+    using RequestMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
+
+    class HidlHalInterface : public Camera3Device::HalInterface {
+     public:
+        HidlHalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session,
+                     std::shared_ptr<RequestMetadataQueue> queue,
+                     bool useHalBufManager, bool supportOfflineProcessing);
+
+        virtual IPCTransport getTransportType() const override { return IPCTransport::HIDL; }
+        // Returns true if constructed with a valid device or session, and not yet cleared
+        virtual bool valid() override;
+
+        // Reset this HalInterface object (does not call close())
+        virtual void clear() override;
+
+        // Calls into the HAL interface
+
+        // Caller takes ownership of requestTemplate
+        virtual status_t constructDefaultRequestSettings(camera_request_template templateId,
+                /*out*/ camera_metadata_t **requestTemplate) override;
+
+        virtual status_t configureStreams(const camera_metadata_t *sessionParams,
+                /*inout*/ camera_stream_configuration_t *config,
+                const std::vector<uint32_t>& bufferSizes) override;
+
+        // Configures the injection camera's streams with the HAL.
+        virtual status_t configureInjectedStreams(
+                const camera_metadata_t* sessionParams,
+                /*inout*/ camera_stream_configuration_t* config,
+                const std::vector<uint32_t>& bufferSizes,
+                const CameraMetadata& cameraCharacteristics) override;
+
+        // When the call succeeds, the ownership of acquire fences in requests is transferred to
+        // HalInterface. More specifically, the current implementation will send the fence to
+        // HAL process and close the FD in cameraserver process. When the call fails, the ownership
+        // of the acquire fence still belongs to the caller.
+        virtual status_t processBatchCaptureRequests(
+                std::vector<camera_capture_request_t*>& requests,
+                /*out*/uint32_t* numRequestProcessed) override;
+        virtual status_t flush() override;
+        virtual status_t dump(int fd) override;
+        virtual status_t close() override;
+
+        virtual void signalPipelineDrain(const std::vector<int>& streamIds) override;
+        virtual bool isReconfigurationRequired(CameraMetadata& oldSessionParams,
+                CameraMetadata& newSessionParams) override;
+
+        virtual status_t repeatingRequestEnd(uint32_t frameNumber,
+                const std::vector<int32_t> &streamIds) override;
+
+        status_t switchToOffline(
+                const std::vector<int32_t>& streamsToKeep,
+                /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
+                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+                /*out*/camera3::BufferRecords* bufferRecords);
+
+     private:
+
+        // Always valid
+        sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
+        // Valid if ICameraDeviceSession is @3.3 or newer
+        sp<hardware::camera::device::V3_3::ICameraDeviceSession> mHidlSession_3_3;
+        // Valid if ICameraDeviceSession is @3.4 or newer
+        sp<hardware::camera::device::V3_4::ICameraDeviceSession> mHidlSession_3_4;
+        // Valid if ICameraDeviceSession is @3.5 or newer
+        sp<hardware::camera::device::V3_5::ICameraDeviceSession> mHidlSession_3_5;
+        // Valid if ICameraDeviceSession is @3.6 or newer
+        sp<hardware::camera::device::V3_6::ICameraDeviceSession> mHidlSession_3_6;
+        // Valid if ICameraDeviceSession is @3.7 or newer
+        sp<hardware::camera::device::V3_7::ICameraDeviceSession> mHidlSession_3_7;
+
+        std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
+
+        // The output HIDL request still references the input camera_capture_request_t;
+        // do not free the input request before the output HIDL request has been sent.
+        status_t wrapAsHidlRequest(camera_capture_request_t* in,
+                /*out*/hardware::camera::device::V3_2::CaptureRequest* out,
+                /*out*/std::vector<native_handle_t*>* handlesCreated,
+                /*out*/std::vector<std::pair<int32_t, int32_t>>* inflightBuffers);
+    }; // class HidlHalInterface
+
+    class HidlRequestThread : public Camera3Device::RequestThread {
+      public:
+        HidlRequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
+                sp<HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute);
+
+        status_t switchToOffline(
+                const std::vector<int32_t>& streamsToKeep,
+                /*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
+                /*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
+                /*out*/camera3::BufferRecords* bufferRecords);
+    }; // class HidlRequestThread
+
+    class HidlCamera3DeviceInjectionMethods : public Camera3DeviceInjectionMethods {
+     public:
+        // Initialize the injection camera and create a HAL interface for it.
+        status_t injectionInitialize(
+                const String8& injectedCamId, sp<CameraProviderManager> manager,
+                const sp<
+                    android::hardware::camera::device::V3_2::ICameraDeviceCallback>&
+                    callback);
+        HidlCamera3DeviceInjectionMethods(wp<Camera3Device> parent) :
+                Camera3DeviceInjectionMethods(parent) { };
+        ~HidlCamera3DeviceInjectionMethods() {}
+     private:
+        // Backup of the original camera hal result FMQ.
+        std::unique_ptr<ResultMetadataQueue> mBackupResultMetadataQueue;
+
+        // FMQ that the injection camera HAL writes results to. Must be guarded by
+        // mProcessCaptureResultLock.
+        std::unique_ptr<ResultMetadataQueue> mInjectionResultMetadataQueue;
+
+        // Replace the original camera HAL interface with the injection camera's,
+        // backing up the original so it can be restored later.
+        virtual status_t replaceHalInterface(sp<HalInterface> newHalInterface,
+                bool keepBackup) override;
+    };
+
+  private:
+    template<typename NotifyMsgType>
+    hardware::Return<void> notifyHelper(
+            const hardware::hidl_vec<NotifyMsgType>& msgs);
+
+    virtual status_t injectionCameraInitialize(const String8 &injectCamId,
+            sp<CameraProviderManager> manager) override;
+
+    virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> parent,
+                sp<camera3::StatusTracker> statusTracker,
+                sp<HalInterface> interface,
+                const Vector<int32_t>& sessionParamKeys,
+                bool useHalBufManager,
+                bool supportCameraMute) override;
+
+    virtual sp<Camera3DeviceInjectionMethods>
+            createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
+
+    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+    std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+}; // class HidlCamera3Device
+
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
new file mode 100644
index 0000000..5c97f0e
--- /dev/null
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Hidl-Camera3-OffLnSsn"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <inttypes.h>
+
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include "device3/hidl/HidlCamera3OfflineSession.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/hidl/HidlCamera3OutputUtils.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3SharedOutputStream.h"
+#include "utils/CameraTraces.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+
+HidlCamera3OfflineSession::~HidlCamera3OfflineSession() {
+    ATRACE_CALL();
+    ALOGV("%s: Tearing down hidl offline session for camera id %s", __FUNCTION__, mId.string());
+    HidlCamera3OfflineSession::disconnectSession();
+}
+
+status_t HidlCamera3OfflineSession::initialize(wp<NotificationListener> listener) {
+    ATRACE_CALL();
+
+    if (mSession == nullptr) {
+        ALOGE("%s: HIDL session is null!", __FUNCTION__);
+        return DEAD_OBJECT;
+    }
+
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+
+        mListener = listener;
+
+        // setup result FMQ
+        std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
+        auto resultQueueRet = mSession->getCaptureResultMetadataQueue(
+            [&resQueue](const auto& descriptor) {
+                resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
+                if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
+                    ALOGE("HAL returns empty result metadata fmq, not use it");
+                    resQueue = nullptr;
+                    // Don't use resQueue onwards.
+                }
+            });
+        if (!resultQueueRet.isOk()) {
+            ALOGE("Transaction error when getting result metadata queue from camera session: %s",
+                    resultQueueRet.description().c_str());
+            return DEAD_OBJECT;
+        }
+        mStatus = STATUS_ACTIVE;
+    }
+
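+    // Start receiving capture results and notify messages from the offline session.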
+    mSession->setCallback(this);
+
+    return OK;
+}
+
+hardware::Return<void> HidlCamera3OfflineSession::processCaptureResult_3_4(
+        const hardware::hidl_vec<
+                hardware::camera::device::V3_4::CaptureResult>& results) {
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+        listener = mListener.promote();
+    }
+
+    HidlCaptureOutputStates states {
+      {mId,
+        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+    };
+
+    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
+    }
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3OfflineSession::processCaptureResult(
+        const hardware::hidl_vec<
+                hardware::camera::device::V3_2::CaptureResult>& results) {
+    // TODO: change the impl to call into processCaptureResult_3_4 instead?
+    //       Might need to figure out how to reduce copying, though.
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+        listener = mListener.promote();
+    }
+
+    hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+
+    HidlCaptureOutputStates states {
+      {mId,
+        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+    };
+
+    std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
+    for (const auto& result : results) {
+        processOneCaptureResultLocked(states, result, noPhysMetadata);
+    }
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3OfflineSession::notify(
+        const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
+    sp<NotificationListener> listener;
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+        listener = mListener.promote();
+    }
+
+    HidlCaptureOutputStates states {
+      {mId,
+        mOfflineReqsLock, mLastCompletedRegularFrameNumber,
+        mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
+        mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
+        mNextShutterFrameNumber,
+        mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
+        mNextResultFrameNumber,
+        mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
+        mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
+        mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
+    };
+    for (const auto& msg : msgs) {
+        camera3::notify(states, msg);
+    }
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3OfflineSession::requestStreamBuffers(
+        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+        requestStreamBuffers_cb _hidl_cb) {
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+    }
+
+    RequestBufferStates states {
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        *this, mBufferRecords, *this};
+    camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
+    return hardware::Void();
+}
+
+hardware::Return<void> HidlCamera3OfflineSession::returnStreamBuffers(
+        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+    {
+        std::lock_guard<std::mutex> lock(mLock);
+        if (mStatus != STATUS_ACTIVE) {
+            ALOGE("%s called in wrong state %d", __FUNCTION__, mStatus);
+            return hardware::Void();
+        }
+    }
+
+    ReturnBufferStates states {
+        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, mBufferRecords};
+
+    camera3::returnStreamBuffers(states, buffers);
+    return hardware::Void();
+}
+
+void HidlCamera3OfflineSession::disconnectSession() {
+    // TODO: Make sure this locking is correct.
+    std::lock_guard<std::mutex> lock(mLock);
+    if (mSession != nullptr) {
+        mSession->close();
+    }
+    mSession.clear();
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
new file mode 100644
index 0000000..597cc5d
--- /dev/null
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_HIDL_CAMERA3OFFLINESESSION_H
+#define ANDROID_SERVERS_HIDL_CAMERA3OFFLINESESSION_H
+
+#include <memory>
+#include <mutex>
+
+#include <utils/String8.h>
+#include <utils/String16.h>
+
+#include <android/hardware/camera/device/3.6/ICameraOfflineSession.h>
+
+#include <fmq/MessageQueue.h>
+
+#include "HidlCamera3OutputUtils.h"
+#include "common/CameraOfflineSessionBase.h"
+
+#include "device3/Camera3BufferManager.h"
+#include "device3/Camera3OfflineSession.h"
+#include "device3/InFlightRequest.h"
+
+namespace android {
+
+namespace camera3 {
+
+class Camera3Stream;
+class Camera3OutputStreamInterface;
+class Camera3StreamInterface;
+
+} // namespace camera3
+
+/**
+ * HidlCamera3OfflineSession for offline session defined in HIDL ICameraOfflineSession@3.6 or higher
+ */
+class HidlCamera3OfflineSession :
+            public Camera3OfflineSession,
+            virtual public hardware::camera::device::V3_5::ICameraDeviceCallback {
+  public:
+
+    // initialize by Camera3Device.
+    explicit HidlCamera3OfflineSession(const String8& id,
+            const sp<camera3::Camera3Stream>& inputStream,
+            const camera3::StreamSet& offlineStreamSet,
+            camera3::BufferRecords&& bufferRecords,
+            const camera3::InFlightRequestMap& offlineReqs,
+            const Camera3OfflineStates& offlineStates,
+            sp<hardware::camera::device::V3_6::ICameraOfflineSession> offlineSession) :
+      Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
+              offlineReqs, offlineStates),
+      mSession(offlineSession) {};
+
+    virtual ~HidlCamera3OfflineSession();
+
+    virtual status_t initialize(wp<NotificationListener> listener) override;
+
+    /**
+     * HIDL ICameraDeviceCallback interface
+     * Implementation of android::hardware::camera::device::V3_5::ICameraDeviceCallback
+     */
+
+    hardware::Return<void> processCaptureResult_3_4(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_4::CaptureResult>& results) override;
+    hardware::Return<void> processCaptureResult(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_2::CaptureResult>& results) override;
+    hardware::Return<void> notify(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_2::NotifyMsg>& msgs) override;
+
+    hardware::Return<void> requestStreamBuffers(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+            requestStreamBuffers_cb _hidl_cb) override;
+
+    hardware::Return<void> returnStreamBuffers(
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_2::StreamBuffer>& buffers) override;
+
+    /**
+     * End of CameraOfflineSessionBase interface
+     */
+
+  private:
+    sp<hardware::camera::device::V3_6::ICameraOfflineSession> mSession;
+    // FMQ to write result on. Must be guarded by mProcessCaptureResultLock.
+    std::unique_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+    virtual void disconnectSession() override;
+}; // class HidlCamera3OfflineSession
+
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
new file mode 100644
index 0000000..8b0cd65
--- /dev/null
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
@@ -0,0 +1,261 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HidlCamera3-OutputUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState(   \
+    "%s: " fmt, __FUNCTION__,                         \
+    ##__VA_ARGS__)
+
+#include <inttypes.h>
+
+#include <utils/Log.h>
+#include <utils/SortedVector.h>
+#include <utils/Trace.h>
+
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+
+#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
+
+#include <camera/CameraUtils.h>
+#include <camera_metadata_hidden.h>
+
+#include "device3/hidl/HidlCamera3OutputUtils.h"
+#include "device3/aidl/AidlCamera3OutputUtils.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3OutputUtilsTemplated.h"
+
+#include "system/camera_metadata.h"
+
+using namespace android::camera3;
+using namespace android::hardware::camera;
+
+namespace android {
+namespace camera3 {
+
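+// Thin wrapper around the shared Camera3OutputUtilsTemplated implementation, instantiated
+// for the HIDL @3.2 capture result and @3.4 physical camera metadata types.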
+void processOneCaptureResultLocked(
+        HidlCaptureOutputStates& states,
+        const hardware::camera::device::V3_2::CaptureResult& result,
+        const hardware::hidl_vec<
+                hardware::camera::device::V3_4::PhysicalCameraMetadata> &physicalCameraMetadata) {
+    processOneCaptureResultLockedT<HidlCaptureOutputStates,
+        hardware::camera::device::V3_2::CaptureResult,
+        hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata>,
+        hardware::hidl_vec<uint8_t>, ResultMetadataQueue,
+        hardware::camera::device::V3_2::BufferStatus>(states, result, physicalCameraMetadata);
+}
+
+void notify(CaptureOutputStates& states,
+        const hardware::camera::device::V3_2::NotifyMsg& msg) {
+
+    using android::hardware::camera::device::V3_2::MsgType;
+    using android::hardware::camera::device::V3_2::ErrorCode;
+
+    ATRACE_CALL();
+    camera_notify_msg m;
+    switch (msg.type) {
+        case MsgType::ERROR:
+            m.type = CAMERA_MSG_ERROR;
+            m.message.error.frame_number = msg.msg.error.frameNumber;
+            if (msg.msg.error.errorStreamId >= 0) {
+                sp<Camera3StreamInterface> stream =
+                        states.outputStreams.get(msg.msg.error.errorStreamId);
+                if (stream == nullptr) {
+                    ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
+                            m.message.error.frame_number, msg.msg.error.errorStreamId);
+                    return;
+                }
+                m.message.error.error_stream = stream->asHalStream();
+            } else {
+                m.message.error.error_stream = nullptr;
+            }
+            switch (msg.msg.error.errorCode) {
+                case ErrorCode::ERROR_DEVICE:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_DEVICE;
+                    break;
+                case ErrorCode::ERROR_REQUEST:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_REQUEST;
+                    break;
+                case ErrorCode::ERROR_RESULT:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_RESULT;
+                    break;
+                case ErrorCode::ERROR_BUFFER:
+                    m.message.error.error_code = CAMERA_MSG_ERROR_BUFFER;
+                    break;
+            }
+            break;
+        case MsgType::SHUTTER:
+            m.type = CAMERA_MSG_SHUTTER;
+            m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
+            m.message.shutter.timestamp = msg.msg.shutter.timestamp;
+            m.message.shutter.readout_timestamp = 0LL;
+            break;
+    }
+    notify(states, &m);
+}
+
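+// The HIDL requestStreamBuffers() path below reuses the shared AIDL implementation:
+// incoming @3.5 BufferRequests are converted to their AIDL equivalents, handled there, and
+// the results converted back to HIDL before invoking the callback.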
+static void convertToAidl(
+        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& hidlBufReqs,
+        std::vector<aidl::android::hardware::camera::device::BufferRequest> &aidlBufReqs) {
+    size_t i = 0;
+    aidlBufReqs.resize(hidlBufReqs.size());
+    for (const auto &hidlBufReq : hidlBufReqs) {
+        aidlBufReqs[i].streamId = hidlBufReq.streamId;
+        aidlBufReqs[i].numBuffersRequested = hidlBufReq.numBuffersRequested;
+        i++;
+    }
+}
+
+static hardware::camera::device::V3_5::StreamBufferRequestError
+convertToHidl(aidl::android::hardware::camera::device::StreamBufferRequestError aError) {
+    using AError = aidl::android::hardware::camera::device::StreamBufferRequestError;
+    using HError = hardware::camera::device::V3_5::StreamBufferRequestError;
+
+    switch(aError) {
+        case AError::NO_BUFFER_AVAILABLE:
+            return HError::NO_BUFFER_AVAILABLE;
+        case AError::MAX_BUFFER_EXCEEDED:
+            return HError::MAX_BUFFER_EXCEEDED;
+        case AError::STREAM_DISCONNECTED:
+            return HError::STREAM_DISCONNECTED;
+        default:
+            return HError::UNKNOWN_ERROR;
+    }
+}
+
+static hardware::camera::device::V3_5::BufferRequestStatus
+convertToHidl(const aidl::android::hardware::camera::device::BufferRequestStatus &aBufStatus) {
+    using AStatus = aidl::android::hardware::camera::device::BufferRequestStatus;
+    using HStatus = hardware::camera::device::V3_5::BufferRequestStatus;
+    switch (aBufStatus) {
+        case AStatus::OK:
+            return HStatus::OK;
+        case AStatus::FAILED_PARTIAL:
+            return HStatus::FAILED_PARTIAL;
+        case AStatus::FAILED_CONFIGURING:
+            return HStatus::FAILED_CONFIGURING;
+        case AStatus::FAILED_ILLEGAL_ARGUMENTS:
+            return HStatus::FAILED_ILLEGAL_ARGUMENTS;
+        case AStatus::FAILED_UNKNOWN:
+            return HStatus::FAILED_UNKNOWN;
+    }
+    return HStatus::FAILED_UNKNOWN;
+}
+
+static hardware::camera::device::V3_2::BufferStatus
+convertToHidl(const aidl::android::hardware::camera::device::BufferStatus &aBufStatus) {
+    using AStatus = aidl::android::hardware::camera::device::BufferStatus;
+    using HStatus = hardware::camera::device::V3_2::BufferStatus;
+    switch (aBufStatus) {
+        case AStatus::OK:
+            return HStatus::OK;
+        case AStatus::ERROR:
+            return HStatus::ERROR;
+    }
+    return HStatus::ERROR;
+}
+
+static native_handle_t *convertToHidl(const aidl::android::hardware::common::NativeHandle &ah,
+        std::vector<native_handle_t *> &handlesCreated) {
+    if (isHandleNull(ah)) {
+        return nullptr;
+    }
+    native_handle_t *nh = makeFromAidl(ah);
+    handlesCreated.emplace_back(nh);
+    return nh;
+}
+
+static void convertToHidl(
+        const std::vector<aidl::android::hardware::camera::device::StreamBuffer> &aBuffers,
+        hardware::camera::device::V3_5::StreamBuffersVal &hBuffersVal,
+        std::vector<native_handle_t *> &handlesCreated) {
+    using HStreamBuffer = hardware::camera::device::V3_2::StreamBuffer;
+    hardware::hidl_vec<HStreamBuffer> tmpBuffers(aBuffers.size());
+    size_t i = 0;
+    for (const auto &aBuf : aBuffers) {
+        tmpBuffers[i].status = convertToHidl(aBuf.status);
+        tmpBuffers[i].streamId = aBuf.streamId;
+        tmpBuffers[i].bufferId = aBuf.bufferId;
+        tmpBuffers[i].buffer = convertToHidl(aBuf.buffer, handlesCreated);
+        tmpBuffers[i].acquireFence = convertToHidl(aBuf.acquireFence, handlesCreated);
+        tmpBuffers[i].releaseFence = convertToHidl(aBuf.releaseFence, handlesCreated);
+        i++;
+    }
+    hBuffersVal.buffers(std::move(tmpBuffers));
+}
+
+static void convertToHidl(
+        const std::vector<aidl::android::hardware::camera::device::StreamBufferRet> &aidlBufRets,
+        hardware::hidl_vec<hardware::camera::device::V3_5::StreamBufferRet> &hidlBufRets,
+        std::vector<native_handle_t *> &handlesCreated) {
+    size_t i = 0;
+    using Tag = aidl::android::hardware::camera::device::StreamBuffersVal::Tag;
+    hidlBufRets.resize(aidlBufRets.size());
+    for (const auto &aidlBufRet : aidlBufRets) {
+        auto &hidlBufRet = hidlBufRets[i];
+        hidlBufRet.streamId = aidlBufRet.streamId;
+        switch(aidlBufRet.val.getTag()) {
+          case Tag::error:
+              hidlBufRet.val.error(convertToHidl(aidlBufRet.val.get<Tag::error>()));
+              break;
+          case Tag::buffers:
+              convertToHidl(aidlBufRet.val.get<Tag::buffers>(), hidlBufRet.val, handlesCreated);
+              break;
+        }
+        i++;
+    }
+}
+
+// The buffers requested through this call are not tied to any particular CaptureRequest.
+// The HAL may use them for a particular frame's output buffer or for its own internal use.
+// If the HAL does use a buffer from the requested list for a frame's output, that buffer
+// is returned with the processCaptureResult call for that frame; the remaining buffers are
+// returned through returnStreamBuffers. Buffers returned via returnStreamBuffers do not
+// carry a valid timestamp (it is 0) and will be dropped by the BufferQueue.
+void requestStreamBuffers(RequestBufferStates& states,
+        const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+        hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
+    using android::hardware::camera::device::V3_2::BufferStatus;
+    using android::hardware::camera::device::V3_2::StreamBuffer;
+    using android::hardware::camera::device::V3_5::BufferRequestStatus;
+    using android::hardware::camera::device::V3_5::StreamBufferRet;
+    using android::hardware::camera::device::V3_5::StreamBufferRequestError;
+    std::vector<aidl::android::hardware::camera::device::BufferRequest> aidlBufReqs;
+    hardware::hidl_vec<hardware::camera::device::V3_5::StreamBufferRet> hidlBufRets;
+    convertToAidl(bufReqs, aidlBufReqs);
+    std::vector<::aidl::android::hardware::camera::device::StreamBufferRet> aidlBufRets;
+    ::aidl::android::hardware::camera::device::BufferRequestStatus aidlBufRetStatus;
+
+    requestStreamBuffers(states, aidlBufReqs, &aidlBufRets, &aidlBufRetStatus);
+    std::vector<native_handle_t *> handlesCreated;
+    convertToHidl(aidlBufRets, hidlBufRets, handlesCreated);
+    _hidl_cb(convertToHidl(aidlBufRetStatus), hidlBufRets);
+    Camera3Device::cleanupNativeHandles(&handlesCreated);
+}
+
+void returnStreamBuffers(ReturnBufferStates& states,
+        const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
+    returnStreamBuffersT(states, buffers);
+}
+
+} // namespace camera3
+} // namespace android
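Note on the handle lifecycle in requestStreamBuffers above: every native_handle_t produced
while converting the AIDL buffer structures back to HIDL (via makeFromAidl) is collected in
handlesCreated and released only after the synchronous HIDL callback has run. A minimal
sketch of that ownership pattern, using only the names visible in this patch (everything
else elided):

    // Sketch: temporaries created during AIDL -> HIDL conversion are tracked and
    // freed once the HIDL client has consumed the converted data.
    std::vector<native_handle_t *> handlesCreated;
    hardware::hidl_vec<hardware::camera::device::V3_5::StreamBufferRet> hidlBufRets;
    convertToHidl(aidlBufRets, hidlBufRets, handlesCreated);  // may call makeFromAidl()
    _hidl_cb(convertToHidl(aidlBufRetStatus), hidlBufRets);   // callback copies what it needs
    Camera3Device::cleanupNativeHandles(&handlesCreated);     // release the temporaries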
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h
new file mode 100644
index 0000000..5e6cba6
--- /dev/null
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_HIDL_CAMERA3_OUTPUT_UTILS_H
+#define ANDROID_SERVERS_HIDL_CAMERA3_OUTPUT_UTILS_H
+
+#include <memory>
+#include <mutex>
+
+#include <cutils/native_handle.h>
+
+#include <fmq/MessageQueue.h>
+
+#include <common/CameraDeviceBase.h>
+
+#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
+
+#include "device3/BufferUtils.h"
+#include "device3/InFlightRequest.h"
+#include "device3/Camera3Stream.h"
+#include "device3/Camera3OutputUtils.h"
+
+namespace android {
+
+using ResultMetadataQueue = hardware::MessageQueue<uint8_t, hardware::kSynchronizedReadWrite>;
+
+namespace camera3 {
+
+    /**
+     * Helper methods shared between HidlCamera3Device/HidlCamera3OfflineSession for HAL callbacks
+     */
+    // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
+    // callbacks
+    struct HidlCaptureOutputStates : public CaptureOutputStates {
+        std::unique_ptr<ResultMetadataQueue>& fmq;
+    };
+
+    // Handle one capture result. Assume callers hold the lock to serialize all
+    // processCaptureResult calls
+    void processOneCaptureResultLocked(
+            HidlCaptureOutputStates& states,
+            const hardware::camera::device::V3_2::CaptureResult& result,
+            const hardware::hidl_vec<
+                    hardware::camera::device::V3_4::PhysicalCameraMetadata>
+                            &physicalCameraMetadata);
+
+    // Handle one notify message
+    void notify(CaptureOutputStates& states,
+            const hardware::camera::device::V3_2::NotifyMsg& msg);
+    void requestStreamBuffers(RequestBufferStates& states,
+            const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
+            hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb
+                    _hidl_cb);
+    void returnStreamBuffers(ReturnBufferStates& states,
+            const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers);
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index f063506..3392db1 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -16,7 +16,7 @@
 #include <hardware/camera.h>
 
 #include <hidl/AidlCameraDeviceCallbacks.h>
-#include <hidl/Convert.h>
+#include <hidl/Utils.h>
 
 namespace android {
 namespace frameworks {
@@ -34,7 +34,7 @@
 
 H2BCameraDeviceCallbacks::H2BCameraDeviceCallbacks(const sp<HalInterface>& base) : CBase(base) { }
 
-bool H2BCameraDeviceCallbacks::initializeLooper() {
+bool H2BCameraDeviceCallbacks::initializeLooper(int vndkVersion) {
     mCbLooper = new ALooper;
     mCbLooper->setName("cs-looper");
     status_t err = mCbLooper->start(/*runOnCallingThread*/ false, /*canCallJava*/ false,
@@ -43,7 +43,7 @@
         ALOGE("Unable to start camera device callback looper");
         return false;
     }
-    mHandler = new CallbackHandler(this);
+    mHandler = new CallbackHandler(this, vndkVersion);
     mCbLooper->registerHandler(mHandler);
     return true;
 }
@@ -144,6 +144,12 @@
 
     // Convert Metadata into HCameraMetadata;
     FmqSizeOrMetadata hResult;
+    using hardware::cameraservice::utils::conversion::filterVndkKeys;
+    if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
+        ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
+                __FUNCTION__);
+        return;
+    }
     const camera_metadata_t *rawMetadata = result.getAndLock();
     converter->convertResultMetadataToHidl(rawMetadata, &hResult);
     result.unlock(rawMetadata);
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
index dbf520a..152002b 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
@@ -61,7 +61,7 @@
 
     ~H2BCameraDeviceCallbacks();
 
-    bool initializeLooper();
+    bool initializeLooper(int vndkVersion);
 
     virtual binder::Status onDeviceError(int32_t errorCode,
                                          const CaptureResultExtras& resultExtras) override;
@@ -103,10 +103,12 @@
     struct CallbackHandler : public AHandler {
         public:
             void onMessageReceived(const sp<AMessage> &msg) override;
-            CallbackHandler(H2BCameraDeviceCallbacks *converter) : mConverter(converter) { }
+            CallbackHandler(H2BCameraDeviceCallbacks *converter, int vndkVersion) :
+                    mConverter(converter), mVndkVersion(vndkVersion) { }
         private:
             void processResultMessage(sp<ResultWrapper> &resultWrapper);
             wp<H2BCameraDeviceCallbacks> mConverter = nullptr;
+            int mVndkVersion = -1;
             Mutex mMetadataQueueLock;
     };
 
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
index 8e619e1..add9121 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
@@ -15,7 +15,7 @@
  */
 
 #include <hidl/AidlCameraServiceListener.h>
-#include <hidl/Convert.h>
+#include <hidl/Utils.h>
 
 namespace android {
 namespace frameworks {
@@ -70,6 +70,11 @@
   return binder::Status::ok();
 }
 
+::android::binder::Status H2BCameraServiceListener::onTorchStrengthLevelChanged(
+    const ::android::String16&, int32_t) {
+  return binder::Status::ok();
+}
+
 } // implementation
 } // V2_0
 } // common
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 7148035..7ef413f 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -54,6 +54,8 @@
 
     virtual ::android::binder::Status onTorchStatusChanged(
             int32_t status, const ::android::String16& cameraId) override;
+    virtual ::android::binder::Status onTorchStrengthLevelChanged(
+            const ::android::String16& cameraId, int32_t newStrengthLevel) override;
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // TODO: no implementation yet.
         return binder::Status::ok();
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 2509e6c..26e813a 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -20,8 +20,8 @@
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 
 #include <hidl/AidlCameraDeviceCallbacks.h>
-#include <hidl/Convert.h>
 #include <hidl/HidlCameraDeviceUser.h>
+#include <hidl/Utils.h>
 #include <android/hardware/camera/device/3.2/types.h>
 
 namespace android {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 7d1b3cf..65a0300 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -14,13 +14,13 @@
  * limitations under the License.
  */
 
-#include <hidl/Convert.h>
+#include <android-base/properties.h>
 
-#include <hidl/HidlCameraService.h>
-
-#include <hidl/HidlCameraDeviceUser.h>
 #include <hidl/AidlCameraDeviceCallbacks.h>
 #include <hidl/AidlCameraServiceListener.h>
+#include <hidl/HidlCameraService.h>
+#include <hidl/HidlCameraDeviceUser.h>
+#include <hidl/Utils.h>
 
 #include <hidl/HidlTransportSupport.h>
 
@@ -34,6 +34,7 @@
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::hidl_vec;
 using hardware::cameraservice::utils::conversion::convertToHidl;
+using hardware::cameraservice::utils::conversion::filterVndkKeys;
 using hardware::cameraservice::utils::conversion::B2HStatus;
 using hardware::Void;
 
@@ -53,6 +54,10 @@
     return gHidlCameraService;
 }
 
+HidlCameraService::HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) {
+    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+};
+
 Return<void>
 HidlCameraService::getCameraCharacteristics(const hidl_string& cameraId,
                                             getCameraCharacteristics_cb _hidl_cb) {
@@ -77,6 +82,11 @@
         _hidl_cb(status, hidlMetadata);
         return Void();
     }
+    if (filterVndkKeys(mVndkVersion, cameraMetadata) != OK) {
+        ALOGE("%s: Unable to filter vndk metadata keys for version %d", __FUNCTION__, mVndkVersion);
+        _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+        return Void();
+    }
     const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
     convertToHidl(rawMetadata, &hidlMetadata);
     _hidl_cb(status, hidlMetadata);
@@ -97,7 +107,7 @@
     // Create a hardware::camera2::ICameraDeviceCallback object which internally
     // calls callback functions passed through hCallback.
     sp<H2BCameraDeviceCallbacks> hybridCallbacks = new H2BCameraDeviceCallbacks(hCallback);
-    if (!hybridCallbacks->initializeLooper()) {
+    if (!hybridCallbacks->initializeLooper(mVndkVersion)) {
         ALOGE("Unable to handle callbacks on device, cannot connect");
         _hidl_cb(HStatus::UNKNOWN_ERROR, nullptr);
         return Void();
@@ -279,6 +289,9 @@
         size_t numSections = sectionNames->size();
         std::vector<std::vector<HVendorTag>> tagsBySection(numSections);
         int tagCount = desc->getTagCount();
+        if (tagCount <= 0) {
+            continue;
+        }
         std::vector<uint32_t> tags(tagCount);
         desc->getTagArray(tags.data());
         for (int i = 0; i < tagCount; i++) {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.h b/services/camera/libcameraservice/hidl/HidlCameraService.h
index 86a7cec..1c8145c 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.h
@@ -75,7 +75,7 @@
     static sp<HidlCameraService> getInstance(android::CameraService *cs);
 
 private:
-    HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) { };
+    HidlCameraService(android::CameraService *cs);
 
     sp<hardware::ICameraServiceListener> searchListenerCacheLocked(
         sp<HCameraServiceListener> listener, /*removeIfFound*/ bool shouldRemove = false);
@@ -95,6 +95,7 @@
     using HIListeners =
         std::pair<sp<HCameraServiceListener>, sp<ICameraServiceListener>>;
     std::list<HIListeners> mListeners;
+    int mVndkVersion = -1;
 };
 
 }  // namespace implementation
diff --git a/services/camera/libcameraservice/hidl/Convert.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
similarity index 92%
rename from services/camera/libcameraservice/hidl/Convert.cpp
rename to services/camera/libcameraservice/hidl/Utils.cpp
index 597147b..057a6e9 100644
--- a/services/camera/libcameraservice/hidl/Convert.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#include <hidl/Convert.h>
+#include <hidl/Utils.h>
+#include <hidl/VndkVersionMetadataTags.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <cutils/native_handle.h>
 #include <mediautils/AImageReaderUtils.h>
@@ -297,6 +298,31 @@
     return hPhysicalCaptureResultInfos;
 }
 
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
+    if (vndkVersion == __ANDROID_API_FUTURE__) {
+        // The VNDK version in ro.vndk.version is a codename rather than a
+        // number, which corresponds to the current in-development release;
+        // no keys need to be filtered.
+        return OK;
+    }
+    const auto &apiLevelToKeys =
+            isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
+    // Find all VNDK versions above the given one; keys introduced in those
+    // versions must be filtered out of the metadata to avoid metadata
+    // invalidation for clients built against the older VNDK version.
+    auto it = apiLevelToKeys.upper_bound(vndkVersion);
+    while (it != apiLevelToKeys.end()) {
+        for (const auto &key : it->second) {
+            status_t res = metadata.erase(key);
+            if (res != OK) {
+                ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+                return res;
+            }
+        }
+        it++;
+    }
+    return OK;
+}
+
 } //conversion
 } // utils
 } //cameraservice
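filterVndkKeys above walks the per-API-level key tables starting at the first level strictly
greater than the client's VNDK version and erases every key introduced from that level on. A
self-contained illustration of the upper_bound traversal (the tag values are placeholders;
only the map shape mirrors static_api_level_to_keys / dynamic_api_level_to_keys):

    #include <cstdio>
    #include <map>
    #include <vector>

    int main() {
        // API level -> tags introduced at that level (values are made up).
        std::map<int, std::vector<int>> apiLevelToKeys{
                {30, {1001, 1002}}, {31, {2001}}, {33, {3001, 3002}}};
        int vndkVersion = 30;  // client built against VNDK 30
        // Start at the first entry with a level strictly greater than 30, i.e. 31.
        for (auto it = apiLevelToKeys.upper_bound(vndkVersion);
                it != apiLevelToKeys.end(); ++it) {
            for (int key : it->second) {
                printf("would erase key %d (introduced in API level %d)\n", key, it->first);
            }
        }
        return 0;
    }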
diff --git a/services/camera/libcameraservice/hidl/Convert.h b/services/camera/libcameraservice/hidl/Utils.h
similarity index 96%
rename from services/camera/libcameraservice/hidl/Convert.h
rename to services/camera/libcameraservice/hidl/Utils.h
index 82ffc48..e6d4393 100644
--- a/services/camera/libcameraservice/hidl/Convert.h
+++ b/services/camera/libcameraservice/hidl/Utils.h
@@ -29,6 +29,7 @@
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 #include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
 #include <android/hardware/ICameraService.h>
+#include <camera/CameraMetadata.h>
 #include <fmq/MessageQueue.h>
 #include <hardware/camera.h>
 #include <hidl/MQDescriptor.h>
@@ -96,6 +97,8 @@
 
 HStatus B2HStatus(const binder::Status &bStatus);
 
+status_t filterVndkKeys(int vndk_version, CameraMetadata &metadata, bool isStatic = true);
+
 } // conversion
 } // utils
 } // cameraservice
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
new file mode 100644
index 0000000..d3377f4
--- /dev/null
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from vndk_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by hidl/Utils.cpp.
+ */
+
+/**
+ * API level to static keys mapping. To be used for filtering out keys depending on vndk version
+ * used by vendor clients.
+ */
+std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
+      {30, {
+          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+          ANDROID_CONTROL_ZOOM_RATIO_RANGE,
+          ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
+          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
+        } },
+      {31, {
+          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_BINNING_FACTOR,
+        } },
+      {32, {
+          ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
+        } },
+      {33, {
+          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_AUTOMOTIVE_LENS_FACING,
+          ANDROID_AUTOMOTIVE_LOCATION,
+          ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
+          ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
+          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
+          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+        } },
+};
+
+/**
+ * API level to dynamic keys mapping. To be used for filtering out keys depending on vndk version
+ * used by vendor clients.
+ */
+std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
+      {30, {
+          ANDROID_CONTROL_ZOOM_RATIO,
+          ANDROID_SCALER_ROTATE_AND_CROP,
+          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
+        }  },
+      {31, {
+          ANDROID_SENSOR_PIXEL_MODE,
+          ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
+        }  },
+};
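These two tables are consumed by filterVndkKeys through its isStatic flag: the static table
is applied to camera characteristics handed to vendor clients, the dynamic table to per-frame
capture results. A sketch of the two call shapes as they appear elsewhere in this patch
(error handling elided):

    // Static keys: filtering camera characteristics (isStatic defaults to true).
    filterVndkKeys(mVndkVersion, cameraMetadata);
    // Dynamic keys: filtering a capture result on the HIDL callback path.
    filterVndkKeys(mVndkVersion, result, /*isStatic*/ false);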
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 3d74f0b..e43b91f 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -52,6 +52,7 @@
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
         "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V1-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.3",
@@ -65,5 +66,10 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        libfuzzer_options: [
+            // based on b/187360866
+            "timeout=770",
+        ],
+
     },
 }
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index e46bf74..97d7bf4 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -466,6 +466,12 @@
         // No op
         return binder::Status::ok();
     }
+
+    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+            int32_t /*torchStrength*/) {
+        // No op
+        return binder::Status::ok();
+    }
 };
 
 class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
new file mode 100644
index 0000000..3616572
--- /dev/null
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -0,0 +1,125 @@
+// Copyright 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: [
+        "frameworks_av_services_camera_libcameraservice_license",
+    ],
+}
+
+cc_test {
+    name: "cameraservice_test",
+
+    include_dirs: [
+        "system/media/private/camera/include",
+        "external/dynamic_depth/includes",
+        "external/dynamic_depth/internal",
+    ],
+
+    shared_libs: [
+        "libbase",
+        "libcutils",
+        "libcameraservice",
+        "libhidlbase",
+        "liblog",
+        "libcamera_client",
+        "libcamera_metadata",
+        "libui",
+        "libutils",
+        "libjpeg",
+        "libexif",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V1-ndk",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.7",
+        "android.hidl.token@1.0-utils",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    srcs: [
+        "CameraProviderManagerTest.cpp",
+        "ClientManagerTest.cpp",
+        "DepthProcessorTest.cpp",
+        "DistortionMapperTest.cpp",
+        "ExifUtilsTest.cpp",
+        "NV12Compressor.cpp",
+        "RotateAndCropMapperTest.cpp",
+        "ZoomRatioTest.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+
+}
+
+cc_test_host {
+    name: "cameraservice_test_host",
+
+    include_dirs: [
+        "frameworks/av/camera/include",
+        "frameworks/av/camera/include/camera",
+        "frameworks/native/libs/binder/include_activitymanager"
+    ],
+
+    shared_libs: [
+        "libactivity_manager_procstate_aidl-cpp",
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libdynamic_depth",
+        "libexif",
+        "libjpeg",
+        "liblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libcamera_client_host",
+        "libcameraservice_device_independent",
+        "libgmock",
+    ],
+
+    srcs: [
+        "ClientManagerTest.cpp",
+        "DepthProcessorTest.cpp",
+        "DistortionMapperTest.cpp",
+        "ExifUtilsTest.cpp",
+        "NV12Compressor.cpp",
+        "RotateAndCropMapperTest.cpp",
+        "ZoomRatioTest.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+
+}
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
deleted file mode 100644
index 0b5ad79..0000000
--- a/services/camera/libcameraservice/tests/Android.mk
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= $(call all-cpp-files-under, .)
-
-LOCAL_SHARED_LIBRARIES := \
-    libbase \
-    libcutils \
-    libcameraservice \
-    libhidlbase \
-    liblog \
-    libcamera_client \
-    libcamera_metadata \
-    libui \
-    libutils \
-    libjpeg \
-    libexif \
-    android.hardware.camera.common@1.0 \
-    android.hardware.camera.provider@2.4 \
-    android.hardware.camera.provider@2.5 \
-    android.hardware.camera.provider@2.6 \
-    android.hardware.camera.provider@2.7 \
-    android.hardware.camera.device@1.0 \
-    android.hardware.camera.device@3.2 \
-    android.hardware.camera.device@3.4 \
-    android.hardware.camera.device@3.7 \
-    android.hidl.token@1.0-utils
-
-LOCAL_STATIC_LIBRARIES := \
-    libgmock
-
-LOCAL_C_INCLUDES += \
-    system/media/private/camera/include \
-    external/dynamic_depth/includes \
-    external/dynamic_depth/internal \
-
-LOCAL_CFLAGS += -Wall -Wextra -Werror
-
-LOCAL_SANITIZE := address
-
-LOCAL_MODULE:= cameraservice_test
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/../NOTICE
-LOCAL_COMPATIBILITY_SUITE := device-tests
-LOCAL_MODULE_TAGS := tests
-
-include $(BUILD_NATIVE_TEST)
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a74fd9d..e9f6979 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -212,7 +212,7 @@
  * Simple test version of the interaction proxy, to use to inject onRegistered calls to the
  * CameraProviderManager
  */
-struct TestInteractionProxy : public CameraProviderManager::ServiceInteractionProxy {
+struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy {
     sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
     sp<TestICameraProvider> mTestCameraProvider;
 
@@ -269,11 +269,13 @@
     ~TestStatusListener() {}
 
     void onDeviceStatusChanged(const String8 &,
-            hardware::camera::common::V1_0::CameraDeviceStatus) override {}
+            CameraDeviceStatus) override {}
     void onDeviceStatusChanged(const String8 &, const String8 &,
-            hardware::camera::common::V1_0::CameraDeviceStatus) override {}
+            CameraDeviceStatus) override {}
     void onTorchStatusChanged(const String8 &,
-            hardware::camera::common::V1_0::TorchModeStatus) override {}
+            TorchModeStatus) override {}
+    void onTorchStatusChanged(const String8 &,
+            TorchModeStatus, SystemCameraKind) override {}
     void onNewProviderRegistered() override {}
 };
 
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.h b/services/camera/libcameraservice/tests/NV12Compressor.h
index ee22d5e..a959871 100644
--- a/services/camera/libcameraservice/tests/NV12Compressor.h
+++ b/services/camera/libcameraservice/tests/NV12Compressor.h
@@ -19,6 +19,7 @@
 
 #include <setjmp.h>
 #include <stdlib.h>
+#include <stdio.h>
 extern "C" {
 #include <jpeglib.h>
 #include <jerror.h>
diff --git a/services/camera/libcameraservice/tests/how_to_run.txt b/services/camera/libcameraservice/tests/how_to_run.txt
new file mode 100644
index 0000000..93239e3
--- /dev/null
+++ b/services/camera/libcameraservice/tests/how_to_run.txt
@@ -0,0 +1,5 @@
+adb root &&
+m cameraservice_test &&
+adb push $ANDROID_PRODUCT_OUT/data/nativetest/cameraservice_test/cameraservice_test \
+    /data/nativetest/cameraservice_test/arm64/cameraservice_test &&
+adb shell /data/nativetest/cameraservice_test/arm64/cameraservice_test
\ No newline at end of file
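To run a single suite from the list in tests/Android.bp, the standard gtest filter flag can
be appended to the last command above (sketch; the exact suite names come from the individual
*Test.cpp files):

    adb shell /data/nativetest/cameraservice_test/arm64/cameraservice_test \
        --gtest_filter='DistortionMapperTest.*'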
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 8699543..69175cc 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -66,10 +66,11 @@
     }
 }
 
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive() {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
+    mSessionStats.mMaxPreviewFps = maxPreviewFps;
     updateProxyDeviceState(mSessionStats);
 
     // Reset mCreationDuration to -1 to distinguish between 1st session
@@ -79,6 +80,7 @@
 
 void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::string& userTag, int32_t videoStabilizationMode,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
     Mutex::Autolock l(mLock);
 
@@ -86,6 +88,8 @@
     mSessionStats.mRequestCount = requestCount;
     mSessionStats.mResultErrorCount = resultErrorCount;
     mSessionStats.mDeviceError = deviceError;
+    mSessionStats.mUserTag = String16(userTag.c_str());
+    mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mStreamStats = streamStats;
     updateProxyDeviceState(mSessionStats);
 
@@ -158,7 +162,7 @@
     sessionStats->onStreamConfigured(operatingMode, internalConfig, latencyMs);
 }
 
-void CameraServiceProxyWrapper::logActive(const String8& id) {
+void CameraServiceProxyWrapper::logActive(const String8& id, float maxPreviewFps) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -171,11 +175,12 @@
     }
 
     ALOGV("%s: id %s", __FUNCTION__, id.c_str());
-    sessionStats->onActive();
+    sessionStats->onActive(maxPreviewFps);
 }
 
 void CameraServiceProxyWrapper::logIdle(const String8& id,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        const std::string& userTag, int32_t videoStabilizationMode,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
@@ -189,8 +194,9 @@
         return;
     }
 
-    ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d",
-            __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError);
+    ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
+            ", userTag %s, videoStabilizationMode %d", __FUNCTION__, id.c_str(), requestCount,
+            resultErrorCount, deviceError, userTag.c_str(), videoStabilizationMode);
     for (size_t i = 0; i < streamStats.size(); i++) {
         ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
                 PRId64 ", startTimeMs %d" ,
@@ -199,7 +205,8 @@
                 streamStats[i].mStartLatencyMs);
     }
 
-    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, streamStats);
+    sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+            videoStabilizationMode, streamStats);
 }
 
 void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
@@ -255,4 +262,16 @@
     sessionStats->onClose(latencyMs);
 }
 
+bool CameraServiceProxyWrapper::isCameraDisabled() {
+    sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return true;
+    bool ret = false;
+    auto status = proxyBinder->isCameraDisabled(&ret);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed during camera disabled query: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+    return ret;
+}
+
 }; // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index f701e94..e34a8f0 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -48,8 +48,9 @@
         void onOpen();
         void onClose(int32_t latencyMs);
         void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
-        void onActive();
+        void onActive(float maxPreviewFps);
         void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+                const std::string& userTag, int32_t videoStabilizationMode,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
     };
 
@@ -81,11 +82,12 @@
             int32_t latencyMs);
 
     // Session state becomes active
-    static void logActive(const String8& id);
+    static void logActive(const String8& id, float maxPreviewFps);
 
     // Session state becomes idle
     static void logIdle(const String8& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+            const std::string& userTag, int32_t videoStabilizationMode,
             const std::vector<hardware::CameraStreamStats>& streamStats);
 
     // Ping camera service proxy for user update
@@ -93,6 +95,9 @@
 
     // Return the current top activity rotate and crop override.
     static int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+
+    // Detect if the camera is disabled by device policy.
+    static bool isCameraDisabled();
 };
 
 } // android
diff --git a/services/camera/libcameraservice/utils/CameraTraces.cpp b/services/camera/libcameraservice/utils/CameraTraces.cpp
index 0198690..0cd4f5d 100644
--- a/services/camera/libcameraservice/utils/CameraTraces.cpp
+++ b/services/camera/libcameraservice/utils/CameraTraces.cpp
@@ -64,7 +64,7 @@
     ATRACE_END();
 }
 
-status_t CameraTraces::dump(int fd, const Vector<String16> &args __attribute__((unused))) {
+status_t CameraTraces::dump(int fd) {
     ALOGV("%s: fd = %d", __FUNCTION__, fd);
     Mutex::Autolock al(sImpl.tracesLock);
     List<ProcessCallStack>& pcsList = sImpl.pcsList;
diff --git a/services/camera/libcameraservice/utils/CameraTraces.h b/services/camera/libcameraservice/utils/CameraTraces.h
index 13ca16d..71fa334 100644
--- a/services/camera/libcameraservice/utils/CameraTraces.h
+++ b/services/camera/libcameraservice/utils/CameraTraces.h
@@ -42,7 +42,7 @@
      *
      * <p>Each line is indented by DUMP_INDENT spaces.</p>
      */
-    static status_t dump(int fd, const Vector<String16>& args);
+    static status_t dump(int fd);
 
 private:
     enum {
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index d164885..074c84d 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -527,12 +527,7 @@
         if (!returnIncompatibleClients) {
             // Find evicted clients
 
-            if (conflicting && curPriority < priority) {
-                // Pre-existing conflicting client with higher priority exists
-                evictList.clear();
-                evictList.push_back(client);
-                return evictList;
-            } else if (conflicting && owner == curOwner) {
+            if (conflicting && owner == curOwner) {
                 // Pre-existing conflicting client with the same client owner exists
                 // Open the same device twice -> most recent open wins
                 // Otherwise let the existing client wins to avoid behaviors difference
@@ -546,6 +541,11 @@
                     evictList.push_back(client);
                     return evictList;
                 }
+            } else if (conflicting && curPriority < priority) {
+                // Pre-existing conflicting client with higher priority exists
+                evictList.clear();
+                evictList.push_back(client);
+                return evictList;
             } else if (conflicting || ((totalCost > mMaxCost && curCost > 0) &&
                     (curPriority >= priority) &&
                     !(highestPriorityOwner == owner && owner == curOwner))) {
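With the reordering above, a conflict between clients of the same owner is resolved before
the priority comparison, so the branch order is now (sketch of the structure only; the
bookkeeping inside each branch is unchanged from the original block):

    if (conflicting && owner == curOwner) {
        // Same owner opened a conflicting device again: most recent open wins,
        // otherwise the existing client is kept.
    } else if (conflicting && curPriority < priority) {
        // A pre-existing conflicting client with higher priority exists:
        // the incoming client is the one evicted.
    }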
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl b/services/camera/libcameraservice/utils/IPCTransport.h
similarity index 73%
rename from media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
rename to services/camera/libcameraservice/utils/IPCTransport.h
index a2cbf62..b8e80ac 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
+++ b/services/camera/libcameraservice/utils/IPCTransport.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,11 +14,13 @@
  * limitations under the License.
  */
 
-package android.media;
+#ifndef ANDROID_SERVERS_IPC_H_
+#define ANDROID_SERVERS_IPC_H_
 
-/**
- * {@hide}
- */
-parcelable AudioPortConfigSessionExt {
-    int session;
-}
+enum class IPCTransport : uint32_t {
+  HIDL = 0,
+  AIDL = 1,
+  INVALID = 2
+};
+
+#endif
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index a239c81..2eb2d55 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -13,71 +13,27 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 #include <cutils/properties.h>
 
 #include "SessionConfigurationUtils.h"
 #include "../api2/DepthCompositeStream.h"
 #include "../api2/HeicCompositeStream.h"
 #include "common/CameraDeviceBase.h"
+#include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
-#include "device3/Camera3Device.h"
+#include "device3/aidl/AidlCamera3Device.h"
+#include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "system/graphics-base-v1.1.h"
 
 using android::camera3::OutputStreamInfo;
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
-using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
 
 namespace android {
 namespace camera3 {
 
-int32_t SessionConfigurationUtils::PERF_CLASS_LEVEL =
-        property_get_int32("ro.odm.build.media_performance_class", 0);
-
-bool SessionConfigurationUtils::IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);
-
-camera3::Size SessionConfigurationUtils::getMaxJpegResolution(const CameraMetadata &metadata,
-        bool ultraHighResolution) {
-    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
-    const int STREAM_CONFIGURATION_SIZE = 4;
-    const int STREAM_FORMAT_OFFSET = 0;
-    const int STREAM_WIDTH_OFFSET = 1;
-    const int STREAM_HEIGHT_OFFSET = 2;
-    const int STREAM_IS_INPUT_OFFSET = 3;
-
-    int32_t scalerSizesTag = ultraHighResolution ?
-            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
-                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
-    camera_metadata_ro_entry_t availableStreamConfigs =
-            metadata.find(scalerSizesTag);
-    if (availableStreamConfigs.count == 0 ||
-            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
-        return camera3::Size(0, 0);
-    }
-
-    // Get max jpeg size (area-wise).
-    for (size_t i= 0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
-        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
-        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
-        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
-        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
-        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
-                && format == HAL_PIXEL_FORMAT_BLOB &&
-                (width * height > maxJpegWidth * maxJpegHeight)) {
-            maxJpegWidth = width;
-            maxJpegHeight = height;
-        }
-    }
-
-    return camera3::Size(maxJpegWidth, maxJpegHeight);
-}
-
-size_t SessionConfigurationUtils::getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
-        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
-    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
-            (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
-}
-
 void StreamConfiguration::getStreamConfigurations(
         const CameraMetadata &staticInfo, int configuration,
         std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
@@ -126,65 +82,57 @@
     getStreamConfigurations(staticInfo, heicKey, scm);
 }
 
-int32_t SessionConfigurationUtils::getAppropriateModeTag(int32_t defaultTag, bool maxResolution) {
-    if (!maxResolution) {
-        return defaultTag;
+namespace SessionConfigurationUtils {
+
+int32_t PERF_CLASS_LEVEL =
+        property_get_int32("ro.odm.build.media_performance_class", 0);
+
+bool IS_PERF_CLASS = (PERF_CLASS_LEVEL >= SDK_VERSION_S);
+
+camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
+        bool ultraHighResolution) {
+    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
+    const int STREAM_CONFIGURATION_SIZE = 4;
+    const int STREAM_FORMAT_OFFSET = 0;
+    const int STREAM_WIDTH_OFFSET = 1;
+    const int STREAM_HEIGHT_OFFSET = 2;
+    const int STREAM_IS_INPUT_OFFSET = 3;
+
+    int32_t scalerSizesTag = ultraHighResolution ?
+            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+    camera_metadata_ro_entry_t availableStreamConfigs =
+            metadata.find(scalerSizesTag);
+    if (availableStreamConfigs.count == 0 ||
+            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+        return camera3::Size(0, 0);
     }
-    switch (defaultTag) {
-        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
-            return ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS:
-            return ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
-            return ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
-            return ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
-            return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
-            return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
-            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
-            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
-            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
-            return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
-            return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
-            return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
-        case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
-            return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
-        case ANDROID_LENS_INTRINSIC_CALIBRATION:
-            return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
-        case ANDROID_LENS_DISTORTION:
-            return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
-        default:
-            ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
-                    defaultTag);
-            return -1;
+
+    // Get max jpeg size (area-wise).
+    for (size_t i= 0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
+        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+                && format == HAL_PIXEL_FORMAT_BLOB &&
+                (width * height > maxJpegWidth * maxJpegHeight)) {
+            maxJpegWidth = width;
+            maxJpegHeight = height;
+        }
     }
-    return -1;
+
+    return camera3::Size(maxJpegWidth, maxJpegHeight);
 }
 
-bool SessionConfigurationUtils::getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
-        int32_t arrayTag, int32_t *width, int32_t *height) {
-    if (width == nullptr || height == nullptr) {
-        ALOGE("%s: width / height nullptr", __FUNCTION__);
-        return false;
-    }
-    camera_metadata_ro_entry_t entry;
-    entry = deviceInfo->find(arrayTag);
-    if (entry.count != 4) return false;
-    *width = entry.data.i32[2];
-    *height = entry.data.i32[3];
-    return true;
+size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
+    return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+            (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
 }
 
 StreamConfigurationPair
-SessionConfigurationUtils::getStreamConfigurationPair(const CameraMetadata &staticInfo) {
+getStreamConfigurationPair(const CameraMetadata &staticInfo) {
     camera3::StreamConfigurationPair streamConfigurationPair;
     camera3::StreamConfiguration::getStreamConfigurations(staticInfo, false,
             &streamConfigurationPair.mDefaultStreamConfigurationMap);
@@ -193,13 +141,13 @@
     return streamConfigurationPair;
 }
 
-int64_t SessionConfigurationUtils::euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
+int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
     int64_t d0 = x0 - x1;
     int64_t d1 = y0 - y1;
     return d0 * d0 + d1 * d1;
 }
 
-bool SessionConfigurationUtils::roundBufferDimensionNearest(int32_t width, int32_t height,
+bool roundBufferDimensionNearest(int32_t width, int32_t height,
         int32_t format, android_dataspace dataSpace,
         const CameraMetadata& info, bool maxResolution, /*out*/int32_t* outWidth,
         /*out*/int32_t* outHeight) {
@@ -260,7 +208,81 @@
     return true;
 }
 
-bool SessionConfigurationUtils::isPublicFormat(int32_t format)
+// Check whether the format is 10-bit compatible.
+bool is10bitCompatibleFormat(int32_t format) {
+    switch(format) {
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+        case HAL_PIXEL_FORMAT_YCBCR_P010:
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+    if (dynamicRangeProfile == ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+        // Supported by default
+        return true;
+    }
+
+    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    bool is10bitDynamicRangeSupported = false;
+    for (size_t i = 0; i < entry.count; ++i) {
+        uint8_t capability = entry.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) {
+            is10bitDynamicRangeSupported = true;
+            break;
+        }
+    }
+
+    if (!is10bitDynamicRangeSupported) {
+        return false;
+    }
+
+    switch (dynamicRangeProfile) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
+            entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+            for (size_t i = 0; i < entry.count; i += 3) {
+                if (dynamicRangeProfile == entry.data.i64[i]) {
+                    return true;
+                }
+            }
+
+            return false;
+        default:
+            return false;
+    }
+
+    return false;
+}
+
+// Check whether the dynamic range profile is a 10-bit profile.
+bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile) {
+    switch (dynamicRangeProfile) {
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool isPublicFormat(int32_t format)
 {
     switch(format) {
         case HAL_PIXEL_FORMAT_RGBA_8888:
@@ -287,11 +309,34 @@
     }
 }
 
-binder::Status SessionConfigurationUtils::createSurfaceFromGbp(
+bool isStreamUseCaseSupported(int64_t streamUseCase,
+        const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t availableStreamUseCases =
+            deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
+
+    if (availableStreamUseCases.count == 0 &&
+            streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        return true;
+    }
+    // Allow vendor stream use case unconditionally.
+    if (streamUseCase >= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START) {
+        return true;
+    }
+
+    for (size_t i = 0; i < availableStreamUseCases.count; i++) {
+        if (availableStreamUseCases.data.i64[i] == streamUseCase) {
+            return true;
+        }
+    }
+    return false;
+}
+
+binder::Status createSurfaceFromGbp(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
-        const std::vector<int32_t> &sensorPixelModesUsed){
+        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
+        int64_t streamUseCase, int timestampBase, int mirrorMode) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -389,6 +434,43 @@
         ALOGE("%s: %s", __FUNCTION__, msg.string());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
+    if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
+                physicalCameraMetadata)) {
+        String8 msg = String8::format("Camera %s: Dynamic range profile 0x%" PRIx64
+                " not supported,failed to create output stream", logicalCameraId.string(),
+                dynamicRangeProfile);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
+            !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+        String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
+                "format %#x defined and profile %" PRIx64 ", failed to create output stream",
+                logicalCameraId.string(), format, dynamicRangeProfile);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
+            physicalCameraMetadata)) {
+        String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
+                " failed to create output stream", logicalCameraId.string(), streamUseCase);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
+            timestampBase > OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED) {
+        String8 msg = String8::format("Camera %s: invalid timestamp base %d",
+                logicalCameraId.string(), timestampBase);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
+            mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
+        String8 msg = String8::format("Camera %s: invalid mirroring mode %d",
+                logicalCameraId.string(), mirrorMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
 
     if (!isStreamInfoValid) {
         streamInfo.width = width;
@@ -397,6 +479,10 @@
         streamInfo.dataSpace = dataSpace;
         streamInfo.consumerUsage = consumerUsage;
         streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
+        streamInfo.dynamicRangeProfile = dynamicRangeProfile;
+        streamInfo.streamUseCase = streamUseCase;
+        streamInfo.timestampBase = timestampBase;
+        streamInfo.mirrorMode = mirrorMode;
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
@@ -437,114 +523,49 @@
     return binder::Status::ok();
 }
 
-void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
+void mapStreamInfo(const OutputStreamInfo &streamInfo,
             camera3::camera_stream_rotation_t rotation, String8 physicalId,
-            int32_t groupId, hardware::camera::device::V3_7::Stream *stream /*out*/) {
+            int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
     if (stream == nullptr) {
         return;
     }
 
-    stream->v3_4.v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
-    stream->v3_4.v3_2.width = streamInfo.width;
-    stream->v3_4.v3_2.height = streamInfo.height;
-    stream->v3_4.v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
+    stream->streamType = aidl::android::hardware::camera::device::StreamType::OUTPUT;
+    stream->width = streamInfo.width;
+    stream->height = streamInfo.height;
+    stream->format = AidlCamera3Device::mapToAidlPixelFormat(streamInfo.format);
     auto u = streamInfo.consumerUsage;
     camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
-    stream->v3_4.v3_2.usage = Camera3Device::mapToConsumerUsage(u);
-    stream->v3_4.v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
-    stream->v3_4.v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
-    stream->v3_4.v3_2.id = -1; // Invalid stream id
-    stream->v3_4.physicalCameraId = std::string(physicalId.string());
-    stream->v3_4.bufferSize = 0;
+    stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
+    stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+    stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
+    stream->id = -1; // Invalid stream id
+    stream->physicalCameraId = std::string(physicalId.string());
+    stream->bufferSize = 0;
     stream->groupId = groupId;
     stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
     size_t idx = 0;
+    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
     for (auto mode : streamInfo.sensorPixelModesUsed) {
         stream->sensorPixelModesUsed[idx++] =
-                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+                static_cast<SensorPixelMode>(mode);
     }
-}
-
-binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
-        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
-        const String8 &logicalCameraId) {
-    if (physicalCameraId.size() == 0) {
-        return binder::Status::ok();
-    }
-    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
-        physicalCameraId.string()) == physicalCameraIds.end()) {
-        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
-                logicalCameraId.string(), physicalCameraId.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
-    }
-    return binder::Status::ok();
-}
-
-binder::Status SessionConfigurationUtils::checkSurfaceType(size_t numBufferProducers,
-        bool deferredConsumer, int surfaceType)  {
-    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
-        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
-                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
-    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
-        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
-    }
-
-    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
-            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
-    if (deferredConsumer && !validSurfaceType) {
-        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
-    }
-
-    return binder::Status::ok();
-}
-
-binder::Status SessionConfigurationUtils::checkOperatingMode(int operatingMode,
-        const CameraMetadata &staticInfo, const String8 &cameraId) {
-    if (operatingMode < 0) {
-        String8 msg = String8::format(
-            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
-    }
-
-    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
-    if (isConstrainedHighSpeed) {
-        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
-        bool isConstrainedHighSpeedSupported = false;
-        for(size_t i = 0; i < entry.count; ++i) {
-            uint8_t capability = entry.data.u8[i];
-            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
-                isConstrainedHighSpeedSupported = true;
-                break;
-            }
-        }
-        if (!isConstrainedHighSpeedSupported) {
-            String8 msg = String8::format(
-                "Camera %s: Try to create a constrained high speed configuration on a device"
-                " that doesn't support it.", cameraId.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                    msg.string());
-        }
-    }
-
-    return binder::Status::ok();
+    using DynamicRangeProfile =
+            aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
+    stream->dynamicRangeProfile = static_cast<DynamicRangeProfile>(streamInfo.dynamicRangeProfile);
+    using StreamUseCases =
+            aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases;
+    stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
 binder::Status
-SessionConfigurationUtils::convertToHALStreamCombination(
+convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
         const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
+        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit) {
-
+    using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
     auto operatingMode = sessionConfiguration.getOperatingMode();
     binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
     if (!res.isOk()) {
@@ -557,7 +578,7 @@
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
     *earlyExit = false;
-    auto ret = Camera3Device::mapToStreamConfigurationMode(
+    auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
             static_cast<camera_stream_configuration_mode_t> (operatingMode),
             /*out*/ &streamConfiguration.operationMode);
     if (ret != OK) {
@@ -578,19 +599,31 @@
     streamConfiguration.streams.resize(streamCount);
     size_t streamIdx = 0;
     if (isInputValid) {
-        hardware::hidl_vec<CameraMetadataEnumAndroidSensorPixelMode> defaultSensorPixelModes;
+        std::vector<SensorPixelMode> defaultSensorPixelModes;
         defaultSensorPixelModes.resize(1);
         defaultSensorPixelModes[0] =
-                static_cast<CameraMetadataEnumAndroidSensorPixelMode>(
-                        ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
-        streamConfiguration.streams[streamIdx++] = {{{/*streamId*/0,
-                hardware::camera::device::V3_2::StreamType::INPUT,
-                static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
-                static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
-                Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
-                /*usage*/ 0, HAL_DATASPACE_UNKNOWN,
-                hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
-                /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1, defaultSensorPixelModes};
+                static_cast<SensorPixelMode>(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+        aidl::android::hardware::camera::device::Stream stream;
+        stream.id = 0;
+        stream.streamType = aidl::android::hardware::camera::device::StreamType::INPUT;
+        stream.width = static_cast<uint32_t> (sessionConfiguration.getInputWidth());
+        stream.height = static_cast<uint32_t> (sessionConfiguration.getInputHeight());
+        stream.format =
+                AidlCamera3Device::mapToAidlPixelFormat(
+                        sessionConfiguration.getInputFormat());
+        stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0);
+        stream.dataSpace =
+              static_cast<aidl::android::hardware::graphics::common::Dataspace>(
+                      HAL_DATASPACE_UNKNOWN);
+        stream.rotation = aidl::android::hardware::camera::device::StreamRotation::ROTATION_0;
+        stream.bufferSize = 0;
+        stream.groupId = -1;
+        stream.sensorPixelModesUsed = defaultSensorPixelModes;
+        using DynamicRangeProfile =
+            aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
+        stream.dynamicRangeProfile =
+            DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+        streamConfiguration.streams[streamIdx++] = stream;
         streamConfiguration.multiResolutionInputImage =
                 sessionConfiguration.inputIsMultiResolution();
     }
@@ -601,6 +634,7 @@
         bool deferredConsumer = it.isDeferred();
         String8 physicalCameraId = String8(it.getPhysicalCameraId());
 
+        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
         std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
         const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
                 overrideForPerfClass);
@@ -622,6 +656,9 @@
             return res;
         }
 
+        int64_t streamUseCase = it.getStreamUseCase();
+        int timestampBase = it.getTimestampBase();
+        int mirrorMode = it.getMirrorMode();
         if (deferredConsumer) {
             streamInfo.width = it.getWidth();
             streamInfo.height = it.getHeight();
@@ -632,6 +669,7 @@
             if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                 streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
             }
+            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
             if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
                     streamInfo.format, streamInfo.width,
                     streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
@@ -641,6 +679,7 @@
                         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                 "Deferred surface sensor pixel modes not valid");
             }
+            streamInfo.streamUseCase = streamUseCase;
             mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
                     &streamConfiguration.streams[streamIdx++]);
             isStreamInfoValid = true;
@@ -653,7 +692,8 @@
         for (auto& bufferProducer : bufferProducers) {
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                    logicalCameraId, metadataChosen, sensorPixelModesUsed);
+                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
+                    streamUseCase, timestampBase, mirrorMode);
 
             if (!res.isOk())
                 return res;
@@ -711,6 +751,78 @@
     return binder::Status::ok();
 }
 
+binder::Status checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId) {
+    if (physicalCameraId.size() == 0) {
+        return binder::Status::ok();
+    }
+    if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
+        physicalCameraId.string()) == physicalCameraIds.end()) {
+        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
+                logicalCameraId.string(), physicalCameraId.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+    }
+    return binder::Status::ok();
+}
+
+binder::Status checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType)  {
+    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
+        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
+                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
+    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
+    }
+
+    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+
+    if (deferredConsumer && !validSurfaceType) {
+        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    }
+
+    return binder::Status::ok();
+}
+
+binder::Status checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId) {
+    if (operatingMode < 0) {
+        String8 msg = String8::format(
+            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                msg.string());
+    }
+
+    bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
+    if (isConstrainedHighSpeed) {
+        camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+        bool isConstrainedHighSpeedSupported = false;
+        for(size_t i = 0; i < entry.count; ++i) {
+            uint8_t capability = entry.data.u8[i];
+            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
+                isConstrainedHighSpeedSupported = true;
+                break;
+            }
+        }
+        if (!isConstrainedHighSpeedSupported) {
+            String8 msg = String8::format(
+                "Camera %s: Try to create a constrained high speed configuration on a device"
+                " that doesn't support it.", cameraId.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.string());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                    msg.string());
+        }
+    }
+
+    return binder::Status::ok();
+}
+
 static bool inStreamConfigurationMap(int format, int width, int height,
         const std::unordered_map<int, std::vector<camera3::StreamConfiguration>> &sm) {
     auto scs = sm.find(format);
@@ -729,7 +841,7 @@
     return std::unordered_set<int32_t>(sensorPixelModesUsed.begin(), sensorPixelModesUsed.end());
 }
 
-status_t SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
+status_t checkAndOverrideSensorPixelModesUsed(
         const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
         const CameraMetadata &staticInfo, bool flexibleConsumer,
         std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
@@ -795,44 +907,7 @@
     return OK;
 }
 
-bool SessionConfigurationUtils::isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
-    camera_metadata_ro_entry_t entryCap;
-    entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
-    // Go through the capabilities and check if it has
-    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
-    for (size_t i = 0; i < entryCap.count; ++i) {
-        uint8_t capability = entryCap.data.u8[i];
-        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
-            return true;
-        }
-    }
-    return false;
-}
-
-bool SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
-        hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
-        const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
-    if (streamConfigV37.multiResolutionInputImage) {
-        // ICameraDevice older than 3.7 doesn't support multi-resolution input image.
-        return false;
-    }
-
-    streamConfigV34.streams.resize(streamConfigV37.streams.size());
-    for (size_t i = 0; i < streamConfigV37.streams.size(); i++) {
-        if (streamConfigV37.streams[i].groupId != -1) {
-            // ICameraDevice older than 3.7 doesn't support multi-resolution output
-            // image
-            return false;
-        }
-        streamConfigV34.streams[i] = streamConfigV37.streams[i].v3_4;
-    }
-    streamConfigV34.operationMode = streamConfigV37.operationMode;
-    streamConfigV34.sessionParams = streamConfigV37.sessionParams;
-
-    return true;
-}
-
-bool SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+bool targetPerfClassPrimaryCamera(
         const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
         int targetSdkVersion) {
     bool isPerfClassPrimaryCamera =
@@ -840,5 +915,6 @@
     return targetSdkVersion >= SDK_VERSION_S && isPerfClassPrimaryCamera;
 }
 
+} // namespace SessionConfigurationUtils
 } // namespace camera3
 } // namespace android
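The validation that createSurfaceFromGbp() gains above can be exercised on its own. A minimal caller-side sketch follows, assuming the include spelling used elsewhere in this change and a hypothetical validateOutputParams() wrapper; it mirrors the order of the dynamic-range, 10-bit-format, and stream-use-case checks.

#include <camera/CameraMetadata.h>
#include "SessionConfigurationUtils.h"

using android::CameraMetadata;
namespace scu = android::camera3::SessionConfigurationUtils;

// Returns true if the requested output parameters pass the same static-metadata
// checks that createSurfaceFromGbp() performs before creating an output stream.
static bool validateOutputParams(const CameraMetadata& staticInfo, int32_t format,
        int64_t dynamicRangeProfile, int64_t streamUseCase) {
    if (!scu::isDynamicRangeProfileSupported(dynamicRangeProfile, staticInfo)) {
        return false; // profile not advertised by this device
    }
    if (scu::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
            !scu::is10bitCompatibleFormat(format)) {
        return false; // 10-bit profiles require a 10-bit capable output format
    }
    // DEFAULT is accepted when the device lists no use cases at all; vendor-defined
    // values (>= VENDOR_START) are accepted unconditionally.
    return scu::isStreamUseCaseSupported(streamUseCase, staticInfo);
}
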
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 192e241..a127c7b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -21,15 +21,18 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
-#include <android/hardware/camera/device/3.7/types.h>
+#include <aidl/android/hardware/camera/device/ICameraDevice.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
 
 #include <device3/Camera3StreamInterface.h>
+#include <utils/IPCTransport.h>
 
 #include <set>
 #include <stdint.h>
 
+#include "SessionConfigurationUtilsHost.h"
+
 // Convenience methods for constructing binder::Status objects for error returns
 
 #define STATUS_ERROR(errorCode, errorString) \
@@ -44,7 +47,7 @@
 namespace android {
 namespace camera3 {
 
-typedef std::function<CameraMetadata (const String8 &, int targetSdkVersion)> metadataGetter;
+typedef std::function<CameraMetadata (const String8 &, bool overrideForPerfClass)> metadataGetter;
 
 class StreamConfiguration {
 public:
@@ -69,96 +72,92 @@
             mMaximumResolutionStreamConfigurationMap;
 };
 
-class SessionConfigurationUtils {
-public:
-    static camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
-            bool ultraHighResolution);
+namespace SessionConfigurationUtils {
 
-    static size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
-            camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize);
+camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
+        bool ultraHighResolution);
 
-    static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
+size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize);
 
-    // Find the closest dimensions for a given format in available stream configurations with
-    // a width <= ROUNDING_WIDTH_CAP
-    static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
-            android_dataspace dataSpace, const CameraMetadata& info, bool maxResolution,
-            /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
 
-    static bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo, int32_t arrayTag,
-            int32_t *width, int32_t *height);
+// Find the closest dimensions for a given format in available stream configurations with
+// a width <= ROUNDING_WIDTH_CAP
+bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format,
+        android_dataspace dataSpace, const CameraMetadata& info, bool maxResolution,
+        /*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
 
-    //check if format is not custom format
-    static bool isPublicFormat(int32_t format);
+// check if format is not custom format
+bool isPublicFormat(int32_t format);
 
-    // Create a Surface from an IGraphicBufferProducer. Returns error if
-    // IGraphicBufferProducer's property doesn't match with streamInfo
-    static binder::Status createSurfaceFromGbp(
+// Create a Surface from an IGraphicBufferProducer. Returns error if
+// IGraphicBufferProducer's property doesn't match with streamInfo
+binder::Status createSurfaceFromGbp(
         camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
         const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
-        const std::vector<int32_t> &sensorPixelModesUsed);
+        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
+        int64_t streamUseCase, int timestampBase, int mirrorMode);
 
-    static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
-            camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
-            hardware::camera::device::V3_7::Stream *stream /*out*/);
+// check if format is 10-bit output compatible
+bool is10bitCompatibleFormat(int32_t format);
 
-    // Check that the physicalCameraId passed in is spported by the camera
-    // device.
-    static binder::Status checkPhysicalCameraId(
-        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
-        const String8 &logicalCameraId);
+// check if the dynamic range requires 10-bit output
+bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
 
-    static binder::Status checkSurfaceType(size_t numBufferProducers,
-        bool deferredConsumer, int surfaceType);
+// Check if the device supports a given dynamicRangeProfile
+bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
 
-    static binder::Status checkOperatingMode(int operatingMode,
-        const CameraMetadata &staticInfo, const String8 &cameraId);
+bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
 
-    // utility function to convert AIDL SessionConfiguration to HIDL
-    // streamConfiguration. Also checks for validity of SessionConfiguration and
-    // returns a non-ok binder::Status if the passed in session configuration
-    // isn't valid.
-    static binder::Status
-    convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
-            const String8 &cameraId, const CameraMetadata &deviceInfo,
-            metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
-            hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
-            bool overrideForPerfClass, bool *earlyExit);
+void mapStreamInfo(const OutputStreamInfo &streamInfo,
+        camera3::camera_stream_rotation_t rotation, String8 physicalId,
+        int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/);
 
-    // Utility function to convert a V3_7::StreamConfiguration to
-    // V3_4::StreamConfiguration. Return false if the original V3_7 configuration cannot
-    // be used by older version HAL.
-    static bool convertHALStreamCombinationFromV37ToV34(
-            hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
-            const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37);
+// Check that the physicalCameraId passed in is supported by the camera
+// device.
+binder::Status checkPhysicalCameraId(
+        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
+        const String8 &logicalCameraId);
 
-    static StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
+binder::Status checkSurfaceType(size_t numBufferProducers,
+        bool deferredConsumer, int surfaceType);
 
-    static status_t checkAndOverrideSensorPixelModesUsed(
-            const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
-            const CameraMetadata &staticInfo, bool flexibleConsumer,
-            std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
+binder::Status checkOperatingMode(int operatingMode,
+        const CameraMetadata &staticInfo, const String8 &cameraId);
 
-    static bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+binder::Status
+convertToHALStreamCombination(
+    const SessionConfiguration& sessionConfiguration,
+    const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+    metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+    aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
+    bool overrideForPerfClass, bool *earlyExit);
 
-    static int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
+StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
 
-    static bool targetPerfClassPrimaryCamera(
-            const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
-            int32_t targetSdkVersion);
+status_t checkAndOverrideSensorPixelModesUsed(
+        const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
+        const CameraMetadata &staticInfo, bool flexibleConsumer,
+        std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
 
-    static const int32_t MAX_SURFACES_PER_STREAM = 4;
+bool targetPerfClassPrimaryCamera(
+        const std::set<std::string>& perfClassPrimaryCameraIds, const std::string& cameraId,
+        int32_t targetSdkVersion);
 
-    static const int32_t ROUNDING_WIDTH_CAP = 1920;
+constexpr int32_t MAX_SURFACES_PER_STREAM = 4;
 
-    static const int32_t SDK_VERSION_S = 31;
-    static int32_t PERF_CLASS_LEVEL;
-    static bool IS_PERF_CLASS;
-    static const int32_t PERF_CLASS_JPEG_THRESH_W = 1920;
-    static const int32_t PERF_CLASS_JPEG_THRESH_H = 1080;
-};
+constexpr int32_t ROUNDING_WIDTH_CAP = 1920;
 
+constexpr int32_t SDK_VERSION_S = 31;
+extern int32_t PERF_CLASS_LEVEL;
+extern bool IS_PERF_CLASS;
+constexpr int32_t PERF_CLASS_JPEG_THRESH_W = 1920;
+constexpr int32_t PERF_CLASS_JPEG_THRESH_H = 1080;
+
+} // SessionConfigurationUtils
 } // camera3
 } // android
+
 #endif
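Because SessionConfigurationUtils is now a namespace rather than a class of static members, qualified call sites keep compiling unchanged and the former static constants become namespace-scope constexpr values. A small hypothetical call site, for illustration only:

#include "SessionConfigurationUtils.h"

using namespace android;
using namespace android::camera3;

// Vets a surface list the way the service does; this wrapper is not part of the change.
binder::Status vetSurfaces(size_t producerCount, bool deferredConsumer, int surfaceType) {
    // The former static constant is now a namespace-scope constexpr, referenced
    // exactly as before; checkSurfaceType() enforces it along with the surface-type rules.
    static_assert(SessionConfigurationUtils::MAX_SURFACES_PER_STREAM == 4,
            "per-stream surface limit");
    return SessionConfigurationUtils::checkSurfaceType(producerCount, deferredConsumer,
            surfaceType);
}
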
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
new file mode 100644
index 0000000..4e6f832
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/properties.h>
+
+#include "SessionConfigurationUtils.h"
+#include "SessionConfigurationUtilsHidl.h"
+
+#include "../CameraService.h"
+#include "device3/aidl/AidlCamera3Device.h"
+#include "device3/hidl/HidlCamera3Device.h"
+#include "device3/Camera3OutputStream.h"
+
+using android::camera3::OutputStreamInfo;
+using android::hardware::camera2::ICameraDeviceUser;
+using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
+
+namespace android {
+namespace camera3 {
+
+namespace SessionConfigurationUtils {
+
+status_t
+convertAidlToHidl37StreamCombination(
+        const aidl::android::hardware::camera::device::StreamConfiguration &aidl,
+        hardware::camera::device::V3_7::StreamConfiguration &hidl) {
+    hidl.operationMode =
+        static_cast<hardware::camera::device::V3_2::StreamConfigurationMode>(aidl.operationMode);
+    if (aidl.streamConfigCounter < 0) {
+        return BAD_VALUE;
+    }
+    hidl.streamConfigCounter = static_cast<uint32_t>(aidl.streamConfigCounter);
+    hidl.multiResolutionInputImage = aidl.multiResolutionInputImage;
+    hidl.sessionParams = aidl.sessionParams.metadata;
+    hidl.streams.resize(aidl.streams.size());
+    size_t i = 0;
+    for (const auto &stream : aidl.streams) {
+        if (static_cast<int>(stream.dynamicRangeProfile) !=
+                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+            ALOGE("%s  Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
+                    stream.dynamicRangeProfile);
+            return BAD_VALUE;
+        }
+
+        if (static_cast<int>(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+            ALOGE("%s Stream use case %" PRId64 "not supported by HIDL", __FUNCTION__,
+                    stream.useCase);
+            return BAD_VALUE;
+        }
+
+        // hidl v3_7
+        hidl.streams[i].groupId = stream.groupId;
+        hidl.streams[i].sensorPixelModesUsed.resize(stream.sensorPixelModesUsed.size());
+        size_t j = 0;
+        for (const auto &mode : stream.sensorPixelModesUsed) {
+            hidl.streams[i].sensorPixelModesUsed[j] =
+                    static_cast<CameraMetadataEnumAndroidSensorPixelMode>(mode);
+            j++;
+        }
+
+        // hidl v3_4
+        hidl.streams[i].v3_4.physicalCameraId = stream.physicalCameraId;
+
+        if (stream.bufferSize < 0) {
+            return BAD_VALUE;
+        }
+        hidl.streams[i].v3_4.bufferSize = static_cast<uint32_t>(stream.bufferSize);
+
+        // hidl v3_2
+        hidl.streams[i].v3_4.v3_2.id = stream.id;
+        hidl.streams[i].v3_4.v3_2.format =
+                static_cast<hardware::graphics::common::V1_0::PixelFormat>(stream.format);
+
+        if (stream.width < 0 || stream.height < 0) {
+            return BAD_VALUE;
+        }
+        hidl.streams[i].v3_4.v3_2.width = static_cast<uint32_t>(stream.width);
+        hidl.streams[i].v3_4.v3_2.height = static_cast<uint32_t>(stream.height);
+        hidl.streams[i].v3_4.v3_2.usage =
+                static_cast<hardware::camera::device::V3_2::BufferUsageFlags>(stream.usage);
+        hidl.streams[i].v3_4.v3_2.streamType =
+                static_cast<hardware::camera::device::V3_2::StreamType>(stream.streamType);
+        hidl.streams[i].v3_4.v3_2.dataSpace =
+                static_cast<hardware::camera::device::V3_2::DataspaceFlags>(stream.dataSpace);
+        hidl.streams[i].v3_4.v3_2.rotation =
+                static_cast<hardware::camera::device::V3_2::StreamRotation>(stream.rotation);
+        i++;
+    }
+    return OK;
+}
+
+binder::Status
+convertToHALStreamCombination(
+        const SessionConfiguration& sessionConfiguration,
+        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
+        bool overrideForPerfClass, bool *earlyExit) {
+    aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
+    auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
+            getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
+            earlyExit);
+    if (!ret.isOk()) {
+        return ret;
+    }
+    if (earlyExit != nullptr && *earlyExit) {
+        return binder::Status::ok();
+    }
+
+    if (convertAidlToHidl37StreamCombination(aidlStreamConfiguration, streamConfiguration) != OK) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Invalid AIDL->HIDL3.7 conversion");
+    }
+
+    return binder::Status::ok();
+}
+
+bool convertHALStreamCombinationFromV37ToV34(
+        hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+        const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
+    if (streamConfigV37.multiResolutionInputImage) {
+        // ICameraDevice older than 3.7 doesn't support multi-resolution input image.
+        return false;
+    }
+
+    streamConfigV34.streams.resize(streamConfigV37.streams.size());
+    for (size_t i = 0; i < streamConfigV37.streams.size(); i++) {
+        if (streamConfigV37.streams[i].groupId != -1) {
+            // ICameraDevice older than 3.7 doesn't support multi-resolution output
+            // image
+            return false;
+        }
+        streamConfigV34.streams[i] = streamConfigV37.streams[i].v3_4;
+    }
+    streamConfigV34.operationMode = streamConfigV37.operationMode;
+    streamConfigV34.sessionParams = streamConfigV37.sessionParams;
+
+    return true;
+}
+
+} // namespace SessionConfigurationUtils
+} // namespace camera3
+} // namespace android
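A sketch of the fallback path these helpers implement, with a hypothetical buildLegacyConfig() wrapper: the shared AIDL-based conversion runs first, its result is lowered to a V3_7 configuration, and HALs older than 3.7 additionally need the V3_4 down-conversion, which refuses multi-resolution streams.

#include "SessionConfigurationUtilsHidl.h"

using namespace android;
using namespace android::camera3;

static bool buildLegacyConfig(const SessionConfiguration& session, const String8& cameraId,
        const CameraMetadata& deviceInfo, metadataGetter getMetadata,
        const std::vector<std::string>& physicalIds,
        hardware::camera::device::V3_4::StreamConfiguration* outV34) {
    hardware::camera::device::V3_7::StreamConfiguration configV37;
    bool earlyExit = false;
    binder::Status res = SessionConfigurationUtils::convertToHALStreamCombination(
            session, cameraId, deviceInfo, getMetadata, physicalIds, configV37,
            /*overrideForPerfClass*/ false, &earlyExit);
    if (!res.isOk() || earlyExit) {
        return false;
    }
    // Fails for multi-resolution input/output streams, which V3_4 cannot express.
    return SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(*outV34, configV37);
}
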
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
new file mode 100644
index 0000000..c47abe0
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_SESSION_CONFIGURATION_UTILS_HIDL_H
+#define ANDROID_SERVERS_CAMERA_SESSION_CONFIGURATION_UTILS_HIDL_H
+
+#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
+
+#include <utils/SessionConfigurationUtils.h>
+
+// Convenience methods for constructing binder::Status objects for error returns
+
+namespace android {
+namespace camera3 {
+
+namespace SessionConfigurationUtils {
+
+// utility function to convert AIDL SessionConfiguration to HIDL
+// streamConfiguration. Also checks for validity of SessionConfiguration and
+// returns a non-ok binder::Status if the passed in session configuration
+// isn't valid.
+binder::Status
+convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
+        const String8 &cameraId, const CameraMetadata &deviceInfo,
+        metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+        hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
+        bool overrideForPerfClass, bool *earlyExit);
+
+// Utility function to convert a V3_7::StreamConfiguration to
+// V3_4::StreamConfiguration. Return false if the original V3_7 configuration cannot
+// be used by older version HAL.
+bool convertHALStreamCombinationFromV37ToV34(
+        hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+        const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37);
+} // SessionConfigurationUtils
+} // camera3
+} // android
+
+#endif
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
new file mode 100644
index 0000000..1efdc60
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SessionConfigurationUtilsHost.h"
+
+namespace android {
+namespace camera3 {
+namespace SessionConfigurationUtils {
+
+int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution) {
+    if (!maxResolution) {
+        return defaultTag;
+    }
+    switch (defaultTag) {
+        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS:
+            return ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
+            return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
+            return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+        case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
+            return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_INTRINSIC_CALIBRATION:
+            return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
+        case ANDROID_LENS_DISTORTION:
+            return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+        default:
+            ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
+                    defaultTag);
+            return -1;
+    }
+    return -1;
+}
+
+bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+    camera_metadata_ro_entry_t entryCap;
+    entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    // Go through the capabilities and check if it has
+    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+    for (size_t i = 0; i < entryCap.count; ++i) {
+        uint8_t capability = entryCap.data.u8[i];
+        if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
+        int32_t arrayTag, int32_t *width, int32_t *height) {
+    if (width == nullptr || height == nullptr) {
+        ALOGE("%s: width / height nullptr", __FUNCTION__);
+        return false;
+    }
+    camera_metadata_ro_entry_t entry;
+    entry = deviceInfo->find(arrayTag);
+    if (entry.count != 4) return false;
+    *width = entry.data.i32[2];
+    *height = entry.data.i32[3];
+    return true;
+}
+
+} // namespace SessionConfigurationUtils
+} // namespace camera3
+} // namespace android
\ No newline at end of file
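The host-side helpers above are what callers use to pick the maximum-resolution variant of a static-metadata tag on ultra-high-resolution sensors. A brief illustrative sketch (the lookup wrapper itself is hypothetical):

#include "SessionConfigurationUtilsHost.h"

using namespace android;
namespace scu = android::camera3::SessionConfigurationUtils;

// Looks up the scaler stream configurations, switching to the
// MAXIMUM_RESOLUTION variant of the tag when the sensor supports it.
static camera_metadata_ro_entry_t findScalerStreamConfigs(const CameraMetadata& deviceInfo) {
    bool maxRes = scu::isUltraHighResolutionSensor(deviceInfo);
    int32_t tag = scu::getAppropriateModeTag(
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxRes);
    // getAppropriateModeTag() returns -1 only for tags without a
    // maximum-resolution counterpart, which does not apply here.
    return deviceInfo.find(tag);
}
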
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
new file mode 100644
index 0000000..45b1e91
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_SESSION_CONFIGURATION_UTILS_HOST_H
+#define ANDROID_SERVERS_CAMERA_SESSION_CONFIGURATION_UTILS_HOST_H
+
+#include "camera/CameraMetadata.h"
+
+namespace android {
+namespace camera3 {
+namespace SessionConfigurationUtils {
+
+bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+
+int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
+
+bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo, int32_t arrayTag,
+        int32_t *width, int32_t *height);
+
+} // SessionConfigurationUtils
+} // camera3
+} // android
+
+#endif
\ No newline at end of file
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
index 7a7707c..c3aac72 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -60,6 +60,7 @@
     mErrorResultCount = 0;
     mCounterStopped = false;
     mDeviceError = false;
+    mUserTag.clear();
     for (auto& streamStats : mStatsMap) {
         StreamStats& streamStat = streamStats.second;
         streamStat.mRequestedFrameCount = 0;
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
index c23abb6..2936531 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.h
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -84,6 +84,7 @@
     int64_t mErrorResultCount;
     bool mCounterStopped;
     bool mDeviceError;
+    std::string mUserTag;
     // Map from stream id to stream statistics
     std::map<int, StreamStats> mStatsMap;
 };
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 262f962..461f5e9 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -24,6 +24,7 @@
 #include <utils/Log.h>
 #include <camera/VendorTagDescriptor.h>
 #include <camera_metadata_hidden.h>
+#include <device3/Camera3Stream.h>
 
 namespace android {
 
@@ -112,11 +113,15 @@
     mLastMonitoredResultValues.clear();
     mLastMonitoredPhysicalRequestKeys.clear();
     mLastMonitoredPhysicalResultKeys.clear();
+    mLastStreamIds.clear();
+    mLastInputStreamId = -1;
 }
 
 void TagMonitor::monitorMetadata(eventSource source, int64_t frameNumber, nsecs_t timestamp,
         const CameraMetadata& metadata,
-        const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
+        const std::unordered_map<std::string, CameraMetadata>& physicalMetadata,
+        const camera3::camera_stream_buffer_t *outputBuffers, uint32_t numOutputBuffers,
+        int32_t inputStreamId) {
     if (!mMonitoringEnabled) return;
 
     std::lock_guard<std::mutex> lock(mMonitorMutex);
@@ -124,19 +129,27 @@
     if (timestamp == 0) {
         timestamp = systemTime(SYSTEM_TIME_BOOTTIME);
     }
-
+    std::unordered_set<int32_t> outputStreamIds;
+    for (size_t i = 0; i < numOutputBuffers; i++) {
+        const camera3::camera_stream_buffer_t *src = outputBuffers + i;
+        int32_t streamId = camera3::Camera3Stream::cast(src->stream)->getId();
+        outputStreamIds.emplace(streamId);
+    }
     std::string emptyId;
     for (auto tag : mMonitoredTagList) {
-        monitorSingleMetadata(source, frameNumber, timestamp, emptyId, tag, metadata);
+        monitorSingleMetadata(source, frameNumber, timestamp, emptyId, tag, metadata,
+                outputStreamIds, inputStreamId);
 
         for (auto& m : physicalMetadata) {
-            monitorSingleMetadata(source, frameNumber, timestamp, m.first, tag, m.second);
+            monitorSingleMetadata(source, frameNumber, timestamp, m.first, tag, m.second,
+                    outputStreamIds, inputStreamId);
         }
     }
 }
 
 void TagMonitor::monitorSingleMetadata(eventSource source, int64_t frameNumber, nsecs_t timestamp,
-        const std::string& cameraId, uint32_t tag, const CameraMetadata& metadata) {
+        const std::string& cameraId, uint32_t tag, const CameraMetadata& metadata,
+        const std::unordered_set<int32_t> &outputStreamIds, int32_t inputStreamId) {
 
     CameraMetadata &lastValues = (source == REQUEST) ?
             (cameraId.empty() ? mLastMonitoredRequestValues :
@@ -177,13 +190,22 @@
             // No last entry, so always consider to be different
             isDifferent = true;
         }
-
+        // Also monitor when the stream ids change, this helps visually see what
+        // monitored metadata values are for capture requests with different
+        // stream ids.
+        if (source == REQUEST &&
+                (inputStreamId != mLastInputStreamId || outputStreamIds != mLastStreamIds)) {
+            mLastInputStreamId = inputStreamId;
+            mLastStreamIds = outputStreamIds;
+            isDifferent = true;
+        }
         if (isDifferent) {
             ALOGV("%s: Tag %s changed", __FUNCTION__,
                   get_local_camera_metadata_tag_name_vendor_id(
                           tag, mVendorTagId));
             lastValues.update(entry);
-            mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId);
+            mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId,
+                    outputStreamIds, inputStreamId);
         }
     } else if (lastEntry.count > 0) {
         // Value has been removed
@@ -195,7 +217,10 @@
         entry.type = get_local_camera_metadata_tag_type_vendor_id(tag,
                 mVendorTagId);
         entry.count = 0;
-        mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId);
+        mLastInputStreamId = inputStreamId;
+        mLastStreamIds = outputStreamIds;
+        mMonitoringEvents.emplace(source, frameNumber, timestamp, entry, cameraId, outputStreamIds,
+                inputStreamId);
     }
 }
 
@@ -214,37 +239,59 @@
     } else {
         dprintf(fd, "     Tag monitoring disabled (enable with -m <name1,..,nameN>)\n");
     }
-    if (mMonitoringEvents.size() > 0) {
-        dprintf(fd, "     Monitored tag event log:\n");
-        for (const auto& event : mMonitoringEvents) {
-            int indentation = (event.source == REQUEST) ? 15 : 30;
-            dprintf(fd, "        f%d:%" PRId64 "ns:%*s%*s%s.%s: ",
-                    event.frameNumber, event.timestamp,
-                    2, event.cameraId.c_str(),
-                    indentation,
-                    event.source == REQUEST ? "REQ:" : "RES:",
-                    get_local_camera_metadata_section_name_vendor_id(event.tag,
-                            mVendorTagId),
-                    get_local_camera_metadata_tag_name_vendor_id(event.tag,
-                            mVendorTagId));
-            if (event.newData.size() == 0) {
-                dprintf(fd, " (Removed)\n");
-            } else {
-                printData(fd, event.newData.data(), event.tag,
-                        event.type, event.newData.size() / camera_metadata_type_size[event.type],
-                        indentation + 18);
-            }
-        }
-    }
 
+    if (mMonitoringEvents.size() == 0) { return; }
+
+    dprintf(fd, "     Monitored tag event log:\n");
+
+    std::vector<std::string> eventStrs;
+    dumpMonitoredTagEventsToVectorLocked(eventStrs);
+    for (const std::string &eventStr : eventStrs) {
+        dprintf(fd, "        %s", eventStr.c_str());
+    }
 }
 
-// TODO: Consolidate with printData from camera_metadata.h
+void TagMonitor::getLatestMonitoredTagEvents(std::vector<std::string> &out) {
+    std::lock_guard<std::mutex> lock(mMonitorMutex);
+    dumpMonitoredTagEventsToVectorLocked(out);
+}
+
+void TagMonitor::dumpMonitoredTagEventsToVectorLocked(std::vector<std::string> &vec) {
+    if (mMonitoringEvents.size() == 0) { return; }
+
+    for (const auto& event : mMonitoringEvents) {
+        int indentation = (event.source == REQUEST) ? 15 : 30;
+        String8 eventString = String8::format("f%d:%" PRId64 "ns:%*s%*s%s.%s: ",
+                event.frameNumber, event.timestamp,
+                2, event.cameraId.c_str(),
+                indentation,
+                event.source == REQUEST ? "REQ:" : "RES:",
+                get_local_camera_metadata_section_name_vendor_id(event.tag, mVendorTagId),
+                get_local_camera_metadata_tag_name_vendor_id(event.tag, mVendorTagId));
+        if (event.newData.size() == 0) {
+            eventString += " (Removed)";
+        } else {
+            eventString += getEventDataString(event.newData.data(),
+                                    event.tag,
+                                    event.type,
+                                    event.newData.size() / camera_metadata_type_size[event.type],
+                                    indentation + 18,
+                                    event.outputStreamIds,
+                                    event.inputStreamId);
+        }
+        vec.emplace_back(eventString.string());
+    }
+}
 
 #define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 29
 
-void TagMonitor::printData(int fd, const uint8_t *data_ptr, uint32_t tag,
-        int type, int count, int indentation) {
+String8 TagMonitor::getEventDataString(const uint8_t* data_ptr,
+                                    uint32_t tag,
+                                    int type,
+                                    int count,
+                                    int indentation,
+                                    const std::unordered_set<int32_t>& outputStreamIds,
+                                    int32_t inputStreamId) {
     static int values_per_line[NUM_TYPES] = {
         [TYPE_BYTE]     = 16,
         [TYPE_INT32]    = 8,
@@ -253,6 +300,7 @@
         [TYPE_DOUBLE]   = 4,
         [TYPE_RATIONAL] = 4,
     };
+
     size_t type_size = camera_metadata_type_size[type];
     char value_string_tmp[CAMERA_METADATA_ENUM_STRING_MAX_SIZE];
     uint32_t value;
@@ -260,10 +308,11 @@
     int lines = count / values_per_line[type];
     if (count % values_per_line[type] != 0) lines++;
 
+    String8 returnStr = String8();
     int index = 0;
     int j, k;
     for (j = 0; j < lines; j++) {
-        dprintf(fd, "%*s[", (j != 0) ? indentation + 4 : 0, "");
+        returnStr.appendFormat("%*s[", (j != 0) ? indentation + 4 : 0, "");
         for (k = 0;
              k < values_per_line[type] && count > 0;
              k++, count--, index += type_size) {
@@ -276,10 +325,9 @@
                                                      value_string_tmp,
                                                      sizeof(value_string_tmp))
                         == OK) {
-                        dprintf(fd, "%s ", value_string_tmp);
+                        returnStr += value_string_tmp;
                     } else {
-                        dprintf(fd, "%hhu ",
-                                *(data_ptr + index));
+                        returnStr.appendFormat("%hhu", *(data_ptr + index));
                     }
                     break;
                 case TYPE_INT32:
@@ -290,49 +338,57 @@
                                                      value_string_tmp,
                                                      sizeof(value_string_tmp))
                         == OK) {
-                        dprintf(fd, "%s ", value_string_tmp);
+                        returnStr += value_string_tmp;
                     } else {
-                        dprintf(fd, "%" PRId32 " ",
-                                *(int32_t*)(data_ptr + index));
+                        returnStr.appendFormat("%" PRId32 " ", *(int32_t*)(data_ptr + index));
                     }
                     break;
                 case TYPE_FLOAT:
-                    dprintf(fd, "%0.8f ",
-                            *(float*)(data_ptr + index));
+                    returnStr.appendFormat("%0.8f", *(float*)(data_ptr + index));
                     break;
                 case TYPE_INT64:
-                    dprintf(fd, "%" PRId64 " ",
-                            *(int64_t*)(data_ptr + index));
+                    returnStr.appendFormat("%" PRId64 " ", *(int64_t*)(data_ptr + index));
                     break;
                 case TYPE_DOUBLE:
-                    dprintf(fd, "%0.8f ",
-                            *(double*)(data_ptr + index));
+                    returnStr.appendFormat("%0.8f ", *(double*)(data_ptr + index));
                     break;
                 case TYPE_RATIONAL: {
                     int32_t numerator = *(int32_t*)(data_ptr + index);
                     int32_t denominator = *(int32_t*)(data_ptr + index + 4);
-                    dprintf(fd, "(%d / %d) ",
-                            numerator, denominator);
+                    returnStr.appendFormat("(%d / %d) ", numerator, denominator);
                     break;
                 }
                 default:
-                    dprintf(fd, "??? ");
+                    returnStr += "??? ";
             }
         }
-        dprintf(fd, "]\n");
+        returnStr += "] ";
+        if (!outputStreamIds.empty()) {
+            returnStr += "output stream ids: ";
+            for (const auto &id : outputStreamIds) {
+                returnStr.appendFormat(" %d ", id);
+            }
+        }
+        if (inputStreamId != -1) {
+            returnStr.appendFormat("input stream id: %d", inputStreamId);
+        }
+        returnStr += "\n";
     }
+    return returnStr;
 }
 
 template<typename T>
 TagMonitor::MonitorEvent::MonitorEvent(eventSource src, uint32_t frameNumber, nsecs_t timestamp,
-        const T &value, const std::string& cameraId) :
+        const T &value, const std::string& cameraId,
+        const std::unordered_set<int32_t> &outputStreamIds,
+        int32_t inputStreamId) :
         source(src),
         frameNumber(frameNumber),
         timestamp(timestamp),
         tag(value.tag),
         type(value.type),
         newData(value.data.u8, value.data.u8 + camera_metadata_type_size[value.type] * value.count),
-        cameraId(cameraId) {
+        cameraId(cameraId), outputStreamIds(outputStreamIds), inputStreamId(inputStreamId) {
 }
 
 TagMonitor::MonitorEvent::~MonitorEvent() {
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 413f502..088d6fe 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -30,6 +30,7 @@
 #include <system/camera_metadata.h>
 #include <system/camera_vendor_tags.h>
 #include <camera/CameraMetadata.h>
+#include <device3/InFlightRequest.h>
 
 namespace android {
 
@@ -66,19 +67,35 @@
     // Scan through the metadata and update the monitoring information
     void monitorMetadata(eventSource source, int64_t frameNumber,
             nsecs_t timestamp, const CameraMetadata& metadata,
-            const std::unordered_map<std::string, CameraMetadata>& physicalMetadata);
+            const std::unordered_map<std::string, CameraMetadata>& physicalMetadata,
+            const camera3::camera_stream_buffer_t *outputBuffers = nullptr,
+            uint32_t numOutputBuffers = 0, int32_t inputStreamId = -1);
 
     // Dump current event log to the provided fd
     void dumpMonitoredMetadata(int fd);
 
-  private:
+    // Dumps the latest monitored Tag events to the passed vector.
+    // NOTE: The events are appended to the vector in reverse chronological order
+    // (i.e. most recent first)
+    void getLatestMonitoredTagEvents(std::vector<std::string> &out);
 
-    static void printData(int fd, const uint8_t *data_ptr, uint32_t tag,
-            int type, int count, int indentation);
+  private:
+    // Dumps monitored tag events to the passed vector without acquiring
+    // mMonitorMutex. mMonitorMutex must be acquired before calling this
+    // function.
+    void dumpMonitoredTagEventsToVectorLocked(std::vector<std::string> &out);
+
+    static String8 getEventDataString(const uint8_t *data_ptr,
+                                       uint32_t tag, int type,
+                                       int count,
+                                       int indentation,
+                                       const std::unordered_set<int32_t> &outputStreamIds,
+                                       int32_t inputStreamId);
 
     void monitorSingleMetadata(TagMonitor::eventSource source, int64_t frameNumber,
             nsecs_t timestamp, const std::string& cameraId, uint32_t tag,
-            const CameraMetadata& metadata);
+            const CameraMetadata& metadata, const std::unordered_set<int32_t> &outputStreamIds,
+            int32_t inputStreamId);
 
     std::atomic<bool> mMonitoringEnabled;
     std::mutex mMonitorMutex;
@@ -93,6 +110,9 @@
     std::unordered_map<std::string, CameraMetadata> mLastMonitoredPhysicalRequestKeys;
     std::unordered_map<std::string, CameraMetadata> mLastMonitoredPhysicalResultKeys;
 
+    int32_t mLastInputStreamId = -1;
+    std::unordered_set<int32_t> mLastStreamIds;
+
     /**
      * A monitoring event
      * Stores a new metadata field value and the timestamp at which it changed.
@@ -101,7 +121,8 @@
     struct MonitorEvent {
         template<typename T>
         MonitorEvent(eventSource src, uint32_t frameNumber, nsecs_t timestamp,
-                const T &newValue, const std::string& cameraId);
+                const T &newValue, const std::string& cameraId,
+                const std::unordered_set<int32_t> &outputStreamIds, int32_t inputStreamId);
         ~MonitorEvent();
 
         eventSource source;
@@ -111,6 +132,8 @@
         uint8_t type;
         std::vector<uint8_t> newData;
         std::string cameraId;
+        std::unordered_set<int32_t> outputStreamIds;
+        int32_t inputStreamId = -1;
     };
 
     // A ring buffer for tracking the last kMaxMonitorEvents metadata changes
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index bf4d524..4488efb 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -19,6 +19,7 @@
     name: "mediaswcodec",
     vendor_available: true,
     min_sdk_version: "29",
+    apex_available: ["com.android.media.swcodec"],
 
     srcs: [
         "main_swcodecservice.cpp",
diff --git a/services/mediacodec/OWNERS b/services/mediacodec/OWNERS
index c716cce..3453a76 100644
--- a/services/mediacodec/OWNERS
+++ b/services/mediacodec/OWNERS
@@ -1,2 +1,3 @@
 jeffv@google.com
-marcone@google.com
+essick@google.com
+wonsik@google.com
diff --git a/services/mediacodec/android.hardware.media.omx@1.0-service.rc b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
index 3ef9a85..845e5cc 100644
--- a/services/mediacodec/android.hardware.media.omx@1.0-service.rc
+++ b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
@@ -3,4 +3,4 @@
     user mediacodec
     group camera drmrpc mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 696b967..12cc32a 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -7,9 +7,15 @@
     default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libmedia_codecserviceregistrant",
     vendor_available: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
     srcs: [
         "CodecServiceRegistrant.cpp",
     ],
diff --git a/services/mediacodec/registrant/fuzzer/Android.bp b/services/mediacodec/registrant/fuzzer/Android.bp
new file mode 100644
index 0000000..43afbf1
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
+}
+
+cc_fuzz {
+    name: "codecServiceRegistrant_fuzzer",
+    srcs: [
+        "codecServiceRegistrant_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libmedia_codecserviceregistrant",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    defaults: [
+        "libcodec2-hidl-defaults",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/mediacodec/registrant/fuzzer/README.md b/services/mediacodec/registrant/fuzzer/README.md
new file mode 100644
index 0000000..0ffa063
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libmedia_codecserviceregistrant
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedia_codecserviceregistrant is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libmedia_codecserviceregistrant supports the following parameters:
+1. C2String (parameter name: `c2String`)
+2. Width (parameter name: `width`)
+3. Height (parameter name: `height`)
+4. SamplingRate (parameter name: `samplingRate`)
+5. Channels (parameter name: `channels`)
+6. Stream (parameter name: `stream`)
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|------------------|
+| `c2String` | `String` | Value obtained from FuzzedDataProvider |
+| `width` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `height` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `samplingRate` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `channels` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+| `stream` | `0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
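+
+A minimal sketch of that idea, assuming a hypothetical `FuzzParams` holder and
+`consumeParams()` helper (the actual logic lives in
+`codecServiceRegistrant_fuzzer.cpp`):
+
+```cpp
+#include <cstdint>
+#include <string>
+
+#include "fuzzer/FuzzedDataProvider.h"
+
+// Hypothetical container for the parameters listed above (illustrative name).
+struct FuzzParams {
+    std::string c2String;
+    uint32_t width;
+    uint32_t height;
+    uint32_t samplingRate;
+    uint32_t channels;
+    uint32_t stream;
+};
+
+// Every parameter is derived from the incoming data rather than hardcoded,
+// so distinct inputs exercise distinct configurations deterministically.
+static FuzzParams consumeParams(FuzzedDataProvider& fdp) {
+    FuzzParams p;
+    p.width = fdp.ConsumeIntegral<uint32_t>();
+    p.height = fdp.ConsumeIntegral<uint32_t>();
+    p.samplingRate = fdp.ConsumeIntegral<uint32_t>();
+    p.channels = fdp.ConsumeIntegral<uint32_t>();
+    p.stream = fdp.ConsumeIntegral<uint32_t>();
+    // Taken last: it may consume all remaining bytes of the input.
+    p.c2String = fdp.ConsumeRandomLengthString();
+    return p;
+}
+```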
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the libmedia_codecserviceregistrant module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
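+
+For illustration, a minimal sketch of the standard libFuzzer entry-point shape
+that achieves this; `processInput` is a hypothetical stand-in, and the real
+entry point is defined in `codecServiceRegistrant_fuzzer.cpp`:
+
+```cpp
+#include <cstddef>
+#include <cstdint>
+
+// Hypothetical stand-in for the plugin's processing routine; the real fuzzer
+// constructs and exercises the codec service registrant here instead.
+static void processInput(const uint8_t* data, size_t size) {
+    (void)data;
+    (void)size;
+}
+
+// libFuzzer invokes this once per generated input; the whole buffer is
+// forwarded untouched, so empty, huge, or malformed inputs are all accepted.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    processInput(data, size);
+    return 0;  // libFuzzer expects 0 here.
+}
+```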
+
+## Build
+
+This describes the steps to build the codecServiceRegistrant_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) codecServiceRegistrant_fuzzer
+```
+#### Steps to run
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/codecServiceRegistrant_fuzzer/codecServiceRegistrant_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
new file mode 100644
index 0000000..e5983e4
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "../CodecServiceRegistrant.cpp"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <C2Config.h>
+#include <C2Param.h>
+
+using namespace std;
+
+constexpr char kServiceName[] = "software";
+
+class CodecServiceRegistrantFuzzer {
+public:
+  void process(const uint8_t *data, size_t size);
+  ~CodecServiceRegistrantFuzzer() {
+    delete mH2C2;
+    if (mInputSize) {
+      delete mInputSize;
+    }
+    if (mSampleRateInfo) {
+      delete mSampleRateInfo;
+    }
+    if (mChannelCountInfo) {
+      delete mChannelCountInfo;
+    }
+  }
+
+private:
+  void initH2C2ComponentStore();
+  void invokeH2C2ComponentStore();
+  void invokeConfigSM();
+  void invokeQuerySM();
+  H2C2ComponentStore *mH2C2 = nullptr;
+  C2StreamPictureSizeInfo::input *mInputSize = nullptr;
+  C2StreamSampleRateInfo::output *mSampleRateInfo = nullptr;
+  C2StreamChannelCountInfo::output *mChannelCountInfo = nullptr;
+  C2Param::Index mIndex = C2StreamProfileLevelInfo::output::PARAM_TYPE;
+  C2StreamFrameRateInfo::output mFrameRate;
+  FuzzedDataProvider *mFDP = nullptr;
+};
+
+void CodecServiceRegistrantFuzzer::initH2C2ComponentStore() {
+  using namespace ::android::hardware::media::c2;
+  shared_ptr<C2ComponentStore> store =
+      android::GetCodec2PlatformComponentStore();
+  if (!store) {
+    return;
+  }
+  android::sp<V1_1::IComponentStore> storeV1_1 =
+      new V1_1::utils::ComponentStore(store);
+  if (storeV1_1->registerAsService(string(kServiceName)) != android::OK) {
+    return;
+  }
+  string const preferredStoreName = string(kServiceName);
+  sp<IComponentStore> preferredStore =
+      IComponentStore::getService(preferredStoreName.c_str());
+  mH2C2 = new H2C2ComponentStore(preferredStore);
+}
+
+void CodecServiceRegistrantFuzzer::invokeConfigSM() {
+  vector<C2Param *> configParams;
+  uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t samplingRate = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t channels = mFDP->ConsumeIntegral<uint32_t>();
+  if (mFDP->ConsumeBool()) {
+    mInputSize = new C2StreamPictureSizeInfo::input(0u, width, height);
+    configParams.push_back(mInputSize);
+  } else {
+    if (mFDP->ConsumeBool()) {
+      mSampleRateInfo = new C2StreamSampleRateInfo::output(0u, samplingRate);
+      configParams.push_back(mSampleRateInfo);
+    }
+    if (mFDP->ConsumeBool()) {
+      mChannelCountInfo = new C2StreamChannelCountInfo::output(0u, channels);
+      configParams.push_back(mChannelCountInfo);
+    }
+  }
+  vector<unique_ptr<C2SettingResult>> failures;
+  mH2C2->config_sm(configParams, &failures);
+}
+
+void CodecServiceRegistrantFuzzer::invokeQuerySM() {
+  vector<C2Param *> stackParams;
+  vector<C2Param::Index> heapParamIndices;
+  if (mFDP->ConsumeBool()) {
+    stackParams = {};
+    heapParamIndices = {};
+  } else {
+    uint32_t stream = mFDP->ConsumeIntegral<uint32_t>();
+    mFrameRate.setStream(stream);
+    stackParams.push_back(&mFrameRate);
+    heapParamIndices.push_back(mIndex);
+  }
+  vector<unique_ptr<C2Param>> heapParams;
+  mH2C2->query_sm(stackParams, heapParamIndices, &heapParams);
+}
+
+void CodecServiceRegistrantFuzzer::invokeH2C2ComponentStore() {
+  initH2C2ComponentStore();
+  shared_ptr<C2Component> component;
+  shared_ptr<C2ComponentInterface> interface;
+  string c2String = mFDP->ConsumeRandomLengthString();
+  mH2C2->createComponent(c2String, &component);
+  mH2C2->createInterface(c2String, &interface);
+  invokeConfigSM();
+  invokeQuerySM();
+
+  vector<shared_ptr<C2ParamDescriptor>> params;
+  mH2C2->querySupportedParams_nb(&params);
+
+  C2StoreIonUsageInfo usageInfo;
+  std::vector<C2FieldSupportedValuesQuery> query = {
+      C2FieldSupportedValuesQuery::Possible(
+          C2ParamField::Make(usageInfo, usageInfo.usage)),
+      C2FieldSupportedValuesQuery::Possible(
+          C2ParamField::Make(usageInfo, usageInfo.capacity)),
+  };
+  mH2C2->querySupportedValues_sm(query);
+
+  mH2C2->getName();
+  shared_ptr<C2ParamReflector> paramReflector = mH2C2->getParamReflector();
+  if (paramReflector) {
+    paramReflector->describe(C2ComponentDomainSetting::CORE_INDEX);
+  }
+  mH2C2->listComponents();
+  shared_ptr<C2GraphicBuffer> src;
+  shared_ptr<C2GraphicBuffer> dst;
+  mH2C2->copyBuffer(src, dst);
+}
+
+void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  invokeH2C2ComponentStore();
+  /** RegisterCodecServices is called here to improve code coverage */
+  /** as currently it is not called by codecServiceRegistrant       */
+  RegisterCodecServices();
+  delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  CodecServiceRegistrantFuzzer codecServiceRegistrantFuzzer;
+  codecServiceRegistrantFuzzer.process(data, size);
+  return 0;
+}
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 9058f10..41efce0 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -84,5 +84,6 @@
 getgid32: 1
 getegid32: 1
 getgroups32: 1
+sysinfo: 1
 
 @include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
index 4c51a9c..4317ccc 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
@@ -78,5 +78,16 @@
 getgid: 1
 getegid: 1
 getgroups: 1
+sysinfo: 1
+
+# Android profiler (heapprofd, traced_perf) additions, where not already
+# covered by the rest of the file, or by builtin minijail allow-listing of
+# logging-related syscalls.
+# TODO(b/197184220): this is a targeted addition for a specific investigation,
+# and addresses just the arm64 framework av service policies. In the future, we
+# should make this more general (e.g. a central file that can be @included in
+# other policy files).
+setsockopt: 1
+sendmsg: 1
 
 @include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm64.policy
diff --git a/services/mediaextractor/OWNERS b/services/mediaextractor/OWNERS
index c716cce..2a779c2 100644
--- a/services/mediaextractor/OWNERS
+++ b/services/mediaextractor/OWNERS
@@ -1,2 +1,3 @@
 jeffv@google.com
-marcone@google.com
+essick@google.com
+aquilescanta@google.com
diff --git a/services/mediaextractor/TEST_MAPPING b/services/mediaextractor/TEST_MAPPING
new file mode 100644
index 0000000..7a66eeb
--- /dev/null
+++ b/services/mediaextractor/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+    "presubmit": [
+        {
+            "name": "CtsMediaTranscodingTestCases"
+        }
+    ]
+}
diff --git a/services/mediaextractor/mediaextractor.rc b/services/mediaextractor/mediaextractor.rc
index 5fc2941..4fb50d0 100644
--- a/services/mediaextractor/mediaextractor.rc
+++ b/services/mediaextractor/mediaextractor.rc
@@ -3,4 +3,4 @@
     user mediaex
     group drmrpc mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
index 9bbd53b..e54c918 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
@@ -46,5 +46,16 @@
 # Required by Sanitizers
 sched_yield: 1
 
+# Android profiler (heapprofd, traced_perf) additions, where not already
+# covered by the rest of the file, or by builtin minijail allow-listing of
+# logging-related syscalls.
+# TODO(b/197184220): this is a targeted addition for a specific investigation,
+# and addresses just the arm64 framework av service policies. In the future, we
+# should make this more general (e.g. a central file that can be @included in
+# other policy files).
+setsockopt: 1
+sendmsg: 1
+set_tid_address: 1
+
 @include /apex/com.android.media/etc/seccomp_policy/crash_dump.arm64.policy
 @include /apex/com.android.media/etc/seccomp_policy/code_coverage.arm64.policy
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index cfc4c40..8088ef0 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -26,6 +26,7 @@
         "libmediautils",
         "libnblog",
         "libutils",
+        "packagemanager_aidl-cpp",
     ],
 
     cflags: [
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index b2c9465..11534bb 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -114,6 +114,7 @@
         "libmediautils",
         "libutils",
         "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
     header_libs: [
         "libaudioutils_headers",
@@ -172,6 +173,7 @@
         "libstatspull",
         "libstatssocket",
         "libutils",
+        "packagemanager_aidl-cpp",
     ],
 
     export_shared_lib_headers: [
@@ -179,15 +181,22 @@
         "libstatssocket",
     ],
 
+    // within the library, we use "xxx.h"
+    local_include_dirs: [
+        "include/mediametricsservice",
+    ],
+
+    // external parties use <mediametricsservice/xxx.h>
+    export_include_dirs: [
+        "include",
+    ],
+
     static_libs: [
         "libplatformprotos",
     ],
 
     header_libs: [
         "libaaudio_headers",
-    ],
-
-    include_dirs: [
-        "system/media/audio_utils/include",
+        "libaudioutils_headers",
     ],
 }
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 21768f8..99e3691 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -66,16 +66,7 @@
 }
 
 // The status variable contains status_t codes which are used by
-// the core audio framework.
-//
-// We also consider AAudio status codes as they are non-overlapping with status_t
-// and compiler checked here.
-//
-// Caution: As AAUDIO_ERROR codes have a unique range (AAUDIO_ERROR_BASE = -900),
-// overlap with status_t should not present an issue.
-//
-// See: system/core/libutils/include/utils/Errors.h
-//      frameworks/av/media/libaaudio/include/aaudio/AAudio.h
+// the core audio framework. We also consider AAudio status codes.
 //
 // Compare with mediametrics::statusToStatusString
 //
@@ -184,6 +175,39 @@
     "log_session_id",
 };
 
+static constexpr const char * const AudioRecordStatusFields[] {
+    "mediametrics_audiorecordstatus_reported",
+    "status",
+    "debug_message",
+    "status_subcode",
+    "uid",
+    "event",
+    "input_flags",
+    "source",
+    "encoding",
+    "channel_mask",
+    "buffer_frame_count",
+    "sample_rate",
+};
+
+static constexpr const char * const AudioTrackStatusFields[] {
+    "mediametrics_audiotrackstatus_reported",
+    "status",
+    "debug_message",
+    "status_subcode",
+    "uid",
+    "event",
+    "output_flags",
+    "content_type",
+    "usage",
+    "encoding",
+    "channel_mask",
+    "buffer_frame_count",
+    "sample_rate",
+    "speed",
+    "pitch",
+};
+
 static constexpr const char * const AudioDeviceConnectionFields[] = {
     "mediametrics_audiodeviceconnection_reported",
     "input_devices",
@@ -273,33 +297,35 @@
     ALOGD("%s", __func__);
 
     // Add action to save AnalyticsState if audioserver is restarted.
-    // This triggers on an item of "audio.flinger"
-    // with a property "event" set to "AudioFlinger" (the constructor).
+    // This triggers on AudioFlinger or AudioPolicy ctors and onFirstRef,
+    // as well as TimeCheck events.
     mActions.addAction(
         AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
         std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
         std::make_shared<AnalyticsActions::Function>(
             [this](const std::shared_ptr<const android::mediametrics::Item> &item){
-                ALOGW("(key=%s) Audioflinger constructor event detected", item->getKey().c_str());
-                mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
-                        *mAnalyticsState.get()));
-                // Note: get returns shared_ptr temp, whose lifetime is extended
-                // to end of full expression.
-                mAnalyticsState->clear();  // TODO: filter the analytics state.
-                // Perhaps report this.
-
-                // Set up a timer to expire the previous audio state to save space.
-                // Use the transaction log size as a cookie to see if it is the
-                // same as before.  A benign race is possible where a state is cleared early.
-                const size_t size = mPreviousAnalyticsState->transactionLog().size();
-                mTimedAction.postIn(
-                        std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
-                    if (mPreviousAnalyticsState->transactionLog().size() == size) {
-                        ALOGD("expiring previous audio state after %d seconds.",
-                                PREVIOUS_STATE_EXPIRE_SEC);
-                        mPreviousAnalyticsState->clear();  // removes data from the state.
-                    }
-                });
+                mHealth.onAudioServerStart(Health::Module::AUDIOFLINGER, item);
+            }));
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_POLICY "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mHealth.onAudioServerStart(Health::Module::AUDIOPOLICY, item);
+            }));
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mHealth.onAudioServerTimeout(Health::Module::AUDIOFLINGER, item);
+            }));
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_AUDIO_POLICY "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                mHealth.onAudioServerTimeout(Health::Module::AUDIOPOLICY, item);
             }));
 
     // Handle legacy aaudio playback stream statistics
@@ -525,6 +551,172 @@
     // Add to the heat map - we automatically track every item's status to see
     // the types of errors and the frequency of errors.
     mHeatMap.add(prefixKey, suffixKey, eventStr, statusString, uid, message, subCode);
+
+    // Certain keys/event pairs are sent to statsd.  If we get a match (true) we return early.
+    if (reportAudioRecordStatus(item, key, eventStr, statusString, uid, message, subCode)) return;
+    if (reportAudioTrackStatus(item, key, eventStr, statusString, uid, message, subCode)) return;
+}
+
+bool AudioAnalytics::reportAudioRecordStatus(
+        const std::shared_ptr<const mediametrics::Item>& item,
+        const std::string& key, const std::string& eventStr,
+        const std::string& statusString, uid_t uid, const std::string& message,
+        int32_t subCode) const
+{
+    // Note that the prefixes often end with a '.' so we use startsWith.
+    if (!startsWith(key, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD)) return false;
+    if (eventStr == AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE) {
+        const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
+
+        // currently we only send create status events.
+        const int32_t event = android::util::
+                MEDIAMETRICS_AUDIO_RECORD_STATUS_REPORTED__EVENT__AUDIO_RECORD_EVENT_CREATE;
+
+        // The following fields should all be present in a create event.
+        std::string flagsStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_ORIGINALFLAGS, &flagsStr),
+                "%s: %s missing %s field", __func__,
+                AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_ORIGINALFLAGS);
+        const auto flags = types::lookup<types::INPUT_FLAG, int32_t>(flagsStr);
+
+        // AMEDIAMETRICS_PROP_SESSIONID omitted from atom
+
+        std::string sourceStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_SOURCE, &sourceStr),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SOURCE);
+        const int32_t source = types::lookup<types::SOURCE_TYPE, int32_t>(sourceStr);
+
+        // AMEDIAMETRICS_PROP_SELECTEDDEVICEID omitted from atom
+
+        std::string encodingStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_ENCODING, &encodingStr),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_ENCODING);
+        const auto encoding = types::lookup<types::ENCODING, int32_t>(encodingStr);
+
+        int32_t channelMask = 0;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_CHANNELMASK, &channelMask),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_CHANNELMASK);
+        int32_t frameCount = 0;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_FRAMECOUNT, &frameCount),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_FRAMECOUNT);
+        int32_t sampleRate = 0;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SAMPLERATE);
+
+        const auto [ result, str ] = sendToStatsd(AudioRecordStatusFields,
+                CONDITION(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
+                , atom_status
+                , message.c_str()
+                , subCode
+                , uid
+                , event
+                , flags
+                , source
+                , encoding
+                , (int64_t)channelMask
+                , frameCount
+                , sampleRate
+                );
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mStatsdLog->log(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
+        return true;
+    }
+    return false;
+}
+
+bool AudioAnalytics::reportAudioTrackStatus(
+        const std::shared_ptr<const mediametrics::Item>& item,
+        const std::string& key, const std::string& eventStr,
+        const std::string& statusString, uid_t uid, const std::string& message,
+        int32_t subCode) const
+{
+    // Note that the prefixes often end with a '.' so we use startsWith.
+    if (!startsWith(key, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK)) return false;
+    if (eventStr == AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE) {
+        const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
+
+        // currently we only send create status events.
+        const int32_t event = android::util::
+                MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__EVENT__AUDIO_TRACK_EVENT_CREATE;
+
+        // The following fields should all be present in a create event.
+        std::string flagsStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_ORIGINALFLAGS, &flagsStr),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_ORIGINALFLAGS);
+        const auto flags = types::lookup<types::OUTPUT_FLAG, int32_t>(flagsStr);
+
+        // AMEDIAMETRICS_PROP_SESSIONID omitted from atom
+
+        std::string contentTypeStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_CONTENTTYPE, &contentTypeStr),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_CONTENTTYPE);
+        const auto contentType = types::lookup<types::CONTENT_TYPE, int32_t>(contentTypeStr);
+
+        std::string usageStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_USAGE, &usageStr),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_USAGE);
+        const auto usage = types::lookup<types::USAGE, int32_t>(usageStr);
+
+        // AMEDIAMETRICS_PROP_SELECTEDDEVICEID omitted from atom
+
+        std::string encodingStr;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_ENCODING, &encodingStr),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_ENCODING);
+        const auto encoding = types::lookup<types::ENCODING, int32_t>(encodingStr);
+
+        int32_t channelMask = 0;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_CHANNELMASK, &channelMask),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_CHANNELMASK);
+        int32_t frameCount = 0;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_FRAMECOUNT, &frameCount),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_FRAMECOUNT);
+        int32_t sampleRate = 0;
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate),
+                "%s: %s missing %s field",
+                __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_SAMPLERATE);
+        double speed = 0.f;  // default is 1.f
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_PLAYBACK_SPEED, &speed),
+                "%s: %s missing %s field",
+                __func__,
+                AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_SPEED);
+        double pitch = 0.f;  // default is 1.f
+        ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_PLAYBACK_PITCH, &pitch),
+                "%s: %s missing %s field",
+                __func__,
+                AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
+        const auto [ result, str ] = sendToStatsd(AudioTrackStatusFields,
+                CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
+                , atom_status
+                , message.c_str()
+                , subCode
+                , uid
+                , event
+                , flags
+                , contentType
+                , usage
+                , encoding
+                , (int64_t)channelMask
+                , frameCount
+                , sampleRate
+                , (float)speed
+                , (float)pitch
+                );
+        ALOGV("%s: statsd %s", __func__, str.c_str());
+        mStatsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
+        return true;
+    }
+    return false;
 }
 
 // HELPER METHODS
@@ -1056,10 +1248,10 @@
         if (channelMask != 0) {
             switch (direction) {
                 case 1: // Output, keep sync with AudioTypes#getAAudioDirection()
-                    channelCount = audio_channel_count_from_out_mask(channelMask);
+                    channelCount = (int32_t)audio_channel_count_from_out_mask(channelMask);
                     break;
                 case 2: // Input, keep sync with AudioTypes#getAAudioDirection()
-                    channelCount = audio_channel_count_from_in_mask(channelMask);
+                    channelCount = (int32_t)audio_channel_count_from_in_mask(channelMask);
                     break;
                 default:
                     ALOGW("Invalid direction %d", direction);
@@ -1200,4 +1392,138 @@
     }
 }
 
+// Create new state, typically occurs after an AudioFlinger ctor event.
+void AudioAnalytics::newState()
+{
+    mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
+            *mAnalyticsState.get()));
+    // Note: get returns shared_ptr temp, whose lifetime is extended
+    // to end of full expression.
+    mAnalyticsState->clear();  // TODO: filter the analytics state.
+    // Perhaps report this.
+
+    // Set up a timer to expire the previous audio state to save space.
+    // Use the transaction log size as a cookie to see if it is the
+    // same as before.  A benign race is possible where a state is cleared early.
+    const size_t size = mPreviousAnalyticsState->transactionLog().size();
+    mTimedAction.postIn(
+            std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
+        if (mPreviousAnalyticsState->transactionLog().size() == size) {
+            ALOGD("expiring previous audio state after %d seconds.",
+                    PREVIOUS_STATE_EXPIRE_SEC);
+            mPreviousAnalyticsState->clear();  // removes data from the state.
+        }
+    });
+}
+
+void AudioAnalytics::Health::onAudioServerStart(Module module,
+        const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+    const auto nowTime = std::chrono::system_clock::now();
+    if (module == Module::AUDIOFLINGER) {
+       {
+            std::lock_guard lg(mLock);
+            // reset state on AudioFlinger construction.
+            // AudioPolicy is created after AudioFlinger.
+            mAudioFlingerCtorTime = nowTime;
+            mSimpleLog.log("AudioFlinger ctor");
+        }
+        mAudioAnalytics.newState();
+        return;
+    }
+    if (module == Module::AUDIOPOLICY) {
+        // A start event occurs when audioserver
+        //
+        // (1) Starts the first time
+        // (2) Restarts because of the TimeCheck watchdog
+        // (3) Restarts not because of the TimeCheck watchdog.
+        int64_t executionTimeNs = 0;
+        (void)item->get(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, &executionTimeNs);
+        const float loadTimeMs = executionTimeNs * 1e-6f;
+        std::lock_guard lg(mLock);
+        const int64_t restarts = mStartCount;
+        if (mStopCount == mStartCount) {
+            mAudioPolicyCtorTime = nowTime;
+            ++mStartCount;
+            if (mStopCount == 0) {
+                // (1) First time initialization.
+                ALOGW("%s: (key=%s) AudioPolicy ctor, loadTimeMs:%f",
+                        __func__, item->getKey().c_str(), loadTimeMs);
+                mSimpleLog.log("AudioPolicy ctor, loadTimeMs:%f", loadTimeMs);
+            } else {
+                // (2) Previous failure caught due to TimeCheck.  We know how long restart takes.
+                const float restartMs =
+                        std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                                mAudioFlingerCtorTime - mStopTime).count();
+                ALOGW("%s: (key=%s) AudioPolicy ctor, "
+                        "restarts:%lld restartMs:%f loadTimeMs:%f",
+                        __func__, item->getKey().c_str(),
+                        (long long)restarts, restartMs, loadTimeMs);
+                mSimpleLog.log("AudioPolicy ctor restarts:%lld restartMs:%f loadTimeMs:%f",
+                        (long long)restarts, restartMs, loadTimeMs);
+            }
+        } else {
+            // (3) Previous failure is NOT due to TimeCheck, so we don't know the restart time.
+            // However we can estimate the uptime from the delta time from previous ctor.
+            const float uptimeMs =
+                    std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+                            nowTime - mAudioFlingerCtorTime).count();
+            mStopCount = mStartCount;
+            mAudioPolicyCtorTime = nowTime;
+            ++mStartCount;
+
+            ALOGW("%s: (key=%s) AudioPolicy ctor after uncaught failure, "
+                    "mStartCount:%lld mStopCount:%lld uptimeMs:%f loadTimeMs:%f",
+                    __func__, item->getKey().c_str(),
+                    (long long)mStartCount, (long long)mStopCount, uptimeMs, loadTimeMs);
+            mSimpleLog.log("AudioPolicy ctor after uncaught failure, "
+                    "restarts:%lld uptimeMs:%f loadTimeMs:%f",
+                    (long long)restarts, uptimeMs, loadTimeMs);
+        }
+    }
+}
+
+void AudioAnalytics::Health::onAudioServerTimeout(Module module,
+        const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+    std::string moduleName = getModuleName(module);
+    int64_t methodCode{};
+    std::string methodName;
+    (void)item->get(AMEDIAMETRICS_PROP_METHODCODE, &methodCode);
+    (void)item->get(AMEDIAMETRICS_PROP_METHODNAME, &methodName);
+
+    std::lock_guard lg(mLock);
+    if (mStopCount >= mStartCount) {
+        ALOGD("%s: (key=%s) %s timeout %s(%lld) "
+            "unmatched mStopCount(%lld) >= mStartCount(%lld), ignoring",
+            __func__, item->getKey().c_str(), moduleName.c_str(),
+            methodName.c_str(), (long long)methodCode,
+            (long long)mStopCount, (long long)mStartCount);
+        return;
+    }
+
+    const int64_t restarts = mStartCount - 1;
+    ++mStopCount;
+    mStopTime = std::chrono::system_clock::now();
+    const float uptimeMs = std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+            mStopTime - mAudioFlingerCtorTime).count();
+    ALOGW("%s: (key=%s) %s timeout %s(%lld) restarts:%lld uptimeMs:%f",
+         __func__, item->getKey().c_str(), moduleName.c_str(),
+         methodName.c_str(), (long long)methodCode,
+         (long long)restarts, uptimeMs);
+    mSimpleLog.log("%s timeout %s(%lld) restarts:%lld uptimeMs:%f",
+            moduleName.c_str(), methodName.c_str(), (long long)methodCode,
+            (long long)restarts, uptimeMs);
+}
+
+std::pair<std::string, int32_t> AudioAnalytics::Health::dump(
+        int32_t lines, const char *prefix) const
+{
+    std::lock_guard lg(mLock);
+    std::string s = mSimpleLog.dumpToString(prefix == nullptr ? "" : prefix, lines);
+    size_t n = std::count(s.begin(), s.end(), '\n');
+    return { s, n };
+}
+
+
 } // namespace android::mediametrics
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index ab74c8e..5787e9e 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -45,6 +45,10 @@
 #define AUDIO_POWER_USAGE_PROP_DURATION_NS    "durationNs" // int64
 #define AUDIO_POWER_USAGE_PROP_TYPE           "type"       // int32
 #define AUDIO_POWER_USAGE_PROP_VOLUME         "volume"     // double
+#define AUDIO_POWER_USAGE_PROP_MIN_VOLUME_DURATION_NS "minVolumeDurationNs" // int64
+#define AUDIO_POWER_USAGE_PROP_MIN_VOLUME             "minVolume"           // double
+#define AUDIO_POWER_USAGE_PROP_MAX_VOLUME_DURATION_NS "maxVolumeDurationNs" // int64
+#define AUDIO_POWER_USAGE_PROP_MAX_VOLUME             "maxVolume"           // double
 
 namespace android::mediametrics {
 
@@ -141,13 +145,34 @@
     double volume;
     if (!item->getDouble(AUDIO_POWER_USAGE_PROP_VOLUME, &volume)) return;
 
+    int64_t min_volume_duration_ns;
+    if (!item->getInt64(AUDIO_POWER_USAGE_PROP_MIN_VOLUME_DURATION_NS, &min_volume_duration_ns)) {
+        return;
+    }
+
+    double min_volume;
+    if (!item->getDouble(AUDIO_POWER_USAGE_PROP_MIN_VOLUME, &min_volume)) return;
+
+    int64_t max_volume_duration_ns;
+    if (!item->getInt64(AUDIO_POWER_USAGE_PROP_MAX_VOLUME_DURATION_NS, &max_volume_duration_ns)) {
+        return;
+    }
+
+    double max_volume;
+    if (!item->getDouble(AUDIO_POWER_USAGE_PROP_MAX_VOLUME, &max_volume)) return;
+
     const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
-    const float average_volume = (float)volume;
+    const int32_t min_volume_duration_secs = (int32_t)(min_volume_duration_ns / NANOS_PER_SECOND);
+    const int32_t max_volume_duration_secs = (int32_t)(max_volume_duration_ns / NANOS_PER_SECOND);
     const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
                                          audio_device,
                                          duration_secs,
-                                         average_volume,
-                                         type);
+                                         (float)volume,
+                                         type,
+                                         min_volume_duration_secs,
+                                         (float)min_volume,
+                                         max_volume_duration_secs,
+                                         (float)max_volume);
 
     std::stringstream log;
     log << "result:" << result << " {"
@@ -155,17 +180,43 @@
             << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
             << " audio_device:" << audio_device
             << " duration_secs:" << duration_secs
-            << " average_volume:" << average_volume
+            << " average_volume:" << (float)volume
             << " type:" << type
+            << " min_volume_duration_secs:" << min_volume_duration_secs
+            << " min_volume:" << (float)min_volume
+            << " max_volume_duration_secs:" << max_volume_duration_secs
+            << " max_volume:" << (float)max_volume
             << " }";
     mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
 }
 
+void AudioPowerUsage::updateMinMaxVolumeAndDuration(
+            const int64_t cur_max_volume_duration_ns, const double cur_max_volume,
+            const int64_t cur_min_volume_duration_ns, const double cur_min_volume,
+            int64_t& f_max_volume_duration_ns, double& f_max_volume,
+            int64_t& f_min_volume_duration_ns, double& f_min_volume)
+{
+    if (f_min_volume > cur_min_volume) {
+        f_min_volume = cur_min_volume;
+        f_min_volume_duration_ns = cur_min_volume_duration_ns;
+    } else if (f_min_volume == cur_min_volume) {
+        f_min_volume_duration_ns += cur_min_volume_duration_ns;
+    }
+    if (f_max_volume < cur_max_volume) {
+        f_max_volume = cur_max_volume;
+        f_max_volume_duration_ns = cur_max_volume_duration_ns;
+    } else if (f_max_volume == cur_max_volume) {
+        f_max_volume_duration_ns += cur_max_volume_duration_ns;
+    }
+}
+
 bool AudioPowerUsage::saveAsItem_l(
-        int32_t device, int64_t duration_ns, int32_t type, double average_vol)
+        int32_t device, int64_t duration_ns, int32_t type, double average_vol,
+        int64_t max_volume_duration_ns, double max_volume,
+        int64_t min_volume_duration_ns, double min_volume)
 {
     ALOGV("%s: (%#x, %d, %lld, %f)", __func__, device, type,
-                                   (long long)duration_ns, average_vol );
+                                   (long long)duration_ns, average_vol);
     if (duration_ns == 0) {
         return true; // skip duration 0 usage
     }
@@ -193,10 +244,36 @@
             item->setDouble(AUDIO_POWER_USAGE_PROP_VOLUME, final_volume);
             item->setTimestamp(systemTime(SYSTEM_TIME_REALTIME));
 
-            ALOGV("%s: update (%#x, %d, %lld, %f) --> (%lld, %f)", __func__,
+            // Update the max/min volume and duration
+            int64_t final_min_volume_duration_ns;
+            int64_t final_max_volume_duration_ns;
+            double final_min_volume;
+            double final_max_volume;
+
+            item->getInt64(AUDIO_POWER_USAGE_PROP_MIN_VOLUME_DURATION_NS,
+                           &final_min_volume_duration_ns);
+            item->getDouble(AUDIO_POWER_USAGE_PROP_MIN_VOLUME, &final_min_volume);
+            item->getInt64(AUDIO_POWER_USAGE_PROP_MAX_VOLUME_DURATION_NS,
+                           &final_max_volume_duration_ns);
+            item->getDouble(AUDIO_POWER_USAGE_PROP_MAX_VOLUME, &final_max_volume);
+            updateMinMaxVolumeAndDuration(max_volume_duration_ns, max_volume,
+                                          min_volume_duration_ns, min_volume,
+                                          final_max_volume_duration_ns, final_max_volume,
+                                          final_min_volume_duration_ns, final_min_volume);
+            item->setInt64(AUDIO_POWER_USAGE_PROP_MIN_VOLUME_DURATION_NS,
+                           final_min_volume_duration_ns);
+            item->setDouble(AUDIO_POWER_USAGE_PROP_MIN_VOLUME, final_min_volume);
+            item->setInt64(AUDIO_POWER_USAGE_PROP_MAX_VOLUME_DURATION_NS,
+                           final_max_volume_duration_ns);
+            item->setDouble(AUDIO_POWER_USAGE_PROP_MAX_VOLUME, final_max_volume);
+
+            ALOGV("%s: update (%#x, %d, %lld, %f) --> (%lld, %f) min(%lld, %f) max(%lld, %f)",
+                  __func__,
                   device, type,
                   (long long)item_duration_ns, item_volume,
-                  (long long)final_duration_ns, final_volume);
+                  (long long)final_duration_ns, final_volume,
+                  (long long)final_min_volume_duration_ns, final_min_volume,
+                  (long long)final_max_volume_duration_ns, final_max_volume);
 
             return true;
         }
@@ -208,12 +285,18 @@
     sitem->setInt64(AUDIO_POWER_USAGE_PROP_DURATION_NS, duration_ns);
     sitem->setInt32(AUDIO_POWER_USAGE_PROP_TYPE, type);
     sitem->setDouble(AUDIO_POWER_USAGE_PROP_VOLUME, average_vol);
+    sitem->setInt64(AUDIO_POWER_USAGE_PROP_MIN_VOLUME_DURATION_NS, min_volume_duration_ns);
+    sitem->setDouble(AUDIO_POWER_USAGE_PROP_MIN_VOLUME, min_volume);
+    sitem->setInt64(AUDIO_POWER_USAGE_PROP_MAX_VOLUME_DURATION_NS, max_volume_duration_ns);
+    sitem->setDouble(AUDIO_POWER_USAGE_PROP_MAX_VOLUME, max_volume);
     mItems.emplace_back(sitem);
     return true;
 }
 
 bool AudioPowerUsage::saveAsItems_l(
-        int32_t device, int64_t duration_ns, int32_t type, double average_vol)
+        int32_t device, int64_t duration_ns, int32_t type, double average_vol,
+        int64_t max_volume_duration, double max_volume,
+        int64_t min_volume_duration, double min_volume)
 {
     ALOGV("%s: (%#x, %d, %lld, %f)", __func__, device, type,
                                    (long long)duration_ns, average_vol );
@@ -232,7 +315,9 @@
         int32_t tmp_device = device_bits & -device_bits; // get lowest bit
         device_bits ^= tmp_device;  // clear lowest bit
         tmp_device |= input_bit;    // restore input bit
-        ret = saveAsItem_l(tmp_device, duration_ns, type, average_vol);
+        ret = saveAsItem_l(tmp_device, duration_ns, type, average_vol,
+                           max_volume_duration, max_volume,
+                           min_volume_duration, min_volume);
 
         ALOGV("%s: device %#x recorded, remaining device_bits = %#x", __func__,
             tmp_device, device_bits);
@@ -250,9 +335,28 @@
         return;
     }
     double deviceVolume = 1.;
-    if (isTrack && !item->getDouble(AMEDIAMETRICS_PROP_DEVICEVOLUME, &deviceVolume)) {
-        return;
+    int64_t maxVolumeDurationNs = 0;
+    double maxVolume = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
+    int64_t minVolumeDurationNs = 0;
+    double minVolume = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
+    if (isTrack) {
+        if (!item->getDouble(AMEDIAMETRICS_PROP_DEVICEVOLUME, &deviceVolume)) {
+            return;
+        }
+        if (!item->getInt64(AMEDIAMETRICS_PROP_DEVICEMAXVOLUMEDURATIONNS, &maxVolumeDurationNs)) {
+            return;
+        }
+        if (!item->getDouble(AMEDIAMETRICS_PROP_DEVICEMAXVOLUME, &maxVolume)) {
+            return;
+        }
+        if (!item->getInt64(AMEDIAMETRICS_PROP_DEVICEMINVOLUMEDURATIONNS, &minVolumeDurationNs)) {
+            return;
+        }
+        if (!item->getDouble(AMEDIAMETRICS_PROP_DEVICEMINVOLUME, &minVolume)) {
+            return;
+        }
     }
+
     int32_t type = 0;
     std::string type_string;
     if ((isTrack && mAudioAnalytics->mAnalyticsState->timeMachine().get(
@@ -285,7 +389,8 @@
         ALOGV("device = %s => %d", device_strings.c_str(), device);
     }
     std::lock_guard l(mLock);
-    saveAsItems_l(device, deviceTimeNs, type, deviceVolume);
+    saveAsItems_l(device, deviceTimeNs, type, deviceVolume,
+                  maxVolumeDurationNs, maxVolume, minVolumeDurationNs, minVolume);
 }
 
 void AudioPowerUsage::checkMode(const std::shared_ptr<const mediametrics::Item>& item)
@@ -299,10 +404,17 @@
     if (mMode == "AUDIO_MODE_IN_CALL") { // leaving call mode
         const int64_t endCallNs = item->getTimestamp();
         const int64_t durationNs = endCallNs - mDeviceTimeNs;
+        const int64_t volumeDurationNs = endCallNs - mVolumeTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endCallNs - mVolumeTimeNs)) / (double)durationNs;
-            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+                    mVoiceVolume * double(volumeDurationNs)) / (double)durationNs;
+            updateMinMaxVolumeAndDuration(volumeDurationNs, mVoiceVolume,
+                          volumeDurationNs, mVoiceVolume,
+                          mMaxVoiceVolumeDurationNs, mMaxVoiceVolume,
+                          mMinVoiceVolumeDurationNs, mMinVoiceVolume);
+            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume,
+                          mMaxVoiceVolumeDurationNs, mMaxVoiceVolume,
+                          mMinVoiceVolumeDurationNs, mMinVoiceVolume);
         }
     } else if (mode == "AUDIO_MODE_IN_CALL") { // entering call mode
         mStartCallNs = item->getTimestamp(); // advisory only
@@ -327,10 +439,15 @@
     if (mMode == "AUDIO_MODE_IN_CALL") {
         const int64_t timeNs = item->getTimestamp();
         const int64_t durationNs = timeNs - mDeviceTimeNs;
+        const int64_t volumeDurationNs = timeNs - mVolumeTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(timeNs - mVolumeTimeNs)) / (double)durationNs;
+                    mVoiceVolume * double(volumeDurationNs)) / (double)durationNs;
             mVolumeTimeNs = timeNs;
+            updateMinMaxVolumeAndDuration(volumeDurationNs, mVoiceVolume,
+                          volumeDurationNs, mVoiceVolume,
+                          mMaxVoiceVolumeDurationNs, mMaxVoiceVolume,
+                          mMinVoiceVolumeDurationNs, mMinVoiceVolume);
         }
     }
     ALOGV("%s: new voice volume:%lf  old voice volume:%lf", __func__, voiceVolume, mVoiceVolume);
@@ -358,15 +475,26 @@
         // Save statistics
         const int64_t endDeviceNs = item->getTimestamp();
         const int64_t durationNs = endDeviceNs - mDeviceTimeNs;
+        const int64_t volumeDurationNs = endDeviceNs - mVolumeTimeNs;
         if (durationNs > 0) {
             mDeviceVolume = (mDeviceVolume * double(mVolumeTimeNs - mDeviceTimeNs) +
-                    mVoiceVolume * double(endDeviceNs - mVolumeTimeNs)) / (double)durationNs;
-            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume);
+                    mVoiceVolume * double(volumeDurationNs)) / (double)durationNs;
+            updateMinMaxVolumeAndDuration(volumeDurationNs, mVoiceVolume,
+                          volumeDurationNs, mVoiceVolume,
+                          mMaxVoiceVolumeDurationNs, mMaxVoiceVolume,
+                          mMinVoiceVolumeDurationNs, mMinVoiceVolume);
+            saveAsItems_l(mPrimaryDevice, durationNs, VOICE_CALL_TYPE, mDeviceVolume,
+                          mMaxVoiceVolumeDurationNs, mMaxVoiceVolume,
+                          mMinVoiceVolumeDurationNs, mMinVoiceVolume);
         }
         // reset statistics
         mDeviceVolume = 0;
         mDeviceTimeNs = endDeviceNs;
         mVolumeTimeNs = endDeviceNs;
+        mMaxVoiceVolume = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
+        mMinVoiceVolume = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
+        mMaxVoiceVolumeDurationNs = 0;
+        mMinVoiceVolumeDurationNs = 0;
     }
     ALOGV("%s: new primary device:%#x  old primary device:%#x", __func__, device, mPrimaryDevice);
     mPrimaryDevice = device;
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index b67967b..6e24a58 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -128,6 +128,9 @@
         {"AUDIO_DEVICE_OUT_BLE_HEADSET",               1LL << 31},
         {"AUDIO_DEVICE_OUT_BLE_SPEAKER",               1LL << 32},
         {"AUDIO_DEVICE_OUT_HDMI_EARC",                 1LL << 33},
+        // S values above
+        {"AUDIO_DEVICE_OUT_BLE_BROADCAST",             1LL << 34},
+        // T values above
     };
     return map;
 }
@@ -145,6 +148,8 @@
         {"MMAP_PLAYBACK", 5},          // Thread class for MMAP playback stream
         {"MMAP_CAPTURE",  6},          // Thread class for MMAP capture stream
         // R values above.
+        {"SPATIALIZER",   7},          // Thread class for SpatializerThread
+        // S values above.
     };
     return map;
 }
@@ -192,6 +197,31 @@
     return map;
 }
 
+const std::unordered_map<std::string, int32_t>& getStatusMap() {
+    // DO NOT MODIFY VALUES(OK to add new ones).
+    static std::unordered_map<std::string, int32_t> map {
+        {"",
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_OK,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_ARGUMENT,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_IO,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_MEMORY,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_SECURITY,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_STATE,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_TIMEOUT,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
+        {AMEDIAMETRICS_PROP_STATUS_VALUE_UNKNOWN,
+            util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
+    };
+    return map;
+}
+
 // Helper: Create the corresponding int32 from string flags split with '|'.
 template <typename Traits>
 int32_t int32FromFlags(const std::string &flags)
@@ -433,6 +463,17 @@
 }
 
 template <>
+int32_t lookup<STATUS>(const std::string &status)
+{
+    auto& map = getStatusMap();
+    auto it = map.find(status);
+    if (it == map.end()) {
+        return util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
+    }
+    return it->second;
+}
+
+template <>
 int32_t lookup<THREAD_TYPE>(const std::string &threadType)
 {
     auto& map = getAudioThreadTypeMap();
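Reviewer note: getStatusMap() and the new lookup<STATUS>() above follow a "string to statsd enum with a fallback" pattern: unmapped status strings resolve to ERROR_UNKNOWN instead of failing. A self-contained sketch of the same pattern is shown here; the Status enum and lookupStatus() are stand-ins, not the generated util:: constants used by the real code.

    #include <cstdio>
    #include <string>
    #include <unordered_map>

    // Stand-in values; the real code uses the generated
    // util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__* constants.
    enum Status : int32_t { NO_ERROR = 0, ERROR_ARGUMENT = 1, ERROR_UNKNOWN = 99 };

    // Map a status string to an integer, falling back to ERROR_UNKNOWN for
    // anything not in the table (the same shape as lookup<STATUS>()).
    int32_t lookupStatus(const std::string& status) {
        static const std::unordered_map<std::string, int32_t> map{
            {"",         NO_ERROR},   // empty string means success in the metrics item
            {"ok",       NO_ERROR},
            {"argument", ERROR_ARGUMENT},
        };
        const auto it = map.find(status);
        return it == map.end() ? ERROR_UNKNOWN : it->second;
    }

    int main() {
        std::printf("ok -> %d\n", lookupStatus("ok"));        // 0
        std::printf("bogus -> %d\n", lookupStatus("bogus"));  // 99 (fallback)
        return 0;
    }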
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 636b343..ff16b9e 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -327,6 +327,15 @@
                 result << "-- some lines may be truncated --\n";
             }
 
+            const int32_t healthLinesToDump = all ? INT32_MAX : 15;
+            result << "\nHealth Message Log:";
+            const auto [ healthDumpString, healthLines ] =
+                    mAudioAnalytics.dumpHealth(healthLinesToDump);
+            result << "\n" << healthDumpString;
+            if (healthLines == healthLinesToDump) {
+                result << "-- some lines may be truncated --\n";
+            }
+
             result << "\nLogSessionId:\n"
                    << mediametrics::ValidateId::get()->dump();
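Reviewer note: the health dump added above reuses the existing contract in this function: request at most N lines, and if exactly N came back, warn that output may be truncated. A minimal sketch of that contract follows; dumpUpTo() and buildReport() are hypothetical stand-ins for AudioAnalytics::dumpHealth() and the surrounding dump code.

    #include <cstdint>
    #include <cstdio>
    #include <sstream>
    #include <string>
    #include <utility>

    // Hypothetical dump source returning (text, number of lines emitted),
    // the same shape as AudioAnalytics::dumpHealth() in this patch.
    std::pair<std::string, int32_t> dumpUpTo(int32_t lines) {
        std::ostringstream ss;
        int32_t n = 0;
        for (; n < lines && n < 100; ++n) {   // pretend 100 lines are available
            ss << "health line " << n << "\n";
        }
        return {ss.str(), n};
    }

    std::string buildReport(bool all) {
        const int32_t linesToDump = all ? INT32_MAX : 15;
        std::ostringstream result;
        result << "\nHealth Message Log:";
        const auto [dumpString, dumpLines] = dumpUpTo(linesToDump);
        result << "\n" << dumpString;
        if (dumpLines == linesToDump) {       // hit the cap: output may be incomplete
            result << "-- some lines may be truncated --\n";
        }
        return result.str();
    }

    int main() {
        std::printf("%s", buildReport(false).c_str());
        return 0;
    }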
 
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index b03e518..84d494e 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -56,11 +56,11 @@
         "libstatssocket",
         "libutils",
         "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/services/mediametrics",
-        "system/media/audio_utils/include",
+    header_libs: [
+        "libaudioutils_headers",
     ],
 
     fuzz_config: {
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 8b0b479..433332c 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -19,15 +19,14 @@
  */
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/MediaMetricsItem.h>
+#include <mediametricsservice/AudioTypes.h>
+#include <mediametricsservice/MediaMetricsService.h>
+#include <mediametricsservice/StringUtils.h>
 #include <stdio.h>
 #include <string.h>
 #include <utils/Log.h>
 #include <algorithm>
 
-#include "AudioTypes.h"
-#include "MediaMetricsService.h"
-#include "StringUtils.h"
-
 using namespace android;
 
 // low water mark
@@ -48,6 +47,7 @@
     void invokeAudioAnalytics(const uint8_t *data, size_t size);
     void invokeTimedAction(const uint8_t *data, size_t size);
     void process(const uint8_t *data, size_t size);
+    std::atomic_int mValue = 0;
 };
 
 void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
@@ -342,11 +342,10 @@
 void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
     FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
     android::mediametrics::TimedAction timedAction;
-    std::atomic_int value = 0;
 
     while (fdp.remaining_bytes()) {
         timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
-                           [&value] { ++value; });
+                           [this] { ++mValue; });
         timedAction.size();
     }
 }
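Reviewer note: the fuzzer change above moves the counter from a stack-local std::atomic_int into the member mValue, presumably because TimedAction::postIn() defers the lambda, and a local captured by reference can be destroyed before the action runs. A stripped-down illustration of the hazard and the member-variable fix follows; DeferredActions and Fuzzer are simulated stand-ins, not the real TimedAction or fuzzer classes.

    #include <atomic>
    #include <cstdio>
    #include <functional>
    #include <vector>

    // Simulated stand-in for mediametrics::TimedAction: queue callbacks now,
    // run them later, which is enough to show the lifetime issue.
    struct DeferredActions {
        std::vector<std::function<void()>> queued;
        void postIn(std::function<void()> f) { queued.push_back(std::move(f)); }
        void runAll() { for (auto& f : queued) f(); }
    };

    struct Fuzzer {
        std::atomic_int mValue{0};   // member survives until the actions run

        void schedule(DeferredActions& actions) {
            // BAD (what the old code resembled): a local captured by reference is
            // destroyed when schedule() returns, before runAll() executes.
            //   std::atomic_int value = 0;
            //   actions.postIn([&value] { ++value; });   // dangling capture
            //
            // GOOD (what the patch does): capture `this` and touch the member.
            actions.postIn([this] { ++mValue; });
        }
    };

    int main() {
        DeferredActions actions;
        Fuzzer fuzzer;
        fuzzer.schedule(actions);
        actions.runAll();                       // runs after schedule() returned
        std::printf("mValue = %d\n", fuzzer.mValue.load());
        return 0;
    }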
diff --git a/services/mediametrics/AnalyticsActions.h b/services/mediametrics/include/mediametricsservice/AnalyticsActions.h
similarity index 100%
rename from services/mediametrics/AnalyticsActions.h
rename to services/mediametrics/include/mediametricsservice/AnalyticsActions.h
diff --git a/services/mediametrics/AnalyticsState.h b/services/mediametrics/include/mediametricsservice/AnalyticsState.h
similarity index 100%
rename from services/mediametrics/AnalyticsState.h
rename to services/mediametrics/include/mediametricsservice/AnalyticsState.h
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
similarity index 73%
rename from services/mediametrics/AudioAnalytics.h
rename to services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index 9b54cf3..5ee8c30 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -83,6 +83,15 @@
         return mHeatMap.dump(lines);
     }
 
+    /**
+     * Returns a pair consisting of the dump string and the number of lines in the string.
+     *
+     * Health dump.
+     */
+    std::pair<std::string, int32_t> dumpHealth(int32_t lines = INT32_MAX) const {
+        return mHealth.dump(lines);
+    }
+
     void clear() {
         // underlying state is locked.
         mPreviousAnalyticsState->clear();
@@ -122,6 +131,19 @@
      */
     void processStatus(const std::shared_ptr<const mediametrics::Item>& item);
 
+    // Specific reporting methods
+    bool reportAudioRecordStatus(
+            const std::shared_ptr<const mediametrics::Item>& item,
+            const std::string& key, const std::string& eventStr,
+            const std::string& statusString, uid_t uid, const std::string& message,
+            int32_t subCode) const;
+
+    bool reportAudioTrackStatus(
+            const std::shared_ptr<const mediametrics::Item>& item,
+            const std::string& key, const std::string& eventStr,
+            const std::string& statusString, uid_t uid, const std::string& message,
+            int32_t subCode) const;
+
     // HELPER METHODS
     /**
      * Return the audio thread associated with an audio track name.
@@ -234,6 +256,67 @@
         AudioAnalytics &mAudioAnalytics;
     } mAAudioStreamInfo{*this};
 
+    // Create new state, typically occurs after an AudioFlinger ctor event.
+    void newState();
+
+    // Health is a nested class that tracks audioserver health properties
+    class Health {
+    public:
+        explicit Health(AudioAnalytics &audioAnalytics)
+            : mAudioAnalytics(audioAnalytics) {}
+
+        enum class Module {
+            AUDIOFLINGER,
+            AUDIOPOLICY,
+        };
+
+        const char *getModuleName(Module module) {
+            switch (module) {
+                case Module::AUDIOFLINGER: return "AudioFlinger";
+                case Module::AUDIOPOLICY: return "AudioPolicy";
+            }
+            return "Unknown";
+        }
+
+        // Called when we believe audioserver starts (AudioFlinger ctor)
+        void onAudioServerStart(Module module,
+                const std::shared_ptr<const android::mediametrics::Item> &item);
+
+        // Called when we believe audioserver crashes (TimeCheck timeouts).
+        void onAudioServerTimeout(Module module,
+                const std::shared_ptr<const android::mediametrics::Item> &item);
+
+        std::pair<std::string, int32_t> dump(
+                int32_t lines = INT32_MAX, const char *prefix = nullptr) const;
+
+    private:
+        AudioAnalytics& mAudioAnalytics;
+
+        mutable std::mutex mLock;
+
+        // Life cycle of AudioServer
+        // mAudioFlingerCtorTime
+        // mAudioPolicyCtorTime
+        // mAudioPolicyCtorDoneTime
+        // ...
+        // possibly mStopTime  (if TimeCheck thread)
+        //
+        // UpTime is measured from mStopTime - mAudioFlingerCtorTime.
+        //
+        // The stop events come from TimeCheck timeout aborts.  There may be other
+        // uncaught signals, e.g. SIGSEGV, that cause missing stop events.
+        std::chrono::system_clock::time_point mAudioFlingerCtorTime GUARDED_BY(mLock);
+        std::chrono::system_clock::time_point mAudioPolicyCtorTime GUARDED_BY(mLock);
+        std::chrono::system_clock::time_point mAudioPolicyCtorDoneTime GUARDED_BY(mLock);
+        std::chrono::system_clock::time_point mStopTime GUARDED_BY(mLock);
+
+        // mStartCount and mStopCount track the audioserver start and stop events.
+        int64_t mStartCount GUARDED_BY(mLock) = 0;
+        int64_t mStopCount GUARDED_BY(mLock) = 0;
+
+        SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
+    } mHealth{*this};
+
     AudioPowerUsage mAudioPowerUsage;
 };
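Reviewer note: the new Health class records audioserver constructor events and TimeCheck-timeout stops, and derives uptime from mStopTime minus mAudioFlingerCtorTime. The compact sketch below shows that start/stop bookkeeping shape with simplified names and locking; it is illustrative only, not the patched implementation.

    #include <chrono>
    #include <cstdint>
    #include <cstdio>
    #include <mutex>

    // Simplified stand-in for the Health bookkeeping: count starts and stops and
    // report uptime from the last start to the last stop.
    class HealthSketch {
    public:
        void onStart() {
            std::lock_guard<std::mutex> lg(mLock);
            mStartTime = std::chrono::system_clock::now();
            ++mStartCount;
        }
        void onTimeout() {
            std::lock_guard<std::mutex> lg(mLock);
            mStopTime = std::chrono::system_clock::now();
            ++mStopCount;
        }
        void dump() const {
            std::lock_guard<std::mutex> lg(mLock);
            const auto upMs = std::chrono::duration_cast<std::chrono::milliseconds>(
                    mStopTime - mStartTime).count();
            std::printf("starts:%lld stops:%lld last uptime:%lld ms\n",
                    (long long)mStartCount, (long long)mStopCount, (long long)upMs);
        }
    private:
        mutable std::mutex mLock;
        std::chrono::system_clock::time_point mStartTime;
        std::chrono::system_clock::time_point mStopTime;
        int64_t mStartCount = 0;
        int64_t mStopCount = 0;
    };

    int main() {
        HealthSketch h;
        h.onStart();
        h.onTimeout();   // e.g. a TimeCheck abort detected later
        h.dump();
        return 0;
    }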
 
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
similarity index 80%
rename from services/mediametrics/AudioPowerUsage.h
rename to services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
index 7021902..b7215e6 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
@@ -26,6 +26,7 @@
 
 namespace android::mediametrics {
 
+
 class AudioAnalytics;
 
 class AudioPowerUsage {
@@ -83,13 +84,21 @@
     static bool deviceFromString(const std::string& device_string, int32_t& device);
     static int32_t deviceFromStringPairs(const std::string& device_strings);
 private:
-    bool saveAsItem_l(int32_t device, int64_t duration, int32_t type, double average_vol)
-         REQUIRES(mLock);
+    bool saveAsItem_l(int32_t device, int64_t duration, int32_t type, double average_vol,
+                      int64_t max_volume_duration, double max_volume,
+                      int64_t min_volume_duration, double min_volume)
+                      REQUIRES(mLock);
     void sendItem(const std::shared_ptr<const mediametrics::Item>& item) const;
     void collect();
-    bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol)
-         REQUIRES(mLock);
-
+    bool saveAsItems_l(int32_t device, int64_t duration, int32_t type, double average_vol,
+                      int64_t max_volume_duration, double max_volume,
+                      int64_t min_volume_duration, double min_volume)
+                      REQUIRES(mLock);
+    void updateMinMaxVolumeAndDuration(
+            const int64_t cur_max_volume_duration_ns, const double cur_max_volume,
+            const int64_t cur_min_volume_duration_ns, const double cur_min_volume,
+            int64_t& f_max_volume_duration_ns, double& f_max_volume,
+            int64_t& f_min_volume_duration_ns, double& f_min_volume);
     AudioAnalytics * const mAudioAnalytics;
     const std::shared_ptr<StatsdLog> mStatsdLog;  // mStatsdLog is internally locked
     const bool mDisabled;
@@ -100,6 +109,10 @@
 
     double mVoiceVolume GUARDED_BY(mLock) = 0.;
     double mDeviceVolume GUARDED_BY(mLock) = 0.;
+    double mMaxVoiceVolume GUARDED_BY(mLock) = AMEDIAMETRICS_INITIAL_MAX_VOLUME;
+    double mMinVoiceVolume GUARDED_BY(mLock) = AMEDIAMETRICS_INITIAL_MIN_VOLUME;
+    int64_t mMaxVoiceVolumeDurationNs GUARDED_BY(mLock) = 0;
+    int64_t mMinVoiceVolumeDurationNs GUARDED_BY(mLock) = 0;
     int64_t mStartCallNs GUARDED_BY(mLock) = 0; // advisory only
     int64_t mVolumeTimeNs GUARDED_BY(mLock) = 0;
     int64_t mDeviceTimeNs GUARDED_BY(mLock) = 0;
diff --git a/services/mediametrics/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
similarity index 92%
rename from services/mediametrics/AudioTypes.h
rename to services/mediametrics/include/mediametricsservice/AudioTypes.h
index 4394d79..5dbff9b 100644
--- a/services/mediametrics/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -39,6 +39,10 @@
 };
 
 // Enumeration for all the string translations to integers (generally int32_t) unless noted.
+// This is used to index the template method below:
+// template <AudioEnumCategory C, typename T, typename S>  T lookup(const S &str);
+//
+// Okay to keep AudioEnumCategory alphabetical and add new translations in the middle.
 enum AudioEnumCategory {
     AAUDIO_DIRECTION,
     AAUDIO_PERFORMANCE_MODE,
@@ -51,6 +55,7 @@
     OUTPUT_DEVICE, // int64_t
     OUTPUT_FLAG,
     SOURCE_TYPE,
+    STATUS,
     STREAM_TYPE,
     THREAD_TYPE,
     TRACK_TRAITS,
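Reviewer note: the comment added above explains that AudioEnumCategory selects which translation table a single lookup<>() template consults. A toy version of that dispatch-by-enum pattern follows; the Category enum and the table contents are hypothetical, not the real maps in AudioTypes.cpp.

    #include <cstdint>
    #include <cstdio>
    #include <string>
    #include <unordered_map>

    // Toy categories; the real header lists STATUS, STREAM_TYPE, THREAD_TYPE, etc.
    enum Category { STATUS_CAT, THREAD_TYPE_CAT };

    // One lookup<>() entry point, specialized per category, each specialization
    // consulting its own string-to-int table (same shape as lookup<STATUS>()).
    template <Category C>
    int32_t lookup(const std::string& s);

    template <>
    int32_t lookup<STATUS_CAT>(const std::string& s) {
        static const std::unordered_map<std::string, int32_t> map{{"ok", 0}, {"timeout", 7}};
        const auto it = map.find(s);
        return it == map.end() ? -1 : it->second;
    }

    template <>
    int32_t lookup<THREAD_TYPE_CAT>(const std::string& s) {
        static const std::unordered_map<std::string, int32_t> map{{"MIXER", 1}, {"SPATIALIZER", 7}};
        const auto it = map.find(s);
        return it == map.end() ? -1 : it->second;
    }

    int main() {
        std::printf("%d %d\n",
                lookup<STATUS_CAT>("timeout"), lookup<THREAD_TYPE_CAT>("SPATIALIZER"));
        return 0;
    }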
diff --git a/services/mediametrics/HeatMap.h b/services/mediametrics/include/mediametricsservice/HeatMap.h
similarity index 100%
rename from services/mediametrics/HeatMap.h
rename to services/mediametrics/include/mediametricsservice/HeatMap.h
diff --git a/services/mediametrics/LruSet.h b/services/mediametrics/include/mediametricsservice/LruSet.h
similarity index 100%
rename from services/mediametrics/LruSet.h
rename to services/mediametrics/include/mediametricsservice/LruSet.h
diff --git a/services/mediametrics/MediaMetricsService.h b/services/mediametrics/include/mediametricsservice/MediaMetricsService.h
similarity index 100%
rename from services/mediametrics/MediaMetricsService.h
rename to services/mediametrics/include/mediametricsservice/MediaMetricsService.h
diff --git a/services/mediametrics/StatsdLog.h b/services/mediametrics/include/mediametricsservice/StatsdLog.h
similarity index 100%
rename from services/mediametrics/StatsdLog.h
rename to services/mediametrics/include/mediametricsservice/StatsdLog.h
diff --git a/services/mediametrics/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
similarity index 100%
rename from services/mediametrics/StringUtils.h
rename to services/mediametrics/include/mediametricsservice/StringUtils.h
diff --git a/services/mediametrics/TimeMachine.h b/services/mediametrics/include/mediametricsservice/TimeMachine.h
similarity index 100%
rename from services/mediametrics/TimeMachine.h
rename to services/mediametrics/include/mediametricsservice/TimeMachine.h
diff --git a/services/mediametrics/TimedAction.h b/services/mediametrics/include/mediametricsservice/TimedAction.h
similarity index 100%
rename from services/mediametrics/TimedAction.h
rename to services/mediametrics/include/mediametricsservice/TimedAction.h
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/include/mediametricsservice/TransactionLog.h
similarity index 98%
rename from services/mediametrics/TransactionLog.h
rename to services/mediametrics/include/mediametricsservice/TransactionLog.h
index 0ca4639..fd42518 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/include/mediametricsservice/TransactionLog.h
@@ -158,7 +158,7 @@
                 ++it) {
             if (ll <= 0) break;
             if (prefix != nullptr && !startsWith(it->first, prefix)) break;
-            auto [s, l] = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
+            std::tie(s, l) = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
             if (l == 0) continue; // don't show empty groups (due to sinceNs).
             ss << " " << it->first << "\n" << s;
             ll -= l + 1;
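Reviewer note: the TransactionLog change above switches from a structured binding to std::tie so the loop assigns into variables declared earlier in the function instead of declaring new ones that would shadow them on every iteration. A minimal illustration of the difference, with a toy makeChunk() producer:

    #include <cstdio>
    #include <string>
    #include <tuple>
    #include <utility>

    std::pair<std::string, int> makeChunk(int i) {
        return {"chunk " + std::to_string(i) + "\n", 1};
    }

    int main() {
        std::string s;   // declared once, outside the loop
        int l = 0;

        for (int i = 0; i < 3; ++i) {
            // auto [s, l] = makeChunk(i);   // would declare NEW s/l, shadowing the outer ones
            std::tie(s, l) = makeChunk(i);   // assigns into the existing outer s/l
        }
        std::printf("last: %s(%d lines)\n", s.c_str(), l);
        return 0;
    }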
diff --git a/services/mediametrics/ValidateId.h b/services/mediametrics/include/mediametricsservice/ValidateId.h
similarity index 100%
rename from services/mediametrics/ValidateId.h
rename to services/mediametrics/include/mediametricsservice/ValidateId.h
diff --git a/services/mediametrics/Wrap.h b/services/mediametrics/include/mediametricsservice/Wrap.h
similarity index 100%
rename from services/mediametrics/Wrap.h
rename to services/mediametrics/include/mediametricsservice/Wrap.h
diff --git a/services/mediametrics/cleaner.h b/services/mediametrics/include/mediametricsservice/cleaner.h
similarity index 100%
rename from services/mediametrics/cleaner.h
rename to services/mediametrics/include/mediametricsservice/cleaner.h
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/include/mediametricsservice/iface_statsd.h
similarity index 100%
rename from services/mediametrics/iface_statsd.h
rename to services/mediametrics/include/mediametricsservice/iface_statsd.h
diff --git a/services/mediametrics/main_mediametrics.cpp b/services/mediametrics/main_mediametrics.cpp
index 3a66538..455d67a 100644
--- a/services/mediametrics/main_mediametrics.cpp
+++ b/services/mediametrics/main_mediametrics.cpp
@@ -18,11 +18,10 @@
 //#define LOG_NDEBUG 0
 #include <utils/Log.h>
 
-#include "MediaMetricsService.h"
-
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
+#include <mediametricsservice/MediaMetricsService.h>
 #include <mediautils/LimitProcessMemory.h>
 
 int main(int argc __unused, char **argv)
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 8581437..a737ba0 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -231,7 +231,7 @@
         sessionId = mediametrics::ValidateId::get()->validateId(sessionId);
         metrics_proto.set_log_session_id(sessionId);
     }
-    AStatsEvent_writeString(event, codec.c_str());
+    AStatsEvent_writeString(event, sessionId.c_str());
 
     int32_t channelCount = -1;
     if (item->getInt32("android.media.mediacodec.channelCount", &channelCount)) {
@@ -390,6 +390,60 @@
     }
     AStatsEvent_writeInt32(event, qpBMaxOri);
 
+    int32_t configColorStandard = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
+        metrics_proto.set_config_color_standard(configColorStandard);
+    }
+    AStatsEvent_writeInt32(event, configColorStandard);
+
+    int32_t configColorRange = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
+        metrics_proto.set_config_color_range(configColorRange);
+    }
+    AStatsEvent_writeInt32(event, configColorRange);
+
+    int32_t configColorTransfer = -1;
+    if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
+        metrics_proto.set_config_color_transfer(configColorTransfer);
+    }
+    AStatsEvent_writeInt32(event, configColorTransfer);
+
+    int32_t parsedColorStandard = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
+        metrics_proto.set_parsed_color_standard(parsedColorStandard);
+    }
+    AStatsEvent_writeInt32(event, parsedColorStandard);
+
+    int32_t parsedColorRange = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
+        metrics_proto.set_parsed_color_range(parsedColorRange);
+    }
+    AStatsEvent_writeInt32(event, parsedColorRange);
+
+    int32_t parsedColorTransfer = -1;
+    if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
+        metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
+    }
+    AStatsEvent_writeInt32(event, parsedColorTransfer);
+
+    int32_t hdrStaticInfo = -1;
+    if (item->getInt32("android.media.mediacodec.hdr-static-info", &hdrStaticInfo)) {
+        metrics_proto.set_hdr_static_info(hdrStaticInfo);
+    }
+    AStatsEvent_writeInt32(event, hdrStaticInfo);
+
+    int32_t hdr10PlusInfo = -1;
+    if (item->getInt32("android.media.mediacodec.hdr10-plus-info", &hdr10PlusInfo)) {
+        metrics_proto.set_hdr10_plus_info(hdr10PlusInfo);
+    }
+    AStatsEvent_writeInt32(event, hdr10PlusInfo);
+
+    int32_t hdrFormat = -1;
+    if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
+        metrics_proto.set_hdr_format(hdrFormat);
+    }
+    AStatsEvent_writeInt32(event, hdrFormat);
+
     int err = AStatsEvent_write(event);
     if (err < 0) {
       ALOGE("Failed to write codec metrics to statsd (%d)", err);
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 287fb8d..e06a605 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -171,7 +171,7 @@
     std::vector<uint8_t> buf(str.length() / 4 * 3, 0);
     size_t size = buf.size();
     if (decodeBase64(buf.data(), &size, str.c_str()) && size <= buf.size()) {
-        buf.erase(buf.begin() + size, buf.end());
+        buf.erase(buf.begin() + (ptrdiff_t)size, buf.end());
         return buf;
     }
     return {};
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index 3baf739..f46fbad 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -33,6 +33,7 @@
         "libmediautils",
         "libutils",
         "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index 102700a..bc7b47b 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -17,19 +17,18 @@
 #define LOG_TAG "mediametrics_tests"
 #include <utils/Log.h>
 
-#include "MediaMetricsService.h"
 
 #include <stdio.h>
 #include <unordered_set>
 
 #include <gtest/gtest.h>
 #include <media/MediaMetricsItem.h>
+#include <mediametricsservice/AudioTypes.h>
+#include <mediametricsservice/MediaMetricsService.h>
+#include <mediametricsservice/StringUtils.h>
+#include <mediametricsservice/ValidateId.h>
 #include <system/audio.h>
 
-#include "AudioTypes.h"
-#include "StringUtils.h"
-#include "ValidateId.h"
-
 using namespace android;
 
 static size_t countNewlines(const char *s) {
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index f31202b..5d80744 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -90,7 +90,7 @@
     ],
 
     static_libs: [
-        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "resourceobserver_aidl_interface-V1-ndk",
     ],
 
     include_dirs: ["frameworks/av/include"],
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.h b/services/mediaresourcemanager/IMediaResourceMonitor.h
index f92d557..4dd87e1 100644
--- a/services/mediaresourcemanager/IMediaResourceMonitor.h
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.h
@@ -32,6 +32,7 @@
     enum {
         TYPE_VIDEO_CODEC = 0,
         TYPE_AUDIO_CODEC = 1,
+        TYPE_IMAGE_CODEC = 2,
     };
 
     virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type) = 0;
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 953686b..b4610bc 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -51,8 +51,8 @@
 
 class DeathNotifier : public RefBase {
 public:
-    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
-            int pid, int64_t clientId);
+    DeathNotifier(const std::shared_ptr<ResourceManagerService> &service, int pid,
+            int64_t clientId);
 
     virtual ~DeathNotifier() {}
 
@@ -130,27 +130,48 @@
     return itemsStr;
 }
 
-static bool hasResourceType(MediaResource::Type type, const ResourceList& resources) {
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        MediaResourceParcel resource) {
+    if (type != resource.type) {
+      return false;
+    }
+    switch (type) {
+        // Codec subtypes (e.g. video vs. audio) are each considered separate resources, so
+        // compare the subtypes as well.
+        case MediaResource::Type::kSecureCodec:
+        case MediaResource::Type::kNonSecureCodec:
+            if (resource.subType == subType) {
+                return true;
+            }
+            break;
+        // Non-codec resources are not segregated by the subtype (e.g. video vs. audio).
+        default:
+            return true;
+    }
+    return false;
+}
+
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceList& resources) {
     for (auto it = resources.begin(); it != resources.end(); it++) {
-        if (it->second.type == type) {
+        if (hasResourceType(type, subType, it->second)) {
             return true;
         }
     }
     return false;
 }
 
-static bool hasResourceType(MediaResource::Type type, const ResourceInfos& infos) {
+static bool hasResourceType(MediaResource::Type type, MediaResource::SubType subType,
+        const ResourceInfos& infos) {
     for (size_t i = 0; i < infos.size(); ++i) {
-        if (hasResourceType(type, infos[i].resources)) {
+        if (hasResourceType(type, subType, infos[i].resources)) {
             return true;
         }
     }
     return false;
 }
 
-static ResourceInfos& getResourceInfosForEdit(
-        int pid,
-        PidResourceInfosMap& map) {
+static ResourceInfos& getResourceInfosForEdit(int pid, PidResourceInfosMap& map) {
     ssize_t index = map.indexOfKey(pid);
     if (index < 0) {
         // new pid
@@ -161,11 +182,8 @@
     return map.editValueFor(pid);
 }
 
-static ResourceInfo& getResourceInfoForEdit(
-        uid_t uid,
-        int64_t clientId,
-        const std::shared_ptr<IResourceManagerClient>& client,
-        ResourceInfos& infos) {
+static ResourceInfo& getResourceInfoForEdit(uid_t uid, int64_t clientId,
+        const std::shared_ptr<IResourceManagerClient>& client, ResourceInfos& infos) {
     ssize_t index = infos.indexOfKey(clientId);
 
     if (index < 0) {
@@ -188,17 +206,24 @@
     if (binder != NULL) {
         sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
         for (size_t i = 0; i < resources.size(); ++i) {
-            if (resources[i].subType == MediaResource::SubType::kAudioCodec) {
-                service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
-            } else if (resources[i].subType == MediaResource::SubType::kVideoCodec) {
-                service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
+            switch (resources[i].subType) {
+                case MediaResource::SubType::kAudioCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_AUDIO_CODEC);
+                    break;
+                case MediaResource::SubType::kVideoCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_VIDEO_CODEC);
+                    break;
+                case MediaResource::SubType::kImageCodec:
+                    service->notifyResourceGranted(pid, IMediaResourceMonitor::TYPE_IMAGE_CODEC);
+                    break;
+                case MediaResource::SubType::kUnspecifiedSubType:
+                    break;
             }
         }
     }
 }
 
-binder_status_t ResourceManagerService::dump(
-        int fd, const char** /*args*/, uint32_t /*numArgs*/) {
+binder_status_t ResourceManagerService::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) {
     String8 result;
 
     if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
@@ -275,8 +300,7 @@
     return OK;
 }
 
-struct SystemCallbackImpl :
-        public ResourceManagerService::SystemCallbackInterface {
+struct SystemCallbackImpl : public ResourceManagerService::SystemCallbackInterface {
     SystemCallbackImpl() : mClientToken(new BBinder()) {}
 
     virtual void noteStartVideo(int uid) override {
@@ -303,8 +327,7 @@
 ResourceManagerService::ResourceManagerService()
     : ResourceManagerService(new ProcessInfo(), new SystemCallbackImpl()) {}
 
-ResourceManagerService::ResourceManagerService(
-        const sp<ProcessInfoInterface> &processInfo,
+ResourceManagerService::ResourceManagerService(const sp<ProcessInfoInterface> &processInfo,
         const sp<SystemCallbackInterface> &systemResource)
     : mProcessInfo(processInfo),
       mSystemCB(systemResource),
@@ -362,8 +385,8 @@
     return Status::ok();
 }
 
-void ResourceManagerService::onFirstAdded(
-        const MediaResourceParcel& resource, const ResourceInfo& clientInfo) {
+void ResourceManagerService::onFirstAdded(const MediaResourceParcel& resource,
+        const ResourceInfo& clientInfo) {
     // first time added
     if (resource.type == MediaResource::Type::kCpuBoost
      && resource.subType == MediaResource::SubType::kUnspecifiedSubType) {
@@ -380,8 +403,8 @@
     }
 }
 
-void ResourceManagerService::onLastRemoved(
-        const MediaResourceParcel& resource, const ResourceInfo& clientInfo) {
+void ResourceManagerService::onLastRemoved(const MediaResourceParcel& resource,
+        const ResourceInfo& clientInfo) {
     if (resource.type == MediaResource::Type::kCpuBoost
             && resource.subType == MediaResource::SubType::kUnspecifiedSubType
             && mCpuBoostCount > 0) {
@@ -394,8 +417,8 @@
     }
 }
 
-void ResourceManagerService::mergeResources(
-        MediaResourceParcel& r1, const MediaResourceParcel& r2) {
+void ResourceManagerService::mergeResources(MediaResourceParcel& r1,
+        const MediaResourceParcel& r2) {
     // The resource entry on record is maintained to be in [0,INT64_MAX].
     // Clamp if merging in the new resource value causes it to go out of bound.
     // Note that the new resource value could be negative, eg.DrmSession, the
@@ -411,10 +434,7 @@
     }
 }
 
-Status ResourceManagerService::addResource(
-        int32_t pid,
-        int32_t uid,
-        int64_t clientId,
+Status ResourceManagerService::addResource(int32_t pid, int32_t uid, int64_t clientId,
         const std::shared_ptr<IResourceManagerClient>& client,
         const std::vector<MediaResourceParcel>& resources) {
     String8 log = String8::format("addResource(pid %d, clientId %lld, resources %s)",
@@ -422,11 +442,11 @@
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
-    if (!mProcessInfo->isValidPid(pid)) {
+    if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         uid_t callingUid = IPCThreadState::self()->getCallingUid();
-        ALOGW("%s called with untrusted pid %d, using calling pid %d, uid %d", __FUNCTION__,
-                pid, callingPid, callingUid);
+        ALOGW("%s called with untrusted pid %d or uid %d, using calling pid %d, uid %d",
+                __FUNCTION__, pid, uid, callingPid, callingUid);
         pid = callingPid;
         uid = callingUid;
     }
@@ -463,7 +483,7 @@
         }
     }
     if (info.cookie == 0 && client != nullptr) {
-        info.cookie = addCookieAndLink_l(client->asBinder(),
+        info.cookie = addCookieAndLink_l(client,
                 new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
@@ -473,15 +493,14 @@
     return Status::ok();
 }
 
-Status ResourceManagerService::removeResource(
-        int32_t pid, int64_t clientId,
+Status ResourceManagerService::removeResource(int32_t pid, int64_t clientId,
         const std::vector<MediaResourceParcel>& resources) {
     String8 log = String8::format("removeResource(pid %d, clientId %lld, resources %s)",
             pid, (long long) clientId, getString(resources).string());
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
-    if (!mProcessInfo->isValidPid(pid)) {
+    if (!mProcessInfo->isPidTrusted(pid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
                 pid, callingPid);
@@ -549,7 +568,7 @@
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
-    if (checkValid && !mProcessInfo->isValidPid(pid)) {
+    if (checkValid && !mProcessInfo->isPidTrusted(pid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
                 pid, callingPid);
@@ -573,7 +592,7 @@
         onLastRemoved(it->second, info);
     }
 
-    removeCookieAndUnlink_l(info.client->asBinder(), info.cookie);
+    removeCookieAndUnlink_l(info.client, info.cookie);
 
     if (mObserverService != nullptr && !info.resources.empty()) {
         mObserverService->onResourceRemoved(info.uid, pid, info.resources);
@@ -583,22 +602,19 @@
     return Status::ok();
 }
 
-void ResourceManagerService::getClientForResource_l(
-        int callingPid, const MediaResourceParcel *res,
+void ResourceManagerService::getClientForResource_l(int callingPid, const MediaResourceParcel *res,
         Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     if (res == NULL) {
         return;
     }
     std::shared_ptr<IResourceManagerClient> client;
-    if (getLowestPriorityBiggestClient_l(callingPid, res->type, &client)) {
+    if (getLowestPriorityBiggestClient_l(callingPid, res->type, res->subType, &client)) {
         clients->push_back(client);
     }
 }
 
-Status ResourceManagerService::reclaimResource(
-        int32_t callingPid,
-        const std::vector<MediaResourceParcel>& resources,
-        bool* _aidl_return) {
+Status ResourceManagerService::reclaimResource(int32_t callingPid,
+        const std::vector<MediaResourceParcel>& resources, bool* _aidl_return) {
     String8 log = String8::format("reclaimResource(callingPid %d, resources %s)",
             callingPid, getString(resources).string());
     mServiceLog->add(log);
@@ -607,7 +623,7 @@
     Vector<std::shared_ptr<IResourceManagerClient>> clients;
     {
         Mutex::Autolock lock(mLock);
-        if (!mProcessInfo->isValidPid(callingPid)) {
+        if (!mProcessInfo->isPidTrusted(callingPid)) {
             pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
             ALOGW("%s called with untrusted pid %d, using actual calling pid %d", __FUNCTION__,
                     callingPid, actualCallingPid);
@@ -618,34 +634,43 @@
         const MediaResourceParcel *graphicMemory = NULL;
         const MediaResourceParcel *drmSession = NULL;
         for (size_t i = 0; i < resources.size(); ++i) {
-            MediaResource::Type type = resources[i].type;
-            if (resources[i].type == MediaResource::Type::kSecureCodec) {
-                secureCodec = &resources[i];
-            } else if (type == MediaResource::Type::kNonSecureCodec) {
-                nonSecureCodec = &resources[i];
-            } else if (type == MediaResource::Type::kGraphicMemory) {
-                graphicMemory = &resources[i];
-            } else if (type == MediaResource::Type::kDrmSession) {
-                drmSession = &resources[i];
+            switch (resources[i].type) {
+                case MediaResource::Type::kSecureCodec:
+                    secureCodec = &resources[i];
+                    break;
+                case MediaResource::Type::kNonSecureCodec:
+                    nonSecureCodec = &resources[i];
+                    break;
+                case MediaResource::Type::kGraphicMemory:
+                    graphicMemory = &resources[i];
+                    break;
+                case MediaResource::Type::kDrmSession:
+                    drmSession = &resources[i];
+                    break;
+                default:
+                    break;
             }
         }
 
         // first pass to handle secure/non-secure codec conflict
         if (secureCodec != NULL) {
             if (!mSupportsMultipleSecureCodecs) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec, &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
+                            secureCodec->subType, &clients)) {
                     return Status::ok();
                 }
             }
             if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec, &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::Type::kNonSecureCodec,
+                            secureCodec->subType, &clients)) {
                     return Status::ok();
                 }
             }
         }
         if (nonSecureCodec != NULL) {
             if (!mSupportsSecureWithNonSecureCodec) {
-                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec, &clients)) {
+                if (!getAllClients_l(callingPid, MediaResource::Type::kSecureCodec,
+                        nonSecureCodec->subType, &clients)) {
                     return Status::ok();
                 }
             }
@@ -671,21 +696,21 @@
         if (clients.size() == 0) {
             // if we are here, run the fourth pass to free one codec with the different type.
             if (secureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kNonSecureCodec, 1);
+                MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
             if (nonSecureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kSecureCodec, 1);
+                MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
         }
     }
 
-    *_aidl_return = reclaimInternal(clients);
+    *_aidl_return = reclaimUnconditionallyFrom(clients);
     return Status::ok();
 }
 
-bool ResourceManagerService::reclaimInternal(
+bool ResourceManagerService::reclaimUnconditionallyFrom(
         const Vector<std::shared_ptr<IResourceManagerClient>> &clients) {
     if (clients.size() == 0) {
         return false;
@@ -707,6 +732,7 @@
         return true;
     }
 
+    int failedClientPid = -1;
     {
         Mutex::Autolock lock(mLock);
         bool found = false;
@@ -721,20 +747,21 @@
                 }
             }
             if (found) {
+                failedClientPid = mMap.keyAt(i);
                 break;
             }
         }
-        if (!found) {
-            ALOGV("didn't find failed client");
+        if (found) {
+            ALOGW("Failed to reclaim resources from client with pid %d", failedClientPid);
+        } else {
+            ALOGW("Failed to reclaim resources from unlocateable client");
         }
     }
 
     return false;
 }
 
-Status ResourceManagerService::overridePid(
-        int originalPid,
-        int newPid) {
+Status ResourceManagerService::overridePid(int originalPid, int newPid) {
     String8 log = String8::format("overridePid(originalPid %d, newPid %d)",
             originalPid, newPid);
     mServiceLog->add(log);
@@ -763,9 +790,7 @@
 }
 
 Status ResourceManagerService::overrideProcessInfo(
-        const std::shared_ptr<IResourceManagerClient>& client,
-        int pid,
-        int procState,
+        const std::shared_ptr<IResourceManagerClient>& client, int pid, int procState,
         int oomScore) {
     String8 log = String8::format("overrideProcessInfo(pid %d, procState %d, oomScore %d)",
             pid, procState, oomScore);
@@ -791,7 +816,7 @@
         return Status::fromServiceSpecificError(BAD_VALUE);
     }
 
-    uintptr_t cookie = addCookieAndLink_l(client->asBinder(),
+    uintptr_t cookie = addCookieAndLink_l(client,
             new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
 
     mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
@@ -800,23 +825,28 @@
 }
 
 uintptr_t ResourceManagerService::addCookieAndLink_l(
-        ::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier) {
+        const std::shared_ptr<IResourceManagerClient>& client, const sp<DeathNotifier>& notifier) {
+    if (client == nullptr) {
+        return 0;
+    }
     std::scoped_lock lock{sCookieLock};
 
     uintptr_t cookie;
     // Need to skip cookie 0 (if it wraps around). ResourceInfo has cookie initialized to 0
     // indicating the death notifier is not created yet.
     while ((cookie = ++sCookieCounter) == 0);
-    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+    AIBinder_linkToDeath(client->asBinder().get(), mDeathRecipient.get(), (void*)cookie);
     sCookieToDeathNotifierMap.emplace(cookie, notifier);
 
     return cookie;
 }
 
 void ResourceManagerService::removeCookieAndUnlink_l(
-        ::ndk::SpAIBinder binder, uintptr_t cookie) {
+         const std::shared_ptr<IResourceManagerClient>& client, uintptr_t cookie) {
     std::scoped_lock lock{sCookieLock};
-    AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+    if (client != nullptr) {
+        AIBinder_unlinkToDeath(client->asBinder().get(), mDeathRecipient.get(), (void*)cookie);
+    }
     sCookieToDeathNotifierMap.erase(cookie);
 }
 
@@ -834,7 +864,7 @@
 
     mProcessInfo->removeProcessInfoOverride(pid);
 
-    removeCookieAndUnlink_l(it->second.client->asBinder(), it->second.cookie);
+    removeCookieAndUnlink_l(it->second.client, it->second.cookie);
 
     mProcessInfoOverrideMap.erase(pid);
 }
@@ -846,7 +876,7 @@
     mServiceLog->add(log);
 
     Mutex::Autolock lock(mLock);
-    if (!mProcessInfo->isValidPid(pid)) {
+    if (!mProcessInfo->isPidTrusted(pid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
                 pid, callingPid);
@@ -878,7 +908,7 @@
     Vector<std::shared_ptr<IResourceManagerClient>> clients;
     {
         Mutex::Autolock lock(mLock);
-        if (!mProcessInfo->isValidPid(pid)) {
+        if (!mProcessInfo->isPidTrusted(pid)) {
             pid_t callingPid = IPCThreadState::self()->getCallingPid();
             ALOGW("%s called with untrusted pid %d, using calling pid %d", __FUNCTION__,
                     pid, callingPid);
@@ -889,16 +919,34 @@
                                          MediaResource::Type::kNonSecureCodec,
                                          MediaResource::Type::kGraphicMemory,
                                          MediaResource::Type::kDrmSession}) {
-            std::shared_ptr<IResourceManagerClient> client;
-            if (getBiggestClient_l(pid, type, &client, true /* pendingRemovalOnly */)) {
-                clients.add(client);
-                break;
+            switch (type) {
+                // Codec resources are segregated by audio, video and image domains.
+                case MediaResource::Type::kSecureCodec:
+                case MediaResource::Type::kNonSecureCodec:
+                    for (MediaResource::SubType subType : {MediaResource::SubType::kAudioCodec,
+                                                           MediaResource::SubType::kVideoCodec,
+                                                           MediaResource::SubType::kImageCodec}) {
+                        std::shared_ptr<IResourceManagerClient> client;
+                        if (getBiggestClientPendingRemoval_l(pid, type, subType, &client)) {
+                            clients.add(client);
+                            continue;
+                        }
+                    }
+                    break;
+                // Non-codec resources are shared by audio, video and image codecs (no subtype).
+                default:
+                    std::shared_ptr<IResourceManagerClient> client;
+                    if (getBiggestClientPendingRemoval_l(pid, type,
+                            MediaResource::SubType::kUnspecifiedSubType, &client)) {
+                        clients.add(client);
+                    }
+                    break;
             }
         }
     }
 
     if (!clients.empty()) {
-        reclaimInternal(clients);
+        reclaimUnconditionallyFrom(clients);
     }
     return Status::ok();
 }
@@ -915,14 +963,13 @@
     return mProcessInfo->getPriority(newPid, priority);
 }
 
-bool ResourceManagerService::getAllClients_l(
-        int callingPid, MediaResource::Type type,
-        Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
+bool ResourceManagerService::getAllClients_l(int callingPid, MediaResource::Type type,
+        MediaResource::SubType subType, Vector<std::shared_ptr<IResourceManagerClient>> *clients) {
     Vector<std::shared_ptr<IResourceManagerClient>> temp;
     for (size_t i = 0; i < mMap.size(); ++i) {
         ResourceInfos &infos = mMap.editValueAt(i);
         for (size_t j = 0; j < infos.size(); ++j) {
-            if (hasResourceType(type, infos[j].resources)) {
+            if (hasResourceType(type, subType, infos[j].resources)) {
                 if (!isCallingPriorityHigher_l(callingPid, mMap.keyAt(i))) {
                     // some higher/equal priority process owns the resource,
                     // this request can't be fulfilled.
@@ -942,8 +989,8 @@
     return true;
 }
 
-bool ResourceManagerService::getLowestPriorityBiggestClient_l(
-        int callingPid, MediaResource::Type type,
+bool ResourceManagerService::getLowestPriorityBiggestClient_l(int callingPid,
+        MediaResource::Type type, MediaResource::SubType subType,
         std::shared_ptr<IResourceManagerClient> *client) {
     int lowestPriorityPid;
     int lowestPriority;
@@ -951,7 +998,7 @@
 
     // Before looking into other processes, check if we have clients marked for
     // pending removal in the same process.
-    if (getBiggestClient_l(callingPid, type, client, true /* pendingRemovalOnly */)) {
+    if (getBiggestClientPendingRemoval_l(callingPid, type, subType, client)) {
         return true;
     }
     if (!getPriority_l(callingPid, &callingPriority)) {
@@ -959,7 +1006,7 @@
                 callingPid);
         return false;
     }
-    if (!getLowestPriorityPid_l(type, &lowestPriorityPid, &lowestPriority)) {
+    if (!getLowestPriorityPid_l(type, subType, &lowestPriorityPid, &lowestPriority)) {
         return false;
     }
     if (lowestPriority <= callingPriority) {
@@ -968,14 +1015,14 @@
         return false;
     }
 
-    if (!getBiggestClient_l(lowestPriorityPid, type, client)) {
+    if (!getBiggestClient_l(lowestPriorityPid, type, subType, client)) {
         return false;
     }
     return true;
 }
 
-bool ResourceManagerService::getLowestPriorityPid_l(
-        MediaResource::Type type, int *lowestPriorityPid, int *lowestPriority) {
+bool ResourceManagerService::getLowestPriorityPid_l(MediaResource::Type type,
+        MediaResource::SubType subType, int *lowestPriorityPid, int *lowestPriority) {
     int pid = -1;
     int priority = -1;
     for (size_t i = 0; i < mMap.size(); ++i) {
@@ -983,7 +1030,7 @@
             // no client on this process.
             continue;
         }
-        if (!hasResourceType(type, mMap.valueAt(i))) {
+        if (!hasResourceType(type, subType, mMap.valueAt(i))) {
             // doesn't have the requested resource type
             continue;
         }
@@ -1021,8 +1068,13 @@
     return (callingPidPriority < priority);
 }
 
-bool ResourceManagerService::getBiggestClient_l(
-        int pid, MediaResource::Type type, std::shared_ptr<IResourceManagerClient> *client,
+bool ResourceManagerService::getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
+        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client) {
+    return getBiggestClient_l(pid, type, subType, client, true /* pendingRemovalOnly */);
+}
+
+bool ResourceManagerService::getBiggestClient_l(int pid, MediaResource::Type type,
+        MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client,
         bool pendingRemovalOnly) {
     ssize_t index = mMap.indexOfKey(pid);
     if (index < 0) {
@@ -1041,7 +1093,7 @@
         }
         for (auto it = resources.begin(); it != resources.end(); it++) {
             const MediaResourceParcel &resource = it->second;
-            if (resource.type == type) {
+            if (hasResourceType(type, subType, resource)) {
                 if (resource.value > largestValue) {
                     largestValue = resource.value;
                     clientTemp = infos[i].client;
@@ -1052,8 +1104,8 @@
 
     if (clientTemp == NULL) {
         ALOGE_IF(!pendingRemovalOnly,
-                 "getBiggestClient_l: can't find resource type %s for pid %d",
-                 asString(type), pid);
+                 "getBiggestClient_l: can't find resource type %s and subtype %s for pid %d",
+                 asString(type), asString(subType), pid);
         return false;
     }
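Reviewer note: the ResourceManagerService changes above thread a MediaResource::SubType through the lookup helpers so secure and non-secure codec resources are matched per domain (audio, video, image) while non-codec resources ignore the subtype. The standalone sketch below reproduces that matching rule with simplified enums; it is illustrative only, not the service code.

    #include <cstdio>

    // Simplified copies of the AIDL enums, for illustration only.
    enum class Type { kSecureCodec, kNonSecureCodec, kGraphicMemory, kDrmSession };
    enum class SubType { kUnspecified, kAudioCodec, kVideoCodec, kImageCodec };

    struct Resource { Type type; SubType subType; };

    // Same rule as the patched hasResourceType(): codec types must also match the
    // requested subtype; every other type matches regardless of subtype.
    bool matches(Type type, SubType subType, const Resource& r) {
        if (type != r.type) return false;
        switch (type) {
            case Type::kSecureCodec:
            case Type::kNonSecureCodec:
                return r.subType == subType;   // audio/video/image codecs are separate pools
            default:
                return true;                   // e.g. graphic memory is shared across domains
        }
    }

    int main() {
        Resource videoCodec{Type::kNonSecureCodec, SubType::kVideoCodec};
        Resource gfx{Type::kGraphicMemory, SubType::kUnspecified};
        std::printf("%d %d %d\n",
                matches(Type::kNonSecureCodec, SubType::kVideoCodec, videoCodec),  // 1
                matches(Type::kNonSecureCodec, SubType::kAudioCodec, videoCodec),  // 0
                matches(Type::kGraphicMemory, SubType::kAudioCodec, gfx));         // 1
        return 0;
    }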
 

diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 9c2636e..c636a0f 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -77,26 +77,19 @@
             int /*fd*/, const char** /*args*/, uint32_t /*numArgs*/);
 
     ResourceManagerService();
-    explicit ResourceManagerService(
-            const sp<ProcessInfoInterface> &processInfo,
+    explicit ResourceManagerService(const sp<ProcessInfoInterface> &processInfo,
             const sp<SystemCallbackInterface> &systemResource);
     virtual ~ResourceManagerService();
-    void setObserverService(
-            const std::shared_ptr<ResourceObserverService>& observerService);
+    void setObserverService(const std::shared_ptr<ResourceObserverService>& observerService);
 
     // IResourceManagerService interface
     Status config(const std::vector<MediaResourcePolicyParcel>& policies) override;
 
-    Status addResource(
-            int32_t pid,
-            int32_t uid,
-            int64_t clientId,
+    Status addResource(int32_t pid, int32_t uid, int64_t clientId,
             const std::shared_ptr<IResourceManagerClient>& client,
             const std::vector<MediaResourceParcel>& resources) override;
 
-    Status removeResource(
-            int32_t pid,
-            int64_t clientId,
+    Status removeResource(int32_t pid, int64_t clientId,
             const std::vector<MediaResourceParcel>& resources) override;
 
     Status removeClient(int32_t pid, int64_t clientId) override;
@@ -104,20 +97,13 @@
     // Tries to reclaim resource from processes with lower priority than the calling process
     // according to the requested resources.
     // Returns true if any resource has been reclaimed, otherwise returns false.
-    Status reclaimResource(
-            int32_t callingPid,
-            const std::vector<MediaResourceParcel>& resources,
+    Status reclaimResource(int32_t callingPid, const std::vector<MediaResourceParcel>& resources,
             bool* _aidl_return) override;
 
-    Status overridePid(
-            int originalPid,
-            int newPid) override;
+    Status overridePid(int originalPid, int newPid) override;
 
-    Status overrideProcessInfo(
-            const std::shared_ptr<IResourceManagerClient>& client,
-            int pid,
-            int procState,
-            int oomScore) override;
+    Status overrideProcessInfo(const std::shared_ptr<IResourceManagerClient>& client, int pid,
+            int procState, int oomScore) override;
 
     Status markClientForPendingRemoval(int32_t pid, int64_t clientId) override;
 
@@ -132,30 +118,34 @@
 
     // Reclaims resources from |clients|. Returns true if reclaim succeeded
     // for all clients.
-    bool reclaimInternal(
-            const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
+    bool reclaimUnconditionallyFrom(const Vector<std::shared_ptr<IResourceManagerClient>> &clients);
 
     // Gets the list of all the clients who own the specified resource type.
     // Returns false if any client belongs to a process with higher priority than the
     // calling process. The clients will remain unchanged if returns false.
-    bool getAllClients_l(int callingPid, MediaResource::Type type,
+    bool getAllClients_l(int callingPid, MediaResource::Type type, MediaResource::SubType subType,
             Vector<std::shared_ptr<IResourceManagerClient>> *clients);
 
     // Gets the client who owns specified resource type from lowest possible priority process.
     // Returns false if the calling process priority is not higher than the lowest process
     // priority. The client will remain unchanged if returns false.
     bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
-            std::shared_ptr<IResourceManagerClient> *client);
+            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
 
     // Gets lowest priority process that has the specified resource type.
     // Returns false if failed. The output parameters will remain unchanged if failed.
-    bool getLowestPriorityPid_l(MediaResource::Type type, int *pid, int *priority);
+    bool getLowestPriorityPid_l(MediaResource::Type type, MediaResource::SubType subType, int *pid,
+                int *priority);
 
     // Gets the client who owns biggest piece of specified resource type from pid.
-    // Returns false if failed. The client will remain unchanged if failed.
-    bool getBiggestClient_l(int pid, MediaResource::Type type,
+    // Returns false with no change to client if there are no clients holding resources of this
+    // type.
+    bool getBiggestClient_l(int pid, MediaResource::Type type, MediaResource::SubType subType,
             std::shared_ptr<IResourceManagerClient> *client,
             bool pendingRemovalOnly = false);
+    // Same method as above, but with pendingRemovalOnly as true.
+    bool getBiggestClientPendingRemoval_l(int pid, MediaResource::Type type,
+            MediaResource::SubType subType, std::shared_ptr<IResourceManagerClient> *client);
 
     bool isCallingPriorityHigher_l(int callingPid, int pid);
 
@@ -176,8 +166,10 @@
     void removeProcessInfoOverride(int pid);
 
     void removeProcessInfoOverride_l(int pid);
-    uintptr_t addCookieAndLink_l(::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier);
-    void removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie);
+    uintptr_t addCookieAndLink_l(const std::shared_ptr<IResourceManagerClient>& client,
+                                 const sp<DeathNotifier>& notifier);
+    void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
+                                 uintptr_t cookie);
 
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
diff --git a/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
index af2ba68..72a0551 100644
--- a/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/MediaResourceSubType.aidl
@@ -26,4 +26,5 @@
     kUnspecifiedSubType = 0,
     kAudioCodec = 1,
     kVideoCodec = 2,
+    kImageCodec = 3,
 }
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index 6690b16..e4aaea0 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -71,7 +71,8 @@
         return true;
     }
 
-    virtual bool isValidPid(int /* pid */) { return true; }
+    virtual bool isPidTrusted(int /* pid */) { return true; }
+    virtual bool isPidUidTrusted(int /* pid */, int /* uid */) { return true; }
     virtual bool overrideProcessInfo(int /* pid */, int /*procState*/, int /*oomScore*/) {
         return true;
     }
@@ -226,33 +227,31 @@
         mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinThreadPairs, kMaxThreadPairs);
     // Make even number of threads
     size_t numThreads = numThreadPairs * 2;
-    resourceThreadArgs threadArgs;
-    vector<MediaResourceParcel> mediaResource;
+    resourceThreadArgs threadArgs[numThreadPairs];
+    vector<MediaResourceParcel> mediaResource[numThreadPairs];
     pthread_t pt[numThreads];
-    int i;
-    for (i = 0; i < numThreads - 1; i += 2) {
-        threadArgs.pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
-        threadArgs.uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+    for (int k = 0; k < numThreadPairs; ++k) {
+        threadArgs[k].pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+        threadArgs[k].uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
         int32_t mediaResourceType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
             kMinResourceType, kMaxResourceType);
         int32_t mediaResourceSubType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
             kMinResourceType, kMaxResourceType);
         uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
-        threadArgs.service = mService;
+        threadArgs[k].service = mService;
         shared_ptr<IResourceManagerClient> testClient =
-            ::ndk::SharedRefBase::make<TestClient>(threadArgs.pid, mService);
-        threadArgs.testClient = testClient;
-        threadArgs.testClientId = getId(testClient);
-        mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
-                                              static_cast<MedResSubType>(mediaResourceSubType),
-                                              mediaResourceValue));
-        threadArgs.mediaResource = mediaResource;
-        pthread_create(&pt[i], nullptr, addResource, &threadArgs);
-        pthread_create(&pt[i + 1], nullptr, removeResource, &threadArgs);
-        mediaResource.clear();
+                ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, mService);
+        threadArgs[k].testClient = testClient;
+        threadArgs[k].testClientId = getId(testClient);
+        mediaResource[k].push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+                                                 static_cast<MedResSubType>(mediaResourceSubType),
+                                                 mediaResourceValue));
+        threadArgs[k].mediaResource = mediaResource[k];
+        pthread_create(&pt[2 * k], nullptr, addResource, &threadArgs[k]);
+        pthread_create(&pt[2 * k + 1], nullptr, removeResource, &threadArgs[k]);
     }
 
-    for (i = 0; i < numThreads; ++i) {
+    for (int i = 0; i < numThreads; ++i) {
         pthread_join(pt[i], nullptr);
     }
 
@@ -265,14 +264,14 @@
     int32_t mediaResourceSubType =
         mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
     uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
-    mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
-                                          static_cast<MedResSubType>(mediaResourceSubType),
-                                          mediaResourceValue));
+    vector<MediaResourceParcel> mediaRes;
+    mediaRes.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+                                     static_cast<MedResSubType>(mediaResourceSubType),
+                                     mediaResourceValue));
     bool result;
-    mService->reclaimResource(pidZero, mediaResource, &result);
-    mService->removeResource(pidZero, getId(testClient), mediaResource);
+    mService->reclaimResource(pidZero, mediaRes, &result);
+    mService->removeResource(pidZero, getId(testClient), mediaRes);
     mService->removeClient(pidZero, getId(testClient));
-    mediaResource.clear();
 }
 
 void ResourceManagerServiceFuzzer::setServiceLog() {
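
The fuzzer change above stops sharing one resourceThreadArgs struct and one resource vector across every worker thread; each add/remove thread pair now gets its own slot, which removes the data race on the shared arguments. The underlying pattern, shown as a self-contained sketch with made-up names rather than the fuzzer's own types:

    #include <cstddef>
    #include <pthread.h>
    #include <vector>

    struct WorkerArgs {
        int id;
        std::vector<int> payload;
    };

    static void* worker(void* arg) {
        // Each thread dereferences only its own WorkerArgs slot, so no locking is needed
        // for the arguments themselves.
        auto* args = static_cast<WorkerArgs*>(arg);
        (void)args->id;
        return nullptr;
    }

    static void spawnPairs(size_t numPairs) {
        std::vector<WorkerArgs> args(numPairs);     // one slot per pair, addresses stay stable
        std::vector<pthread_t> threads(numPairs * 2);
        for (size_t k = 0; k < numPairs; ++k) {
            args[k].id = static_cast<int>(k);
            pthread_create(&threads[2 * k], nullptr, worker, &args[k]);
            pthread_create(&threads[2 * k + 1], nullptr, worker, &args[k]);
        }
        for (pthread_t t : threads) {
            pthread_join(t, nullptr);
        }
    }
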
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index ec4ba58..618626f 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -56,7 +56,7 @@
     test_suites: ["device-tests"],
     static_libs: [
         "libresourcemanagerservice",
-        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "resourceobserver_aidl_interface-V1-ndk",
     ],
     shared_libs: [
         "libbinder",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 8e29312..5bf44ce 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -46,7 +46,11 @@
         return true;
     }
 
-    virtual bool isValidPid(int /* pid */) {
+    virtual bool isPidTrusted(int /* pid */) {
+        return true;
+    }
+
+    virtual bool isPidUidTrusted(int /* pid */, int /* uid */) {
         return true;
     }
 
@@ -119,11 +123,11 @@
 
 struct TestClient : public BnResourceManagerClient {
     TestClient(int pid, const std::shared_ptr<ResourceManagerService> &service)
-        : mReclaimed(false), mPid(pid), mService(service) {}
+        : mPid(pid), mService(service) {}
 
     Status reclaimResource(bool* _aidl_return) override {
         mService->removeClient(mPid, getId(ref<TestClient>()));
-        mReclaimed = true;
+        mWasReclaimResourceCalled = true;
         *_aidl_return = true;
         return Status::ok();
     }
@@ -133,18 +137,16 @@
         return Status::ok();
     }
 
-    bool reclaimed() const {
-        return mReclaimed;
-    }
-
-    void reset() {
-        mReclaimed = false;
+    bool checkIfReclaimedAndReset() {
+        bool wasReclaimResourceCalled = mWasReclaimResourceCalled;
+        mWasReclaimResourceCalled = false;
+        return wasReclaimResourceCalled;
     }
 
     virtual ~TestClient() {}
 
 private:
-    bool mReclaimed;
+    bool mWasReclaimResourceCalled = false;
     int mPid;
     std::shared_ptr<ResourceManagerService> mService;
     DISALLOW_EVIL_CONSTRUCTORS(TestClient);
@@ -166,14 +168,30 @@
     return lhs.type == rhs.type && lhs.arg == rhs.arg;
 }
 
-#define CHECK_STATUS_TRUE(condition) \
-    EXPECT_TRUE((condition).isOk() && (result))
+// The condition must return a Status and also write into the local
+// `result` variable that this macro declares.
+#define CHECK_STATUS_TRUE(conditionThatUpdatesResult) \
+    do { \
+        bool result = false; \
+        EXPECT_TRUE((conditionThatUpdatesResult).isOk()); \
+        EXPECT_TRUE(result); \
+    } while(false)
 
-#define CHECK_STATUS_FALSE(condition) \
-    EXPECT_TRUE((condition).isOk() && !(result))
+// The condition must return a Status and also write into the local
+// `result` variable that this macro declares.
+#define CHECK_STATUS_FALSE(conditionThatUpdatesResult) \
+    do { \
+        bool result = true; \
+        EXPECT_TRUE((conditionThatUpdatesResult).isOk()); \
+        EXPECT_FALSE(result); \
+    } while(false)
 
 class ResourceManagerServiceTestBase : public ::testing::Test {
 public:
+    static TestClient* toTestClient(std::shared_ptr<IResourceManagerClient> testClient) {
+        return static_cast<TestClient*>(testClient.get());
+    }
+
     ResourceManagerServiceTestBase()
         : mSystemCB(new TestSystemCallback()),
           mService(::ndk::SharedRefBase::make<ResourceManagerService>(
@@ -183,6 +201,10 @@
           mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, mService)) {
     }
 
+    std::shared_ptr<IResourceManagerClient> createTestClient(int pid) {
+        return ::ndk::SharedRefBase::make<TestClient>(pid, mService);
+    }
+
     sp<TestSystemCallback> mSystemCB;
     std::shared_ptr<ResourceManagerService> mService;
     std::shared_ptr<IResourceManagerClient> mTestClient1;
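
The reworked CHECK_STATUS_TRUE/CHECK_STATUS_FALSE macros now declare their own local result inside a do/while(false) block, so each use checks both that the binder call returned OK and that the out-parameter came back with the expected value, without depending on a result variable in the enclosing test. Combined with TestClient::checkIfReclaimedAndReset(), a typical assertion sequence in the tests below looks roughly like this (pids and amounts are placeholders):

    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec,
                                      MediaResource::SubType::kVideoCodec, 1));
    // The macro supplies `result`; the wrapped call returns a Status and also writes into it.
    CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
    // Then ask each client whether its reclaimResource() callback ran, resetting the flag.
    EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
    EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
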
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index a029d45..8739c3b 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -25,22 +25,60 @@
 namespace android {
 
 class ResourceManagerServiceTest : public ResourceManagerServiceTestBase {
+private:
+    static MediaResource createSecureVideoCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kSecureCodec,
+            MediaResource::SubType::kVideoCodec, amount);
+    }
+
+    static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kNonSecureCodec,
+            MediaResource::SubType::kVideoCodec, amount);
+    }
+
+    static MediaResource createSecureAudioCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kSecureCodec,
+            MediaResource::SubType::kAudioCodec, amount);
+    }
+
+    static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kNonSecureCodec,
+            MediaResource::SubType::kAudioCodec, amount);
+    }
+
+    static MediaResource createSecureImageCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kSecureCodec,
+            MediaResource::SubType::kImageCodec, amount);
+    }
+
+    static MediaResource createNonSecureImageCodecResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kNonSecureCodec,
+            MediaResource::SubType::kImageCodec, amount);
+    }
+
+    static MediaResource createGraphicMemoryResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kGraphicMemory,
+            MediaResource::SubType::kUnspecifiedSubType, amount);
+    }
+
+    static MediaResource createDrmSessionResource(int amount = 1) {
+        return MediaResource(MediaResource::Type::kDrmSession,
+            MediaResource::SubType::kUnspecifiedSubType, amount);
+    }
+
+    static MediaResource createBatteryResource() {
+        return MediaResource(MediaResource::Type::kBattery,
+            MediaResource::SubType::kUnspecifiedSubType, 1);
+    }
+
+    static MediaResource createCpuBoostResource() {
+        return MediaResource(MediaResource::Type::kCpuBoost,
+            MediaResource::SubType::kUnspecifiedSubType, 1);
+    }
+
 public:
     ResourceManagerServiceTest() : ResourceManagerServiceTestBase() {}
 
-    void verifyClients(bool c1, bool c2, bool c3) {
-        TestClient *client1 = static_cast<TestClient*>(mTestClient1.get());
-        TestClient *client2 = static_cast<TestClient*>(mTestClient2.get());
-        TestClient *client3 = static_cast<TestClient*>(mTestClient3.get());
-
-        EXPECT_EQ(c1, client1->reclaimed());
-        EXPECT_EQ(c2, client2->reclaimed());
-        EXPECT_EQ(c3, client3->reclaimed());
-
-        client1->reset();
-        client2->reset();
-        client3->reset();
-    }
 
     // test set up
     // ---------------------------------------------------------------------------------
@@ -268,7 +306,6 @@
 
     void testOverridePid() {
 
-        bool result;
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
@@ -293,8 +330,6 @@
     }
 
     void testMarkClientForPendingRemoval() {
-        bool result;
-
         {
             addResource();
             mService->mSupportsSecureWithNonSecureCodec = true;
@@ -307,13 +342,17 @@
 
             // no lower priority client
             CHECK_STATUS_FALSE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient2));
 
             // client marked for pending removal from the same process got reclaimed
             CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
@@ -331,11 +370,15 @@
             // client marked for pending removal from the same process got reclaimed
             // first, even though there are lower priority process
             CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // lower priority client got reclaimed
             CHECK_STATUS_TRUE(mService->reclaimResource(kTestPid2, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_EQ(true, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
@@ -349,17 +392,23 @@
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(true, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // No more clients marked for removal
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
-            verifyClients(false /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             mService->markClientForPendingRemoval(kTestPid2, getId(mTestClient3));
 
             // client marked for pending removal got reclaimed
             EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid2).isOk());
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_EQ(false, toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_EQ(false, toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_EQ(true, toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 1 which still left
             mService->removeClient(kTestPid1, getId(mTestClient1));
@@ -384,14 +433,15 @@
 
     void testGetAllClients() {
         addResource();
-
         MediaResource::Type type = MediaResource::Type::kSecureCodec;
+        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
+
         Vector<std::shared_ptr<IResourceManagerClient> > clients;
-        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, subType, &clients));
         // some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
         // will fail.
-        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, &clients));
-        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, &clients));
+        EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, subType, &clients));
+        EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, subType, &clients));
 
         EXPECT_EQ(2u, clients.size());
         // (OK to require ordering in clients[], as the pid map is sorted)
@@ -400,7 +450,6 @@
     }
 
     void testReclaimResourceSecure() {
-        bool result;
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
@@ -417,11 +466,15 @@
 
             // reclaim all secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -439,7 +492,9 @@
 
             // reclaim all secure and non-secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, true /* c2 */, true /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -458,15 +513,21 @@
 
             // reclaim all non-secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one largest graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another largest graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -483,15 +544,21 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -508,20 +575,25 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // secure codec from lowest process got reclaimed
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another secure codec from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more secure codec, non-secure codec will be reclaimed.
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
         }
     }
 
     void testReclaimResourceNonSecure() {
-        bool result;
         std::vector<MediaResourceParcel> resources;
         resources.push_back(MediaResource(MediaResource::Type::kNonSecureCodec, 1));
         resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
@@ -537,11 +609,15 @@
 
             // reclaim all secure codecs
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim one graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -558,15 +634,21 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // one largest graphic memory from lowest process got reclaimed
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // call again should reclaim another graphic memory from lowest process
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // nothing left
             CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, resources, &result));
@@ -582,11 +664,15 @@
 
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
             // one non secure codec from lowest process got reclaimed
-            verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
+            EXPECT_FALSE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // no more non-secure codec, secure codec from lowest priority process will be reclaimed
             CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, resources, &result));
-            verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
+            EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+            EXPECT_FALSE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
 
             // clean up client 3 which still left
             mService->removeClient(kTestPid2, getId(mTestClient3));
@@ -595,13 +681,17 @@
 
     void testGetLowestPriorityBiggestClient() {
         MediaResource::Type type = MediaResource::Type::kGraphicMemory;
+        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
         std::shared_ptr<IResourceManagerClient> client;
-        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
+        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
+                &client));
 
         addResource();
 
-        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, &client));
-        EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
+        EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, subType,
+                &client));
+        EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, subType,
+                &client));
 
         // kTestPid1 is the lowest priority process with MediaResource::Type::kGraphicMemory.
         // mTestClient1 has the largest MediaResource::Type::kGraphicMemory within kTestPid1.
@@ -614,35 +704,25 @@
         TestProcessInfo processInfo;
 
         MediaResource::Type type = MediaResource::Type::kGraphicMemory;
-        EXPECT_FALSE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+        MediaResource::SubType subType = MediaResource::SubType::kUnspecifiedSubType;
+        EXPECT_FALSE(mService->getLowestPriorityPid_l(type, subType, &pid, &priority));
 
         addResource();
 
-        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, subType, &pid, &priority));
         EXPECT_EQ(kTestPid1, pid);
         int priority1;
         processInfo.getPriority(kTestPid1, &priority1);
         EXPECT_EQ(priority1, priority);
 
         type = MediaResource::Type::kNonSecureCodec;
-        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+        EXPECT_TRUE(mService->getLowestPriorityPid_l(type, subType, &pid, &priority));
         EXPECT_EQ(kTestPid2, pid);
         int priority2;
         processInfo.getPriority(kTestPid2, &priority2);
         EXPECT_EQ(priority2, priority);
     }
 
-    void testGetBiggestClient() {
-        MediaResource::Type type = MediaResource::Type::kGraphicMemory;
-        std::shared_ptr<IResourceManagerClient> client;
-        EXPECT_FALSE(mService->getBiggestClient_l(kTestPid2, type, &client));
-
-        addResource();
-
-        EXPECT_TRUE(mService->getBiggestClient_l(kTestPid2, type, &client));
-        EXPECT_EQ(mTestClient2, client);
-    }
-
     void testIsCallingPriorityHigher() {
         EXPECT_FALSE(mService->isCallingPriorityHigher_l(101, 100));
         EXPECT_FALSE(mService->isCallingPriorityHigher_l(100, 100));
@@ -725,6 +805,361 @@
         EXPECT_EQ(4u, mSystemCB->eventCount());
         EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
     }
+
+    void testReclaimResources_withVideoCodec_reclaimsOnlyVideoCodec() {
+        const std::shared_ptr<IResourceManagerClient>& audioImageTestClient = mTestClient1;
+        const std::shared_ptr<IResourceManagerClient>& videoTestClient = mTestClient2;
+
+        // Create an audio and image codec resource
+        std::vector<MediaResourceParcel> audioImageResources;
+        audioImageResources.push_back(createNonSecureAudioCodecResource());
+        audioImageResources.push_back(createNonSecureImageCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(audioImageTestClient),
+                audioImageTestClient, audioImageResources);
+
+        // Fail to reclaim a video codec resource
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureVideoCodecResource());
+        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Now add a video codec resource
+        std::vector<MediaResourceParcel> videoResources;
+        videoResources.push_back(createNonSecureVideoCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoTestClient), videoTestClient,
+                videoResources);
+
+        // Verify that the newly-created video codec resource can be reclaimed
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the audio and image resources are untouched
+        EXPECT_FALSE(toTestClient(audioImageTestClient)->checkIfReclaimedAndReset());
+        // But the video resource was reclaimed
+        EXPECT_TRUE(toTestClient(videoTestClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResources_withAudioCodec_reclaimsOnlyAudioCodec() {
+        const auto & videoImageTestClient = mTestClient1;
+        const auto & audioTestClient = mTestClient2;
+
+        // Create a video and image codec resource
+        std::vector<MediaResourceParcel> videoImageResources;
+        videoImageResources.push_back(createNonSecureVideoCodecResource());
+        videoImageResources.push_back(createNonSecureImageCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoImageTestClient),
+                videoImageTestClient, videoImageResources);
+
+        // Fail to reclaim an audio codec resource
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureAudioCodecResource());
+        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Now add an audio codec resource
+        std::vector<MediaResourceParcel> audioResources;
+        audioResources.push_back(createNonSecureAudioCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid2, getId(audioTestClient), audioTestClient,
+                audioResources);
+
+        // Verify that the newly-created audio codec resource can be reclaimed
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the video and image resources are untouched
+        EXPECT_FALSE(toTestClient(videoImageTestClient)->checkIfReclaimedAndReset());
+        // But the audio resource was reclaimed
+        EXPECT_TRUE(toTestClient(audioTestClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResources_withImageCodec_reclaimsOnlyImageCodec() {
+        const auto & videoAudioTestClient = mTestClient1;
+        const auto & imageTestClient = mTestClient2;
+
+        // Create a video and audio codec resource
+        std::vector<MediaResourceParcel> videoAudioResources;
+        videoAudioResources.push_back(createNonSecureVideoCodecResource());
+        videoAudioResources.push_back(createNonSecureAudioCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid1, getId(videoAudioTestClient),
+                videoAudioTestClient, videoAudioResources);
+
+        // Fail to reclaim an image codec resource
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureImageCodecResource());
+        CHECK_STATUS_FALSE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Now add an image codec resource
+        std::vector<MediaResourceParcel> imageResources;
+        imageResources.push_back(createNonSecureImageCodecResource());
+        mService->addResource(kLowPriorityPid, kTestUid2, getId(imageTestClient), imageTestClient,
+                imageResources);
+
+        // Verify that the newly-created image codec resource can be reclaimed
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the video and audio resources are untouched
+        EXPECT_FALSE(toTestClient(videoAudioTestClient)->checkIfReclaimedAndReset());
+        // But the image resource was reclaimed
+        EXPECT_TRUE(toTestClient(imageTestClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResources_whenPartialResourceMatch_reclaims() {
+        const int onlyUid = kTestUid1;
+        const auto onlyClient = createTestClient(kLowPriorityPid);
+
+        std::vector<MediaResourceParcel> ownedResources;
+        ownedResources.push_back(createNonSecureVideoCodecResource());
+        ownedResources.push_back(createGraphicMemoryResource(100));
+        mService->addResource(kLowPriorityPid, onlyUid, getId(onlyClient), onlyClient,
+                ownedResources);
+
+        // Request an image codec (which is not owned) instead of the owned video codec, but also
+        // request graphic memory; the graphic memory match is what triggers the reclaim.
+        std::vector<MediaResourceParcel> reclaimResources;
+        reclaimResources.push_back(createNonSecureImageCodecResource());
+        reclaimResources.push_back(createGraphicMemoryResource(100));
+        CHECK_STATUS_TRUE(mService->reclaimResource(kHighPriorityPid, reclaimResources, &result));
+
+        // Verify that the video codec resource (including the needed graphic memory) was reclaimed
+        EXPECT_TRUE(toTestClient(onlyClient)->checkIfReclaimedAndReset());
+    }
+
+    void testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources() {
+        // this test only uses one pid and one uid
+        const int onlyPid = kTestPid1;
+        const int onlyUid = kTestUid1;
+
+        // secure video codec
+        const auto smallSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largeSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largestSecureVideoActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createSecureVideoCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallSecureVideoMarkedClient),
+                    smallSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureVideoCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeSecureVideoMarkedClient),
+                    largeSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureVideoCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestSecureVideoActiveClient),
+                    largestSecureVideoActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureVideoMarkedClient));
+        // don't mark the largest client
+
+        // non-secure video codec
+        const auto smallNonSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largeNonSecureVideoMarkedClient = createTestClient(onlyPid);
+        const auto largestNonSecureVideoActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createNonSecureVideoCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureVideoMarkedClient),
+                    smallNonSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureVideoCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureVideoMarkedClient),
+                    largeNonSecureVideoMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureVideoCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureVideoActiveClient),
+                    largestNonSecureVideoActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureVideoMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureVideoMarkedClient));
+        // don't mark the largest client
+
+        // secure audio codec
+        const auto smallSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largeSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largestSecureAudioActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createSecureAudioCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallSecureAudioMarkedClient),
+                    smallSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureAudioCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeSecureAudioMarkedClient),
+                    largeSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureAudioCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestSecureAudioActiveClient),
+                    largestSecureAudioActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureAudioMarkedClient));
+        // don't mark the largest client
+
+        // non-secure audio codec
+        const auto smallNonSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largeNonSecureAudioMarkedClient = createTestClient(onlyPid);
+        const auto largestNonSecureAudioActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createNonSecureAudioCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureAudioMarkedClient),
+                    smallNonSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureAudioCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureAudioMarkedClient),
+                    largeNonSecureAudioMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureAudioCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureAudioActiveClient),
+                    largestNonSecureAudioActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureAudioMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureAudioMarkedClient));
+        // don't mark the largest client
+
+        // secure image codec
+        const auto smallSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largeSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largestSecureImageActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createSecureImageCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallSecureImageMarkedClient),
+                    smallSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureImageCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeSecureImageMarkedClient),
+                    largeSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createSecureImageCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestSecureImageActiveClient),
+                    largestSecureImageActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeSecureImageMarkedClient));
+        // don't mark the largest client
+
+        // non-secure image codec
+        const auto smallNonSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largeNonSecureImageMarkedClient = createTestClient(onlyPid);
+        const auto largestNonSecureImageActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createNonSecureImageCodecResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallNonSecureImageMarkedClient),
+                    smallNonSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureImageCodecResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeNonSecureImageMarkedClient),
+                    largeNonSecureImageMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createNonSecureImageCodecResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestNonSecureImageActiveClient),
+                    largestNonSecureImageActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallNonSecureImageMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeNonSecureImageMarkedClient));
+        // don't mark the largest client
+
+        // graphic memory
+        const auto smallGraphicMemoryMarkedClient = createTestClient(onlyPid);
+        const auto largeGraphicMemoryMarkedClient = createTestClient(onlyPid);
+        const auto largestGraphicMemoryActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createGraphicMemoryResource(100));
+            mService->addResource(onlyPid, onlyUid, getId(smallGraphicMemoryMarkedClient),
+                    smallGraphicMemoryMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createGraphicMemoryResource(200));
+            mService->addResource(onlyPid, onlyUid, getId(largeGraphicMemoryMarkedClient),
+                    largeGraphicMemoryMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createGraphicMemoryResource(300));
+            mService->addResource(onlyPid, onlyUid, getId(largestGraphicMemoryActiveClient),
+                    largestGraphicMemoryActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallGraphicMemoryMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeGraphicMemoryMarkedClient));
+        // don't mark the largest client
+
+        // DRM session
+        const auto smallDrmSessionMarkedClient = createTestClient(onlyPid);
+        const auto largeDrmSessionMarkedClient = createTestClient(onlyPid);
+        const auto largestDrmSessionActiveClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createDrmSessionResource(1));
+            mService->addResource(onlyPid, onlyUid, getId(smallDrmSessionMarkedClient),
+                    smallDrmSessionMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createDrmSessionResource(2));
+            mService->addResource(onlyPid, onlyUid, getId(largeDrmSessionMarkedClient),
+                    largeDrmSessionMarkedClient, resources);
+            resources.clear();
+            resources.push_back(createDrmSessionResource(3));
+            mService->addResource(onlyPid, onlyUid, getId(largestDrmSessionActiveClient),
+                    largestDrmSessionActiveClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(smallDrmSessionMarkedClient));
+        mService->markClientForPendingRemoval(onlyPid, getId(largeDrmSessionMarkedClient));
+        // don't mark the largest client
+
+        // battery
+        const auto batteryMarkedClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createBatteryResource());
+            mService->addResource(onlyPid, onlyUid, getId(batteryMarkedClient),
+                    batteryMarkedClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(batteryMarkedClient));
+
+        // CPU boost
+        const auto cpuBoostMarkedClient = createTestClient(onlyPid);
+        {
+            std::vector<MediaResourceParcel> resources;
+            resources.push_back(createCpuBoostResource());
+            mService->addResource(onlyPid, onlyUid, getId(cpuBoostMarkedClient),
+                    cpuBoostMarkedClient, resources);
+        }
+        mService->markClientForPendingRemoval(onlyPid, getId(cpuBoostMarkedClient));
+
+        // now we expect to reclaim only the biggest marked client per reclaimable resource type
+        EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(onlyPid).isOk());
+        // secure video codec
+        EXPECT_FALSE(toTestClient(smallSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestSecureVideoActiveClient)->checkIfReclaimedAndReset());
+        // non-secure video codec
+        EXPECT_FALSE(toTestClient(smallNonSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeNonSecureVideoMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestNonSecureVideoActiveClient)->checkIfReclaimedAndReset());
+        // secure audio codec
+        EXPECT_FALSE(toTestClient(smallSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestSecureAudioActiveClient)->checkIfReclaimedAndReset());
+        // non-secure audio codec
+        EXPECT_FALSE(toTestClient(smallNonSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeNonSecureAudioMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestNonSecureAudioActiveClient)->checkIfReclaimedAndReset());
+        // secure image codec
+        EXPECT_FALSE(toTestClient(smallSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestSecureImageActiveClient)->checkIfReclaimedAndReset());
+        // non-secure image codec
+        EXPECT_FALSE(toTestClient(smallNonSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeNonSecureImageMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestNonSecureImageActiveClient)->checkIfReclaimedAndReset());
+        // graphic memory
+        EXPECT_FALSE(toTestClient(smallGraphicMemoryMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeGraphicMemoryMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestGraphicMemoryActiveClient)->checkIfReclaimedAndReset());
+        // DRM session
+        EXPECT_FALSE(toTestClient(smallDrmSessionMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_TRUE(toTestClient(largeDrmSessionMarkedClient)->checkIfReclaimedAndReset());
+        EXPECT_FALSE(toTestClient(largestDrmSessionActiveClient)->checkIfReclaimedAndReset());
+        // battery is not expected to be reclaimed when marked as pending removal
+        EXPECT_FALSE(toTestClient(batteryMarkedClient)->checkIfReclaimedAndReset());
+        // CPU boost is not expected to be reclaimed when marked as pending removal
+        EXPECT_FALSE(toTestClient(cpuBoostMarkedClient)->checkIfReclaimedAndReset());
+    }
 };
 
 TEST_F(ResourceManagerServiceTest, config) {
@@ -768,19 +1203,15 @@
     testGetLowestPriorityPid();
 }
 
-TEST_F(ResourceManagerServiceTest, getBiggestClient_l) {
-    testGetBiggestClient();
-}
-
 TEST_F(ResourceManagerServiceTest, isCallingPriorityHigher_l) {
     testIsCallingPriorityHigher();
 }
 
-TEST_F(ResourceManagerServiceTest, testBatteryStats) {
+TEST_F(ResourceManagerServiceTest, batteryStats) {
     testBatteryStats();
 }
 
-TEST_F(ResourceManagerServiceTest, testCpusetBoost) {
+TEST_F(ResourceManagerServiceTest, cpusetBoost) {
     testCpusetBoost();
 }
 
@@ -792,4 +1223,25 @@
     testMarkClientForPendingRemoval();
 }
 
+TEST_F(ResourceManagerServiceTest, reclaimResources_withVideoCodec_reclaimsOnlyVideoCodec) {
+    testReclaimResources_withVideoCodec_reclaimsOnlyVideoCodec();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResources_withAudioCodec_reclaimsOnlyAudioCodec) {
+    testReclaimResources_withAudioCodec_reclaimsOnlyAudioCodec();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResources_withImageCodec_reclaimsOnlyImageCodec) {
+    testReclaimResources_withImageCodec_reclaimsOnlyImageCodec();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResources_whenPartialResourceMatch_reclaims) {
+    testReclaimResources_whenPartialResourceMatch_reclaims();
+}
+
+TEST_F(ResourceManagerServiceTest,
+        reclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources) {
+    testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
+}
+
 } // namespace android
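
The new reclaimResourcesFromMarkedClients test above pins down the behavior that reclaimResourcesFromClientsPendingRemoval() considers each reclaimable resource type and subtype separately and, among the clients marked for pending removal, reclaims only the biggest holder, leaving smaller marked clients, unmarked clients, and the kBattery/kCpuBoost entries alone. Condensed to a single resource kind, the expectation looks roughly like this (reusing the fixture helpers defined earlier in the test file):

    const auto smallMarked = createTestClient(kTestPid1);
    const auto largeMarked = createTestClient(kTestPid1);
    std::vector<MediaResourceParcel> res;
    res.push_back(createSecureVideoCodecResource(1));
    mService->addResource(kTestPid1, kTestUid1, getId(smallMarked), smallMarked, res);
    res.clear();
    res.push_back(createSecureVideoCodecResource(2));
    mService->addResource(kTestPid1, kTestUid1, getId(largeMarked), largeMarked, res);
    mService->markClientForPendingRemoval(kTestPid1, getId(smallMarked));
    mService->markClientForPendingRemoval(kTestPid1, getId(largeMarked));

    EXPECT_TRUE(mService->reclaimResourcesFromClientsPendingRemoval(kTestPid1).isOk());
    // Only the biggest marked holder of the secure video codec resource is reclaimed.
    EXPECT_FALSE(toTestClient(smallMarked)->checkIfReclaimedAndReset());
    EXPECT_TRUE(toTestClient(largeMarked)->checkIfReclaimedAndReset());
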
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index acd9df1..003569d 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -116,6 +116,26 @@
 
 const EventTracker::Event EventTracker::NoEvent;
 
+static MediaResource createSecureVideoCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kSecureCodec,
+        MediaResource::SubType::kVideoCodec, amount);
+}
+
+static MediaResource createNonSecureVideoCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kNonSecureCodec,
+        MediaResource::SubType::kVideoCodec, amount);
+}
+
+static MediaResource createSecureAudioCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kSecureCodec,
+        MediaResource::SubType::kAudioCodec, amount);
+}
+
+static MediaResource createNonSecureAudioCodecResource(int amount = 1) {
+    return MediaResource(MediaResource::Type::kNonSecureCodec,
+        MediaResource::SubType::kAudioCodec, amount);
+}
+
 // Operators for GTest macros.
 bool operator==(const EventTracker::Event& lhs, const EventTracker::Event& rhs) {
     return lhs.type == rhs.type && lhs.uid == rhs.uid && lhs.pid == rhs.pid &&
@@ -233,30 +253,30 @@
 
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource()};
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
 
     // Add non-secure video codec.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
 
     // Add secure & non-secure video codecs.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
 
     // Add additional audio codecs, should be ignored.
-    resources.push_back(MediaResource::CodecResource(1 /*secure*/, 0 /*video*/));
-    resources.push_back(MediaResource::CodecResource(0 /*secure*/, 0 /*video*/));
+    resources.push_back(createSecureAudioCodecResource());
+    resources.push_back(createNonSecureAudioCodecResource());
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables2));
@@ -276,9 +296,9 @@
 
     // Add multiple secure & non-secure video codecs.
     // Multiple entries of the same type should be merged, count should be propagated correctly.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 3 /*count*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(3)};
     observables1 = {{MediaObservableType::kVideoSecureCodec, 2}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 3}};
     observables3 = {{MediaObservableType::kVideoSecureCodec, 2},
@@ -300,7 +320,7 @@
 
     std::vector<MediaResourceParcel> resources;
     // Add secure video codec to client1.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource()};
     mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid1, kTestPid1, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
@@ -322,7 +342,7 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add non-secure video codec to client2.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
@@ -344,24 +364,24 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
 
     // Add secure & non-secure video codecs, plus audio codecs (that's ignored).
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(),
+                 createSecureAudioCodecResource(),
+                 createNonSecureAudioCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables2));
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, should have no event.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 0 /*video*/)};
+    resources = {createSecureAudioCodecResource()};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::NoEvent);
     // Remove the other audio codec and the secure video codec, only secure video codec
     // removal should be reported.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/)};
+    resources = {createNonSecureAudioCodecResource(),
+                 createSecureVideoCodecResource()};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Idle(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
@@ -386,10 +406,10 @@
 
     // Add multiple secure & non-secure video codecs, plus audio codecs (that's ignored).
     // (ResourceManager will merge these internally.)
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 4 /*count*/),
-                 MediaResource::CodecResource(1 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 0 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(4),
+                 createSecureAudioCodecResource(),
+                 createNonSecureAudioCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 4}};
@@ -400,10 +420,10 @@
     EXPECT_EQ(mTestObserver3->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables3));
     // Remove one audio codec, 2 secure video codecs and 2 non-secure video codecs.
     // 1 secure video codec removal and 2 non-secure video codec removals should be reported.
-    resources = {MediaResource::CodecResource(0 /*secure*/, 0 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/, 2 /*count*/)};
+    resources = {createNonSecureAudioCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource(2)};
     mService->removeResource(kTestPid2, getId(mTestClient3), resources);
     observables1 = {{MediaObservableType::kVideoSecureCodec, 1}};
     observables2 = {{MediaObservableType::kVideoNonSecureCodec, 2}};
@@ -443,8 +463,8 @@
     std::vector<MediaResourceParcel> resources;
 
     // Add secure & non-secure video codecs.
-    resources = {MediaResource::CodecResource(1 /*secure*/, 1 /*video*/),
-                 MediaResource::CodecResource(0 /*secure*/, 1 /*video*/)};
+    resources = {createSecureVideoCodecResource(),
+                 createNonSecureVideoCodecResource()};
     mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources);
     EXPECT_EQ(mTestObserver1->pop(), EventTracker::Busy(kTestUid2, kTestPid2, observables1));
     EXPECT_EQ(mTestObserver2->pop(), EventTracker::NoEvent);
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index a9fd14f..fa5eb4e 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -47,7 +47,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
@@ -80,7 +80,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index cb180ec..ae13656 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -34,8 +34,8 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
-        "resourcemanager_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
+        "resourcemanager_aidl_interface-ndk",
         "libmediatranscodingservice",
     ],
 
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 0cb2fad..8e17f55 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -259,9 +259,7 @@
 static constexpr bool success = true;
 static constexpr bool fail = false;
 
-struct TestClientCallback : public BnTranscodingClientCallback,
-                            public EventTracker,
-                            public std::enable_shared_from_this<TestClientCallback> {
+struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
     TestClientCallback(const char* packageName, int32_t id)
           : mClientId(id), mClientPid(PID(id)), mClientUid(UID(id)), mPackageName(packageName) {
         ALOGI("TestClientCallback %d created: pid %d, uid %d", id, PID(id), UID(id));
@@ -348,8 +346,8 @@
         ALOGD("registering %s with uid %d", packageName, mClientUid);
 
         std::shared_ptr<ITranscodingClient> client;
-        Status status =
-                service->registerClient(shared_from_this(), kClientName, packageName, &client);
+        Status status = service->registerClient(ref<TestClientCallback>(), kClientName, packageName,
+                                                &client);
 
         mClient = status.isOk() ? client : nullptr;
         return status;
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index 3a89e12..decc5fe 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -28,17 +28,11 @@
     defaults: ["libavservices_minijail_defaults"],
     vendor_available: true,
     min_sdk_version: "29",
-    export_include_dirs: ["."],
-}
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
 
-// By adding "vendor_available: true" to "libavservices_minijail", we don't
-// need to have "libavservices_minijail_vendor" any longer.
-// "libavservices_minijail_vendor" will be removed, once we replace it with
-// "libavservices_minijail" in all vendor modules. (b/146313710)
-cc_library_shared {
-    name: "libavservices_minijail_vendor",
-    vendor: true,
-    defaults: ["libavservices_minijail_defaults"],
     export_include_dirs: ["."],
 }
 
diff --git a/services/minijail/OWNERS b/services/minijail/OWNERS
index 19f4f9f..9ebf41e 100644
--- a/services/minijail/OWNERS
+++ b/services/minijail/OWNERS
@@ -1,2 +1,2 @@
 jorgelo@google.com
-marcone@google.com
+essick@google.com
diff --git a/services/oboeservice/AAudioCommandQueue.cpp b/services/oboeservice/AAudioCommandQueue.cpp
new file mode 100644
index 0000000..9bd18b3
--- /dev/null
+++ b/services/oboeservice/AAudioCommandQueue.cpp
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AAudioCommandQueue"
+//#define LOG_NDEBUG 0
+
+#include <chrono>
+
+#include <utils/Log.h>
+
+#include "AAudioCommandQueue.h"
+
+namespace aaudio {
+
+aaudio_result_t AAudioCommandQueue::sendCommand(std::shared_ptr<AAudioCommand> command) {
+    {
+        std::scoped_lock<std::mutex> _l(mLock);
+        if (!mRunning) {
+            ALOGE("Tried to send command while it was not running");
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        mCommands.push(command);
+        mWaitWorkCond.notify_one();
+    }
+
+    std::unique_lock _cl(command->lock);
+    android::base::ScopedLockAssertion lockAssertion(command->lock);
+    ALOGV("Sending command %d, wait for reply(%d) with timeout %jd",
+           command->operationCode, command->isWaitingForReply, command->timeoutNanoseconds);
+    // `isWaitingForReply` is first set when the command is constructed. It will be flipped
+    // when the command is completed.
+    auto timeoutExpire = std::chrono::steady_clock::now()
+            + std::chrono::nanoseconds(command->timeoutNanoseconds);
+    while (command->isWaitingForReply) {
+        if (command->conditionVariable.wait_until(_cl, timeoutExpire)
+                == std::cv_status::timeout) {
+            ALOGD("Command %d time out", command->operationCode);
+            command->result = AAUDIO_ERROR_TIMEOUT;
+            command->isWaitingForReply = false;
+        }
+    }
+    ALOGV("Command %d sent with result as %d", command->operationCode, command->result);
+    return command->result;
+}
+
+std::shared_ptr<AAudioCommand> AAudioCommandQueue::waitForCommand(int64_t timeoutNanos) {
+    std::shared_ptr<AAudioCommand> command;
+    {
+        std::unique_lock _l(mLock);
+        android::base::ScopedLockAssertion lockAssertion(mLock);
+        if (timeoutNanos >= 0) {
+            mWaitWorkCond.wait_for(_l, std::chrono::nanoseconds(timeoutNanos), [this]() {
+                android::base::ScopedLockAssertion lockAssertion(mLock);
+                return !mRunning || !mCommands.empty();
+            });
+        } else {
+            mWaitWorkCond.wait(_l, [this]() {
+                android::base::ScopedLockAssertion lockAssertion(mLock);
+                return !mRunning || !mCommands.empty();
+            });
+        }
+        if (!mCommands.empty() && mRunning) {
+            command = mCommands.front();
+            mCommands.pop();
+        }
+    }
+    return command;
+}
+
+void AAudioCommandQueue::startWaiting() {
+    std::scoped_lock<std::mutex> _l(mLock);
+    mRunning = true;
+}
+
+void AAudioCommandQueue::stopWaiting() {
+    std::scoped_lock<std::mutex> _l(mLock);
+    mRunning = false;
+    // Clear all commands in the queue as the command thread is stopped.
+    while (!mCommands.empty()) {
+        auto command = mCommands.front();
+        mCommands.pop();
+        std::scoped_lock<std::mutex> _cl(command->lock);
+        // If a sender is waiting for the command's result, return AAUDIO_ERROR_INVALID_STATE
+        // as there is no longer a thread processing commands.
+        if (command->isWaitingForReply) {
+            command->result = AAUDIO_ERROR_INVALID_STATE;
+            command->isWaitingForReply = false;
+            command->conditionVariable.notify_one();
+        }
+    }
+    mWaitWorkCond.notify_one();
+}
+
+} // namespace aaudio
\ No newline at end of file
diff --git a/services/oboeservice/AAudioCommandQueue.h b/services/oboeservice/AAudioCommandQueue.h
new file mode 100644
index 0000000..64442a3
--- /dev/null
+++ b/services/oboeservice/AAudioCommandQueue.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <condition_variable>
+#include <memory>
+#include <mutex>
+#include <queue>
+
+#include <aaudio/AAudio.h>
+#include <android-base/thread_annotations.h>
+
+namespace aaudio {
+
+typedef int32_t aaudio_command_opcode;
+
+class AAudioCommandParam {
+public:
+    AAudioCommandParam() = default;
+    virtual ~AAudioCommandParam() = default;
+};
+
+class AAudioCommand {
+public:
+    explicit AAudioCommand(
+            aaudio_command_opcode opCode, std::shared_ptr<AAudioCommandParam> param = nullptr,
+            bool waitForReply = false, int64_t timeoutNanos = 0)
+            : operationCode(opCode), parameter(param), isWaitingForReply(waitForReply),
+              timeoutNanoseconds(timeoutNanos) { }
+    virtual ~AAudioCommand() = default;
+
+    std::mutex lock;
+    std::condition_variable conditionVariable;
+
+    const aaudio_command_opcode operationCode;
+    std::shared_ptr<AAudioCommandParam> parameter;
+    bool isWaitingForReply GUARDED_BY(lock);
+    const int64_t timeoutNanoseconds;
+    aaudio_result_t result GUARDED_BY(lock) = AAUDIO_OK;
+};
+
+class AAudioCommandQueue {
+public:
+    AAudioCommandQueue() = default;
+    ~AAudioCommandQueue() = default;
+
+    /**
+     * Send a command to the command queue. If the command requires a reply, this call blocks
+     * until the command completes or its timeout expires.
+     *
+     * @param command the command to send to the command queue.
+     * @return the result of sending the command, or of executing it if the command needs to wait
+     *         for a reply. If the wait times out, AAUDIO_ERROR_TIMEOUT is returned.
+     */
+    aaudio_result_t sendCommand(std::shared_ptr<AAudioCommand> command);
+
+    /**
+     * Wait for the next available command or until the timeout expires.
+     *
+     * @param timeoutNanos the maximum time to wait for the next command (0 means return
+     *                     immediately in any case), negative to wait forever.
+     * @return the next available command if any or a nullptr when there is none.
+     */
+    std::shared_ptr<AAudioCommand> waitForCommand(int64_t timeoutNanos = -1);
+
+    /**
+     * Start waiting for commands. Commands can only be pushed into the command queue after it
+     * starts waiting.
+     */
+    void startWaiting();
+
+    /**
+     * Force stop waiting for commands; pending commands are cleared and their senders are woken.
+     */
+    void stopWaiting();
+
+private:
+    std::mutex mLock;
+    std::condition_variable mWaitWorkCond;
+
+    std::queue<std::shared_ptr<AAudioCommand>> mCommands GUARDED_BY(mLock);
+    bool mRunning GUARDED_BY(mLock) = false;
+};
+
+} // namespace aaudio
\ No newline at end of file
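Note (illustration only, not part of the change): a minimal sketch of how a sender thread and a
command thread might drive AAudioCommandQueue, mirroring the command loop added to
AAudioServiceStreamBase later in this change. The opcode value and the ad-hoc thread setup below
are assumptions.

// Sketch only; kStartOp and the surrounding thread setup are assumptions, not part of this change.
#include <memory>
#include <thread>
#include "AAudioCommandQueue.h"

namespace {

constexpr aaudio::aaudio_command_opcode kStartOp = 1;  // hypothetical opcode

void commandQueueSketch() {
    aaudio::AAudioCommandQueue queue;
    queue.startWaiting();  // commands may only be sent once the queue is running

    // Consumer (command thread): wait for a command, execute it, then wake the sender.
    std::thread consumer([&queue]() {
        while (auto command = queue.waitForCommand(-1 /* wait forever */)) {
            std::scoped_lock<std::mutex> lock(command->lock);
            command->result = AAUDIO_OK;  // the real operation would run here
            if (command->isWaitingForReply) {
                command->isWaitingForReply = false;
                command->conditionVariable.notify_one();
            }
        }
    });

    // Producer: send a command and block until it completes or times out.
    auto command = std::make_shared<aaudio::AAudioCommand>(
            kStartOp, nullptr /*param*/, true /*waitForReply*/,
            3LL * 1000 * 1000 * 1000 /*timeoutNanos*/);
    aaudio_result_t result = queue.sendCommand(command);  // AAUDIO_OK or AAUDIO_ERROR_TIMEOUT
    (void) result;

    queue.stopWaiting();  // wakes the consumer; waitForCommand() then returns nullptr
    consumer.join();
}

}  // namespace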
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 40a664e..2679b2e 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -280,6 +280,22 @@
     AIDL_RETURN(serviceStream->unregisterAudioThread(clientThreadId));
 }
 
+Status AAudioService::exitStandby(int32_t streamHandle, Endpoint* endpoint, int32_t *_aidl_return) {
+    static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
+
+    sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+    if (serviceStream.get() == nullptr) {
+        ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
+        AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
+    }
+    AudioEndpointParcelable endpointParcelable;
+    aaudio_result_t result = serviceStream->exitStandby(&endpointParcelable);
+    if (result == AAUDIO_OK) {
+        *endpoint = std::move(endpointParcelable).parcelable();
+    }
+    AIDL_RETURN(result);
+}
+
 bool AAudioService::isCallerInService() {
     pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAudioClient.attributionSource.pid));
     uid_t clientUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index 7c1b796..0a111fb 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -82,6 +82,9 @@
     binder::Status unregisterAudioThread(int32_t streamHandle, int32_t clientThreadId,
                                          int32_t* _aidl_return) override;
 
+    binder::Status exitStandby(int32_t streamHandle, ::aaudio::Endpoint* endpoint,
+                               int32_t* _aidl_return) override;
+
     aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
                                 const android::AudioClient& client,
                                 const audio_attributes_t *attr,
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 390cd5c..b55b601 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -60,7 +60,8 @@
     result << "    Sample Rate:          " << getSampleRate() << "\n";
     result << "    Channel Count:        " << getSamplesPerFrame() << "\n";
     result << "    Channel Mask:         0x" << std::hex << getChannelMask() << std::dec << "\n";
-    result << "    Format:               " << getFormat() << "\n";
+    result << "    Format:               " << getFormat()
+                                           << " (" << audio_format_to_string(getFormat()) << ")\n";
     result << "    Frames Per Burst:     " << mFramesPerBurst << "\n";
     result << "    Usage:                " << getUsage() << "\n";
     result << "    ContentType:          " << getContentType() << "\n";
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index a7f63d3..92004c5 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -77,6 +77,16 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
+    virtual aaudio_result_t standby() {
+        ALOGD("AAudioServiceEndpoint::standby() AAUDIO_ERROR_UNAVAILABLE");
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
+    virtual aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) {
+        ALOGD("AAudioServiceEndpoint::exitStandby() AAUDIO_ERROR_UNAVAILABLE");
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
     /**
      * @param positionFrames
      * @param timeNanos
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index bc769f0..95bd4bb 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -66,8 +66,7 @@
                 getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
             ALOGD("%s() read() returned AAUDIO_ERROR_DISCONNECTED", __func__);
-            // We do not need the returned vector.
-            (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
+            AAudioServiceEndpointShared::handleDisconnectRegisteredStreamsAsync();
             break;
         } else if (result != getFramesPerBurst()) {
             ALOGW("callbackLoop() read %d / %d",
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 35a0890..3f18b95 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -80,16 +80,16 @@
 
     audio_format_t audioFormat = getFormat();
 
-    // FLOAT is not directly supported by the HAL so ask for a 32-bit.
-    if (audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
-        // TODO remove these logs when finished debugging.
-        ALOGD("%s() change format from %d to 32_BIT", __func__, audioFormat);
-        audioFormat = AUDIO_FORMAT_PCM_32_BIT;
-    }
-
     result = openWithFormat(audioFormat);
     if (result == AAUDIO_OK) return result;
 
+    if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
+        ALOGD("%s() FLOAT failed, perhaps due to format. Try again with 32_BIT", __func__);
+        audioFormat = AUDIO_FORMAT_PCM_32_BIT;
+        result = openWithFormat(audioFormat);
+    }
+    if (result == AAUDIO_OK) return result;
+
     if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_32_BIT) {
         ALOGD("%s() 32_BIT failed, perhaps due to format. Try again with 24_BIT_PACKED", __func__);
         audioFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
@@ -183,40 +183,14 @@
             ? AAUDIO_SESSION_ID_NONE
             : (aaudio_session_id_t) sessionId;
     setSessionId(actualSessionId);
-    ALOGD("%s() deviceId = %d, sessionId = %d", __func__, getDeviceId(), getSessionId());
+
+    ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
+          __func__, audioFormat, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
-    int32_t minSizeFrames = getBufferCapacity();
-    if (minSizeFrames <= 0) { // zero will get rejected
-        minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
-    }
-    status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
-    bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
-    if (status != OK) {
-        ALOGE("%s() - createMmapBuffer() failed with status %d %s",
-              __func__, status, strerror(-status));
-        result = AAUDIO_ERROR_UNAVAILABLE;
+    result = createMmapBuffer(&mAudioDataFileDescriptor);
+    if (result != AAUDIO_OK) {
         goto error;
-    } else {
-        ALOGD("%s() createMmapBuffer() buffer_size = %d fr, burst_size %d fr"
-                      ", Sharable FD: %s",
-              __func__,
-              mMmapBufferinfo.buffer_size_frames,
-              mMmapBufferinfo.burst_size_frames,
-              isBufferShareable ? "Yes" : "No");
-    }
-
-    setBufferCapacity(mMmapBufferinfo.buffer_size_frames);
-    if (!isBufferShareable) {
-        // Exclusive mode can only be used by the service because the FD cannot be shared.
-        int32_t audioServiceUid =
-            VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
-        if ((mMmapClient.attributionSource.uid != audioServiceUid) &&
-            getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
-            ALOGW("%s() - exclusive FD cannot be used by client", __func__);
-            result = AAUDIO_ERROR_UNAVAILABLE;
-            goto error;
-        }
     }
 
     // Get information about the stream and pass it back to the caller.
@@ -224,30 +198,24 @@
             config.channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
             AAudio_isChannelIndexMask(config.channel_mask)));
 
-    // AAudio creates a copy of this FD and retains ownership of the copy.
-    // Assume that AudioFlinger will close the original shared_memory_fd.
-    mAudioDataFileDescriptor.reset(dup(mMmapBufferinfo.shared_memory_fd));
-    if (mAudioDataFileDescriptor.get() == -1) {
-        ALOGE("%s() - could not dup shared_memory_fd", __func__);
-        result = AAUDIO_ERROR_INTERNAL;
-        goto error;
-    }
-    // Call to HAL to make sure the transport FD was able to be closed by binder.
-    // This is a tricky workaround for a problem in Binder.
-    // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
-    struct audio_mmap_position position;
-    mMmapStream->getMmapPosition(&position);
-
-    mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
     setFormat(config.format);
     setSampleRate(config.sample_rate);
 
-    ALOGD("%s() actual rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
+    // If the position does not advance while the timestamp keeps advancing for more than a
+    // certain amount of time, the timestamp reported by the HAL may not be accurate. Here, the
+    // timestamp grace period is set to 5 bursts. We may want to update this value if any OEM
+    // reports that it is too short.
+    static constexpr int kTimestampGraceBurstCount = 5;
+    mTimestampGracePeriodMs = ((int64_t) kTimestampGraceBurstCount * mFramesPerBurst
+            * AAUDIO_MILLIS_PER_SECOND) / getSampleRate();
+
+    ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
           __func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
           deviceId, getBufferCapacity());
 
-    ALOGD("%s() format = 0x%08x, frame size = %d, burst size = %d",
-          __func__, getFormat(), calculateBytesPerFrame(), mFramesPerBurst);
+    ALOGD("%s() got format = 0x%X = %s, frame size = %d, burst size = %d",
+          __func__, getFormat(), audio_format_to_string(getFormat()),
+          calculateBytesPerFrame(), mFramesPerBurst);
 
     return result;
 
@@ -315,6 +283,32 @@
     return result;
 }
 
+aaudio_result_t AAudioServiceEndpointMMAP::standby() {
+    if (mMmapStream == nullptr) {
+        return AAUDIO_ERROR_NULL;
+    }
+    aaudio_result_t result = AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    return result;
+}
+
+aaudio_result_t AAudioServiceEndpointMMAP::exitStandby(AudioEndpointParcelable* parcelable) {
+    if (mMmapStream == nullptr) {
+        return AAUDIO_ERROR_NULL;
+    }
+    mAudioDataFileDescriptor.reset();
+    aaudio_result_t result = createMmapBuffer(&mAudioDataFileDescriptor);
+    if (result == AAUDIO_OK) {
+        int32_t bytesPerFrame = calculateBytesPerFrame();
+        int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
+        int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
+        parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
+        parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
+        parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
+        parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+    }
+    return result;
+}
+
 // Get free-running DSP or DMA hardware position from the HAL.
 aaudio_result_t AAudioServiceEndpointMMAP::getFreeRunningPosition(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
@@ -401,30 +395,150 @@
 /**
  * Get an immutable description of the data queue from the HAL.
  */
-aaudio_result_t AAudioServiceEndpointMMAP::getDownDataDescription(AudioEndpointParcelable &parcelable)
+aaudio_result_t AAudioServiceEndpointMMAP::getDownDataDescription(
+        AudioEndpointParcelable* parcelable)
 {
     // Gather information on the data queue based on HAL info.
     int32_t bytesPerFrame = calculateBytesPerFrame();
     int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
-    int fdIndex = parcelable.addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
-    parcelable.mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
-    parcelable.mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
-    parcelable.mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
-    parcelable.mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+    int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
+    parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
+    parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
+    parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
+    parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
     return AAUDIO_OK;
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
                                                                int64_t *timeNanos)
 {
-    if (!mExternalPositionSupported) {
-        return AAUDIO_ERROR_INVALID_STATE;
+    if (mHalExternalPositionStatus != AAUDIO_OK) {
+        return mHalExternalPositionStatus;
     }
-    status_t status = mMmapStream->getExternalPosition(positionFrames, timeNanos);
-    if (status == INVALID_OPERATION) {
-        // getExternalPosition is not supported. Set mExternalPositionSupported as false
+    uint64_t tempPositionFrames;
+    int64_t tempTimeNanos;
+    status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
+    if (status != OK) {
+        // getExternalPosition reports error. The HAL may not support the API. Cache the result
         // so that the call will not go to the HAL next time.
-        mExternalPositionSupported = false;
+        mHalExternalPositionStatus = AAudioConvert_androidToAAudioResult(status);
+        return mHalExternalPositionStatus;
     }
-    return AAudioConvert_androidToAAudioResult(status);
+
+    // If the HAL keeps reporting the same position or timestamp, it may be unable to report a
+    // correct external position, and the values it reports should not be trusted. Ideally, we
+    // would stop querying the external position once the HAL fails to report a correct position
+    // within some period, but it may not be a good idea to query the system time that often.
+    // Instead, a maximum number of frozen readings is defined, so that if the HAL keeps
+    // reporting the same timestamp or position beyond that count, the values from the HAL
+    // are no longer trusted.
+    static constexpr int kMaxFrozenCount = 20;
+    // For HAL versions below 7.0, getPresentationPosition is an optional API.
+    // For HAL version 7.0 or later, getPresentationPosition is mandatory.
+    // Even when the returned status is NO_ERROR, that does not guarantee that the returned
+    // position is valid. Do a simple validation here, checking whether the position moves
+    // forward within half a second, so that this function can return an error if the
+    // validation fails. Note that this validation is not applied only to HAL versions
+    // below 7.0, because there is a chance that the HAL is not reporting the timestamp
+    // and position correctly.
+    if (mLastPositionFrames > tempPositionFrames) {
+        // If the position is going backwards, there must be something wrong with the HAL.
+        // In that case, we do not trust the values reported by the HAL.
+        ALOGW("%s position is going backwards, last position(%jd) current position(%jd)",
+              __func__, mLastPositionFrames, tempPositionFrames);
+        mHalExternalPositionStatus = AAUDIO_ERROR_INTERNAL;
+        return mHalExternalPositionStatus;
+    } else if (mLastPositionFrames == tempPositionFrames) {
+        if (tempTimeNanos - mTimestampNanosForLastPosition >
+                AAUDIO_NANOS_PER_MILLISECOND * mTimestampGracePeriodMs) {
+            ALOGW("%s, the reported position is not changed within %d msec. "
+                  "Set the external position as not supported", __func__, mTimestampGracePeriodMs);
+            mHalExternalPositionStatus = AAUDIO_ERROR_INTERNAL;
+            return mHalExternalPositionStatus;
+        }
+        mFrozenPositionCount++;
+    } else {
+        mFrozenPositionCount = 0;
+    }
+
+    if (mTimestampNanosForLastPosition > tempTimeNanos) {
+        // If the timestamp is going backwards, there must be something wrong with the HAL.
+        // In that case, we do not trust the values reported by the HAL.
+        ALOGW("%s timestamp is going backwards, last timestamp(%jd), current timestamp(%jd)",
+              __func__, mTimestampNanosForLastPosition, tempTimeNanos);
+        mHalExternalPositionStatus = AAUDIO_ERROR_INTERNAL;
+        return mHalExternalPositionStatus;
+    } else if (mTimestampNanosForLastPosition == tempTimeNanos) {
+        mFrozenTimestampCount++;
+    } else {
+        mFrozenTimestampCount = 0;
+    }
+
+    if (mFrozenTimestampCount + mFrozenPositionCount > kMaxFrozenCount) {
+        ALOGW("%s too many frozen external position from HAL.", __func__);
+        mHalExternalPositionStatus = AAUDIO_ERROR_INTERNAL;
+        return mHalExternalPositionStatus;
+    }
+
+    mLastPositionFrames = tempPositionFrames;
+    mTimestampNanosForLastPosition = tempTimeNanos;
+
+    // Only update the timestamp and position when they look valid.
+    *positionFrames = tempPositionFrames;
+    *timeNanos = tempTimeNanos;
+    return mHalExternalPositionStatus;
+}
+
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer(
+        android::base::unique_fd* fileDescriptor)
+{
+    memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
+    int32_t minSizeFrames = getBufferCapacity();
+    if (minSizeFrames <= 0) { // zero will get rejected
+        minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
+    }
+    status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
+    bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
+    if (status != OK) {
+        ALOGE("%s() - createMmapBuffer() failed with status %d %s",
+              __func__, status, strerror(-status));
+        return AAUDIO_ERROR_UNAVAILABLE;
+    } else {
+        ALOGD("%s() createMmapBuffer() buffer_size = %d fr, burst_size %d fr"
+                      ", Sharable FD: %s",
+              __func__,
+              mMmapBufferinfo.buffer_size_frames,
+              mMmapBufferinfo.burst_size_frames,
+              isBufferShareable ? "Yes" : "No");
+    }
+
+    setBufferCapacity(mMmapBufferinfo.buffer_size_frames);
+    if (!isBufferShareable) {
+        // Exclusive mode can only be used by the service because the FD cannot be shared.
+        int32_t audioServiceUid =
+            VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+        if ((mMmapClient.attributionSource.uid != audioServiceUid) &&
+            getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
+            ALOGW("%s() - exclusive FD cannot be used by client", __func__);
+            return AAUDIO_ERROR_UNAVAILABLE;
+        }
+    }
+
+    // AAudio creates a copy of this FD and retains ownership of the copy.
+    // Assume that AudioFlinger will close the original shared_memory_fd.
+    fileDescriptor->reset(dup(mMmapBufferinfo.shared_memory_fd));
+    if (fileDescriptor->get() == -1) {
+        ALOGE("%s() - could not dup shared_memory_fd", __func__);
+        return AAUDIO_ERROR_INTERNAL;
+    }
+
+    // Call to HAL to make sure the transport FD was able to be closed by binder.
+    // This is a tricky workaround for a problem in Binder.
+    // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+    struct audio_mmap_position position;
+    mMmapStream->getMmapPosition(&position);
+
+    mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
+
+    return AAUDIO_OK;
 }
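Note (illustration only, not part of the change): the timestamp grace period introduced above is
(kTimestampGraceBurstCount * framesPerBurst * 1000 ms) / sampleRate. A standalone sketch of the
arithmetic follows; the burst size and sample rate used here are example assumptions.

// Sketch of the grace-period arithmetic; the example values are assumptions.
#include <cstdint>
#include <cstdio>

int main() {
    constexpr int kTimestampGraceBurstCount = 5;  // as in AAudioServiceEndpointMMAP
    constexpr int64_t kMillisPerSecond = 1000;    // stands in for AAUDIO_MILLIS_PER_SECOND
    const int32_t framesPerBurst = 192;           // example burst size (assumption)
    const int32_t sampleRate = 48000;             // example sample rate (assumption)

    // 5 bursts * 192 frames * 1000 ms/s / 48000 frames/s = 20 ms grace period.
    const int32_t gracePeriodMs =
            (kTimestampGraceBurstCount * static_cast<int64_t>(framesPerBurst) * kMillisPerSecond)
            / sampleRate;
    std::printf("timestamp grace period = %d ms\n", gracePeriodMs);
    return 0;
}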
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 5a53885..3e7f2c7 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -64,6 +64,10 @@
 
     aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override;
 
+    aaudio_result_t standby() override;
+
+    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override;
+
     aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
 
     aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
@@ -79,7 +83,7 @@
     void onRoutingChanged(audio_port_handle_t portHandle) override;
     // ------------------------------------------------------------------------------
 
-    aaudio_result_t getDownDataDescription(AudioEndpointParcelable &parcelable);
+    aaudio_result_t getDownDataDescription(AudioEndpointParcelable* parcelable);
 
     int64_t getHardwareTimeOffsetNanos() const {
         return mHardwareTimeOffsetNanos;
@@ -91,6 +95,8 @@
 
     aaudio_result_t openWithFormat(audio_format_t audioFormat);
 
+    aaudio_result_t createMmapBuffer(android::base::unique_fd* fileDescriptor);
+
     MonotonicCounter                          mFramesTransferred;
 
     // Interface to the AudioFlinger MMAP support.
@@ -106,7 +112,12 @@
 
     int64_t                                   mHardwareTimeOffsetNanos = 0; // TODO get from HAL
 
-    bool                                      mExternalPositionSupported = true;
+    aaudio_result_t                           mHalExternalPositionStatus = AAUDIO_OK;
+    uint64_t                                  mLastPositionFrames = 0;
+    int64_t                                   mTimestampNanosForLastPosition = 0;
+    int32_t                                   mTimestampGracePeriodMs;
+    int32_t                                   mFrozenPositionCount = 0;
+    int32_t                                   mFrozenTimestampCount = 0;
 
 };
 
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 4e46033..2a5939f 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -21,6 +21,7 @@
 #include <assert.h>
 #include <map>
 #include <mutex>
+#include <media/AudioSystem.h>
 #include <utils/Singleton.h>
 
 #include "AAudioEndpointManager.h"
@@ -51,7 +52,7 @@
         mMixer.allocate(getStreamInternal()->getSamplesPerFrame(),
                         getStreamInternal()->getFramesPerBurst());
 
-        int32_t burstsPerBuffer = AAudioProperty_getMixerBursts();
+        int32_t burstsPerBuffer = AudioSystem::getAAudioMixerBurstCount();
         if (burstsPerBuffer == 0) {
             mLatencyTuningEnabled = true;
             burstsPerBuffer = BURSTS_PER_BUFFER_DEFAULT;
@@ -146,8 +147,7 @@
                                             getFramesPerBurst(), timeoutNanos);
         if (result == AAUDIO_ERROR_DISCONNECTED) {
             ALOGD("%s() write() returned AAUDIO_ERROR_DISCONNECTED", __func__);
-            // We do not need the returned vector.
-            (void) AAudioServiceEndpointShared::disconnectRegisteredStreams();
+            AAudioServiceEndpointShared::handleDisconnectRegisteredStreamsAsync();
             break;
         } else if (result != getFramesPerBurst()) {
             ALOGW("callbackLoop() wrote %d / %d",
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 5af0a91..dd421fe 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -214,3 +214,12 @@
     }
     return result;
 }
+
+void AAudioServiceEndpointShared::handleDisconnectRegisteredStreamsAsync() {
+    android::sp<AAudioServiceEndpointShared> holdEndpoint(this);
+    std::thread asyncTask([holdEndpoint]() {
+        // We do not need the returned vector.
+        holdEndpoint->disconnectRegisteredStreams();
+    });
+    asyncTask.detach();
+}
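Note (illustration only, not part of the change): handleDisconnectRegisteredStreamsAsync() above
captures a strong pointer in the detached thread so the endpoint cannot be destroyed while the
cleanup is still running. A generic sketch of that pattern, using a hypothetical RefBase-derived
class:

// Generic sketch of the detach-with-strong-reference pattern; MyEndpoint is hypothetical.
#include <thread>
#include <utils/RefBase.h>

class MyEndpoint : public android::RefBase {
public:
    void doSlowCleanup() { /* e.g. disconnect registered streams */ }

    void handleCleanupAsync() {
        // Take a strong reference so the object outlives the caller while the
        // detached thread is still running.
        android::sp<MyEndpoint> holdEndpoint(this);
        std::thread asyncTask([holdEndpoint]() {
            holdEndpoint->doSlowCleanup();
        });
        asyncTask.detach();
    }
};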
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index 8357567..3e760c4 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -69,6 +69,8 @@
 
     aaudio_result_t          stopSharingThread();
 
+    void                     handleDisconnectRegisteredStreamsAsync();
+
     // An MMAP stream that is shared by multiple clients.
     android::sp<AudioStreamInternal> mStreamInternal;
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 4ffc127..9f48f80 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -34,20 +34,23 @@
 #include "AAudioService.h"
 #include "AAudioServiceEndpoint.h"
 #include "AAudioServiceStreamBase.h"
-#include "TimestampScheduler.h"
 
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
 using content::AttributionSourceState;
 
+static const int64_t TIMEOUT_NANOS = 3LL * 1000 * 1000 * 1000;
+// If the stream is idle for more than `IDLE_TIMEOUT_NANOS`, the stream will be put into standby.
+static const int64_t IDLE_TIMEOUT_NANOS = 3LL * 1000 * 1000 * 1000;
+
 /**
  * Base class for streams in the service.
  * @return
  */
 
 AAudioServiceStreamBase::AAudioServiceStreamBase(AAudioService &audioService)
-        : mTimestampThread("AATime")
+        : mCommandThread("AACommand")
         , mAtomicStreamTimestamp()
         , mAudioService(audioService) {
     mMmapClient.attributionSource = AttributionSourceState();
@@ -70,6 +73,13 @@
                         || getState() == AAUDIO_STREAM_STATE_UNINITIALIZED),
                         "service stream %p still open, state = %d",
                         this, getState());
+
+    // Stop the command thread before destroying.
+    if (mThreadEnabled) {
+        mThreadEnabled = false;
+        mCommandQueue.stopWaiting();
+        mCommandThread.stop();
+    }
 }
 
 std::string AAudioServiceStreamBase::dumpHeader() {
@@ -166,16 +176,36 @@
         mFramesPerBurst = mServiceEndpoint->getFramesPerBurst();
         copyFrom(*mServiceEndpoint);
     }
+
+    // Make sure this object does not get deleted before the run() method
+    // can protect it by making a strong pointer.
+    mCommandQueue.startWaiting();
+    mThreadEnabled = true;
+    incStrong(nullptr); // See run() method.
+    result = mCommandThread.start(this);
+    if (result != AAUDIO_OK) {
+        decStrong(nullptr); // run() can't do it so we have to do it here.
+        goto error;
+    }
     return result;
 
 error:
-    close();
+    closeAndClear();
+    mThreadEnabled = false;
+    mCommandQueue.stopWaiting();
+    mCommandThread.stop();
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close() {
-    std::lock_guard<std::mutex> lock(mLock);
-    return close_l();
+    aaudio_result_t result = sendCommand(CLOSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+
+    // Stop the command thread as the stream is closed.
+    mThreadEnabled = false;
+    mCommandQueue.stopWaiting();
+    mCommandThread.stop();
+
+    return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close_l() {
@@ -183,29 +213,10 @@
         return AAUDIO_OK;
     }
 
-    // This will call stopTimestampThread() and also stop the stream,
-    // just in case it was not already stopped.
+    // This will stop the stream, just in case it was not already stopped.
     stop_l();
 
-    aaudio_result_t result = AAUDIO_OK;
-    sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
-    if (endpoint == nullptr) {
-        result = AAUDIO_ERROR_INVALID_STATE;
-    } else {
-        endpoint->unregisterStream(this);
-        AAudioEndpointManager &endpointManager = AAudioEndpointManager::getInstance();
-        endpointManager.closeEndpoint(endpoint);
-
-        // AAudioService::closeStream() prevents two threads from closing at the same time.
-        mServiceEndpoint.clear(); // endpoint will hold the pointer after this method returns.
-    }
-
-    setState(AAUDIO_STREAM_STATE_CLOSED);
-
-    mediametrics::LogItem(mMetricsId)
-        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CLOSE)
-        .record();
-    return result;
+    return closeAndClear();
 }
 
 aaudio_result_t AAudioServiceStreamBase::startDevice() {
@@ -224,8 +235,10 @@
  * An AAUDIO_SERVICE_EVENT_STARTED will be sent to the client when complete.
  */
 aaudio_result_t AAudioServiceStreamBase::start() {
-    std::lock_guard<std::mutex> lock(mLock);
+    return sendCommand(START, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+}
 
+aaudio_result_t AAudioServiceStreamBase::start_l() {
     const int64_t beginNs = AudioClock::getNanoseconds();
     aaudio_result_t result = AAUDIO_OK;
 
@@ -236,6 +249,12 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    if (mStandby) {
+        ALOGW("%s() the stream is standby, return ERROR_STANDBY, "
+              "expecting the client call exitStandby before start", __func__);
+        return AAUDIO_ERROR_STANDBY;
+    }
+
     mediametrics::Defer defer([&] {
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
@@ -261,15 +280,6 @@
     // This should happen at the end of the start.
     sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
     setState(AAUDIO_STREAM_STATE_STARTED);
-    mThreadEnabled.store(true);
-    // Make sure this object does not get deleted before the run() method
-    // can protect it by making a strong pointer.
-    incStrong(nullptr); // See run() method.
-    result = mTimestampThread.start(this);
-    if (result != AAUDIO_OK) {
-        decStrong(nullptr); // run() can't do it so we have to do it here.
-        goto error;
-    }
 
     return result;
 
@@ -279,8 +289,7 @@
 }
 
 aaudio_result_t AAudioServiceStreamBase::pause() {
-    std::lock_guard<std::mutex> lock(mLock);
-    return pause_l();
+    return sendCommand(PAUSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
 }
 
 aaudio_result_t AAudioServiceStreamBase::pause_l() {
@@ -298,12 +307,6 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
-    result = stopTimestampThread();
-    if (result != AAUDIO_OK) {
-        disconnect_l();
-        return result;
-    }
-
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -322,8 +325,7 @@
 }
 
 aaudio_result_t AAudioServiceStreamBase::stop() {
-    std::lock_guard<std::mutex> lock(mLock);
-    return stop_l();
+    return sendCommand(STOP, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
 }
 
 aaudio_result_t AAudioServiceStreamBase::stop_l() {
@@ -343,12 +345,6 @@
 
     setState(AAUDIO_STREAM_STATE_STOPPING);
 
-    // Temporarily unlock because we are joining the timestamp thread and it may try
-    // to acquire mLock.
-    mLock.unlock();
-    result = stopTimestampThread();
-    mLock.lock();
-
     if (result != AAUDIO_OK) {
         disconnect_l();
         return result;
@@ -373,17 +369,11 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamBase::stopTimestampThread() {
-    aaudio_result_t result = AAUDIO_OK;
-    // clear flag that tells thread to loop
-    if (mThreadEnabled.exchange(false)) {
-        result = mTimestampThread.stop();
-    }
-    return result;
+aaudio_result_t AAudioServiceStreamBase::flush() {
+    return sendCommand(FLUSH, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
 }
 
-aaudio_result_t AAudioServiceStreamBase::flush() {
-    std::lock_guard<std::mutex> lock(mLock);
+aaudio_result_t AAudioServiceStreamBase::flush_l() {
     aaudio_result_t result = AAudio_isFlushAllowed(getState());
     if (result != AAUDIO_OK) {
         return result;
@@ -404,48 +394,122 @@
     return AAUDIO_OK;
 }
 
-// implement Runnable, periodically send timestamps to client
+// implement Runnable, periodically send timestamps to client and process commands from queue.
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
-    ALOGD("%s() %s entering >>>>>>>>>>>>>> TIMESTAMPS", __func__, getTypeText());
+    ALOGD("%s() %s entering >>>>>>>>>>>>>> COMMANDS", __func__, getTypeText());
     // Hold onto the ref counted stream until the end.
     android::sp<AAudioServiceStreamBase> holdStream(this);
     TimestampScheduler timestampScheduler;
+    int64_t nextTime;
+    int64_t standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
     // Balance the incStrong from when the thread was launched.
     holdStream->decStrong(nullptr);
 
-    timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
-    timestampScheduler.start(AudioClock::getNanoseconds());
-    int64_t nextTime = timestampScheduler.nextAbsoluteTime();
+    // Take mLock when the command thread starts. Every operation handled here must be able to
+    // run while holding the lock.
+    std::scoped_lock<std::mutex> _l(mLock);
+
     int32_t loopCount = 0;
-    aaudio_result_t result = AAUDIO_OK;
-    while(mThreadEnabled.load()) {
+    while (mThreadEnabled.load()) {
         loopCount++;
-        if (AudioClock::getNanoseconds() >= nextTime) {
-            result = sendCurrentTimestamp();
-            if (result != AAUDIO_OK) {
-                ALOGE("%s() timestamp thread got result = %d", __func__, result);
-                break;
+        int64_t timeoutNanos = -1;
+        if (isRunning() || (isIdle_l() && !isStandby_l())) {
+            timeoutNanos = (isRunning() ? nextTime : standbyTime) - AudioClock::getNanoseconds();
+            timeoutNanos = std::max<int64_t>(0, timeoutNanos);
+        }
+
+        auto command = mCommandQueue.waitForCommand(timeoutNanos);
+        if (!mThreadEnabled) {
+            // Break the loop if the thread is disabled.
+            break;
+        }
+
+        if (isRunning() && AudioClock::getNanoseconds() >= nextTime) {
+            // It is time to update timestamp.
+            if (sendCurrentTimestamp_l() != AAUDIO_OK) {
+                ALOGE("Failed to send current timestamp, stop updating timestamp");
+                disconnect_l();
+            } else {
+                nextTime = timestampScheduler.nextAbsoluteTime();
             }
-            nextTime = timestampScheduler.nextAbsoluteTime();
-        } else  {
-            // Sleep until it is time to send the next timestamp.
-            // TODO Wait for a signal with a timeout so that we can stop more quickly.
-            AudioClock::sleepUntilNanoTime(nextTime);
+        }
+        if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
+            standby_l();
+        }
+
+        if (command != nullptr) {
+            std::scoped_lock<std::mutex> _commandLock(command->lock);
+            switch (command->operationCode) {
+                case START:
+                    command->result = start_l();
+                    timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
+                    timestampScheduler.start(AudioClock::getNanoseconds());
+                    nextTime = timestampScheduler.nextAbsoluteTime();
+                    break;
+                case PAUSE:
+                    command->result = pause_l();
+                    standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+                    break;
+                case STOP:
+                    command->result = stop_l();
+                    standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+                    break;
+                case FLUSH:
+                    command->result = flush_l();
+                    break;
+                case CLOSE:
+                    command->result = close_l();
+                    break;
+                case DISCONNECT:
+                    disconnect_l();
+                    break;
+                case REGISTER_AUDIO_THREAD: {
+                    RegisterAudioThreadParam *param =
+                            (RegisterAudioThreadParam *) command->parameter.get();
+                    command->result =
+                            param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                             : registerAudioThread_l(param->mOwnerPid,
+                                                                     param->mClientThreadId,
+                                                                     param->mPriority);
+                }
+                    break;
+                case UNREGISTER_AUDIO_THREAD: {
+                    UnregisterAudioThreadParam *param =
+                            (UnregisterAudioThreadParam *) command->parameter.get();
+                    command->result =
+                            param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                             : unregisterAudioThread_l(param->mClientThreadId);
+                }
+                    break;
+                case GET_DESCRIPTION: {
+                    GetDescriptionParam *param = (GetDescriptionParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                        : getDescription_l(param->mParcelable);
+                }
+                    break;
+                case EXIT_STANDBY: {
+                    ExitStandbyParam *param = (ExitStandbyParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : exitStandby_l(param->mParcelable);
+                    standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+                }
+                    break;
+                default:
+                    ALOGE("Invalid command op code: %d", command->operationCode);
+                    break;
+            }
+            if (command->isWaitingForReply) {
+                command->isWaitingForReply = false;
+                command->conditionVariable.notify_one();
+            }
         }
     }
-    // This was moved from the calls in stop_l() and pause_l(), which could cause a deadlock
-    // if it resulted in a call to disconnect.
-    if (result == AAUDIO_OK) {
-        (void) sendCurrentTimestamp();
-    }
-    ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< TIMESTAMPS",
+    ALOGD("%s() %s exiting after %d loops <<<<<<<<<<<<<< COMMANDS",
           __func__, getTypeText(), loopCount);
 }
 
 void AAudioServiceStreamBase::disconnect() {
-    std::lock_guard<std::mutex> lock(mLock);
-    disconnect_l();
+    sendCommand(DISCONNECT);
 }
 
 void AAudioServiceStreamBase::disconnect_l() {
@@ -461,15 +525,21 @@
     }
 }
 
-aaudio_result_t AAudioServiceStreamBase::registerAudioThread(pid_t clientThreadId,
-        int priority) {
-    std::lock_guard<std::mutex> lock(mLock);
+aaudio_result_t AAudioServiceStreamBase::registerAudioThread(pid_t clientThreadId, int priority) {
+    const pid_t ownerPid = IPCThreadState::self()->getCallingPid(); // TODO review
+    return sendCommand(REGISTER_AUDIO_THREAD,
+            std::make_shared<RegisterAudioThreadParam>(ownerPid, clientThreadId, priority),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+}
+
+aaudio_result_t AAudioServiceStreamBase::registerAudioThread_l(
+        pid_t ownerPid, pid_t clientThreadId, int priority) {
     aaudio_result_t result = AAUDIO_OK;
     if (getRegisteredThread() != AAudioServiceStreamBase::ILLEGAL_THREAD_ID) {
         ALOGE("AAudioService::registerAudioThread(), thread already registered");
         result = AAUDIO_ERROR_INVALID_STATE;
     } else {
-        const pid_t ownerPid = IPCThreadState::self()->getCallingPid(); // TODO review
         setRegisteredThread(clientThreadId);
         int err = android::requestPriority(ownerPid, clientThreadId,
                                            priority, true /* isForApp */);
@@ -483,7 +553,13 @@
 }
 
 aaudio_result_t AAudioServiceStreamBase::unregisterAudioThread(pid_t clientThreadId) {
-    std::lock_guard<std::mutex> lock(mLock);
+    return sendCommand(UNREGISTER_AUDIO_THREAD,
+            std::make_shared<UnregisterAudioThreadParam>(clientThreadId),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+}
+
+aaudio_result_t AAudioServiceStreamBase::unregisterAudioThread_l(pid_t clientThreadId) {
     aaudio_result_t result = AAUDIO_OK;
     if (getRegisteredThread() != clientThreadId) {
         ALOGE("%s(), wrong thread", __func__);
@@ -552,7 +628,7 @@
     return sendServiceEvent(AAUDIO_SERVICE_EVENT_XRUN, (int64_t) xRunCount);
 }
 
-aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp() {
+aaudio_result_t AAudioServiceStreamBase::sendCurrentTimestamp_l() {
     AAudioServiceMessage command;
     // It is not worth filling up the queue with timestamps.
     // That can cause the stream to get suspended.
@@ -562,8 +638,8 @@
     }
 
     // Send a timestamp for the clock model.
-    aaudio_result_t result = getFreeRunningPosition(&command.timestamp.position,
-                                                    &command.timestamp.timestamp);
+    aaudio_result_t result = getFreeRunningPosition_l(&command.timestamp.position,
+                                                      &command.timestamp.timestamp);
     if (result == AAUDIO_OK) {
         ALOGV("%s() SERVICE  %8lld at %lld", __func__,
               (long long) command.timestamp.position,
@@ -573,8 +649,8 @@
 
         if (result == AAUDIO_OK) {
             // Send a hardware timestamp for presentation time.
-            result = getHardwareTimestamp(&command.timestamp.position,
-                                          &command.timestamp.timestamp);
+            result = getHardwareTimestamp_l(&command.timestamp.position,
+                                            &command.timestamp.timestamp);
             if (result == AAUDIO_OK) {
                 ALOGV("%s() HARDWARE %8lld at %lld", __func__,
                       (long long) command.timestamp.position,
@@ -596,7 +672,14 @@
  * used to communicate with the underlying HAL or Service.
  */
 aaudio_result_t AAudioServiceStreamBase::getDescription(AudioEndpointParcelable &parcelable) {
-    std::lock_guard<std::mutex> lock(mLock);
+    return sendCommand(
+            GET_DESCRIPTION,
+            std::make_shared<GetDescriptionParam>(&parcelable),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+}
+
+aaudio_result_t AAudioServiceStreamBase::getDescription_l(AudioEndpointParcelable* parcelable) {
     {
         std::lock_guard<std::mutex> lock(mUpMessageQueueLock);
         if (mUpMessageQueue == nullptr) {
@@ -605,11 +688,50 @@
         }
         // Gather information on the message queue.
         mUpMessageQueue->fillParcelable(parcelable,
-                                        parcelable.mUpMessageQueueParcelable);
+                                        parcelable->mUpMessageQueueParcelable);
     }
-    return getAudioDataDescription(parcelable);
+    return getAudioDataDescription_l(parcelable);
+}
+
+aaudio_result_t AAudioServiceStreamBase::exitStandby(AudioEndpointParcelable *parcelable) {
+    auto command = std::make_shared<AAudioCommand>(
+            EXIT_STANDBY,
+            std::make_shared<ExitStandbyParam>(parcelable),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
 }
 
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
+
+aaudio_result_t AAudioServiceStreamBase::sendCommand(aaudio_command_opcode opCode,
+                                                     std::shared_ptr<AAudioCommandParam> param,
+                                                     bool waitForReply,
+                                                     int64_t timeoutNanos) {
+    return mCommandQueue.sendCommand(std::make_shared<AAudioCommand>(
+            opCode, param, waitForReply, timeoutNanos));
+}
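+
+// Usage sketch (illustrative assumption, not taken from this change): a blocking request is
+// forwarded through the command queue and waits for the command thread to fill in the result,
+// e.g.
+//     return sendCommand(PAUSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+// Fire-and-forget requests, such as disconnect(), omit the reply wait.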
+
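+// Unregister this stream from its endpoint, close the endpoint, and mark the stream CLOSED.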
+aaudio_result_t AAudioServiceStreamBase::closeAndClear() {
+    aaudio_result_t result = AAUDIO_OK;
+    sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+    if (endpoint == nullptr) {
+        result = AAUDIO_ERROR_INVALID_STATE;
+    } else {
+        endpoint->unregisterStream(this);
+        AAudioEndpointManager &endpointManager = AAudioEndpointManager::getInstance();
+        endpointManager.closeEndpoint(endpoint);
+
+        // AAudioService::closeStream() prevents two threads from closing at the same time.
+        mServiceEndpoint.clear(); // 'endpoint' keeps a reference until this method returns.
+    }
+
+    setState(AAUDIO_STREAM_STATE_CLOSED);
+
+    mediametrics::LogItem(mMetricsId)
+        .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CLOSE)
+        .record();
+    return result;
+}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 976996d..b2ba725 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -33,8 +33,10 @@
 #include "utility/AAudioUtilities.h"
 #include "utility/AudioClock.h"
 
-#include "SharedRingBuffer.h"
+#include "AAudioCommandQueue.h"
 #include "AAudioThread.h"
+#include "SharedRingBuffer.h"
+#include "TimestampScheduler.h"
 
 namespace android {
     class AAudioService;
@@ -114,6 +116,11 @@
      */
     aaudio_result_t flush() EXCLUDES(mLock);
 
+    /**
+     * Exit standby mode. The MMAP buffer will be reallocated.
+     */
+    aaudio_result_t exitStandby(AudioEndpointParcelable *parcelable) EXCLUDES(mLock);
+
     virtual aaudio_result_t startClient(const android::AudioClient& client,
                                         const audio_attributes_t *attr __unused,
                                         audio_port_handle_t *clientHandle __unused) {
@@ -235,10 +242,46 @@
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request,
                          aaudio_sharing_mode_t sharingMode);
 
+    aaudio_result_t start_l() REQUIRES(mLock);
     virtual aaudio_result_t close_l() REQUIRES(mLock);
     virtual aaudio_result_t pause_l() REQUIRES(mLock);
     virtual aaudio_result_t stop_l() REQUIRES(mLock);
     void disconnect_l() REQUIRES(mLock);
+    aaudio_result_t flush_l() REQUIRES(mLock);
+
+    class RegisterAudioThreadParam : public AAudioCommandParam {
+    public:
+        RegisterAudioThreadParam(pid_t ownerPid, pid_t clientThreadId, int priority)
+                : AAudioCommandParam(), mOwnerPid(ownerPid),
+                  mClientThreadId(clientThreadId), mPriority(priority) { }
+        ~RegisterAudioThreadParam() = default;
+
+        pid_t mOwnerPid;
+        pid_t mClientThreadId;
+        int mPriority;
+    };
+    aaudio_result_t registerAudioThread_l(
+            pid_t ownerPid, pid_t clientThreadId, int priority) REQUIRES(mLock);
+
+    class UnregisterAudioThreadParam : public AAudioCommandParam {
+    public:
+        UnregisterAudioThreadParam(pid_t clientThreadId)
+                : AAudioCommandParam(), mClientThreadId(clientThreadId) { }
+        ~UnregisterAudioThreadParam() = default;
+
+        pid_t mClientThreadId;
+    };
+    aaudio_result_t unregisterAudioThread_l(pid_t clientThreadId) REQUIRES(mLock);
+
+    class GetDescriptionParam : public AAudioCommandParam {
+    public:
+        GetDescriptionParam(AudioEndpointParcelable* parcelable)
+                : AAudioCommandParam(), mParcelable(parcelable) { }
+        ~GetDescriptionParam() = default;
+
+        AudioEndpointParcelable* mParcelable;
+    };
+    aaudio_result_t getDescription_l(AudioEndpointParcelable* parcelable) REQUIRES(mLock);
 
     void setState(aaudio_stream_state_t state);
 
@@ -250,7 +293,7 @@
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
 
-    aaudio_result_t sendCurrentTimestamp() EXCLUDES(mLock);
+    aaudio_result_t sendCurrentTimestamp_l() REQUIRES(mLock);
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
@@ -259,11 +302,13 @@
      * @param timeNanos
      * @return AAUDIO_OK or AAUDIO_ERROR_UNAVAILABLE or other negative error
      */
-    virtual aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) = 0;
+    virtual aaudio_result_t getFreeRunningPosition_l(
+            int64_t *positionFrames, int64_t *timeNanos) = 0;
 
-    virtual aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) = 0;
+    virtual aaudio_result_t getHardwareTimestamp_l(int64_t *positionFrames, int64_t *timeNanos) = 0;
 
-    virtual aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) = 0;
+    virtual aaudio_result_t getAudioDataDescription_l(AudioEndpointParcelable* parcelable) = 0;
 
     aaudio_stream_state_t   mState = AAUDIO_STREAM_STATE_UNINITIALIZED;
 
@@ -274,14 +319,53 @@
         mDisconnected = flag;
     }
 
+    virtual aaudio_result_t standby_l() REQUIRES(mLock) {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+    class ExitStandbyParam : public AAudioCommandParam {
+    public:
+        ExitStandbyParam(AudioEndpointParcelable* parcelable)
+                : AAudioCommandParam(), mParcelable(parcelable) { }
+        ~ExitStandbyParam() = default;
+
+        AudioEndpointParcelable* mParcelable;
+    };
+    virtual aaudio_result_t exitStandby_l(
+            AudioEndpointParcelable* parcelable __unused) REQUIRES(mLock) {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+    bool isStandby_l() const REQUIRES(mLock) {
+        return mStandby;
+    }
+    void setStandby_l(bool standby) REQUIRES(mLock) {
+        mStandby = standby;
+    }
+
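+    // An open, paused, or stopped stream is idle; the command thread may put an idle stream
+    // into standby after an idle timeout.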
+    bool isIdle_l() const REQUIRES(mLock) {
+        return mState == AAUDIO_STREAM_STATE_OPEN || mState == AAUDIO_STREAM_STATE_PAUSED
+                || mState == AAUDIO_STREAM_STATE_STOPPED;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
     std::shared_ptr<SharedRingBuffer> mUpMessageQueue;
 
-    AAudioThread            mTimestampThread;
-    // This is used by one thread to tell another thread to exit. So it must be atomic.
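+    // Operation codes for the commands processed by the command thread.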
+    enum : int32_t {
+        START,
+        PAUSE,
+        STOP,
+        FLUSH,
+        CLOSE,
+        DISCONNECT,
+        REGISTER_AUDIO_THREAD,
+        UNREGISTER_AUDIO_THREAD,
+        GET_DESCRIPTION,
+        EXIT_STANDBY,
+    };
+    AAudioThread            mCommandThread;
     std::atomic<bool>       mThreadEnabled{false};
+    AAudioCommandQueue      mCommandQueue;
 
     int32_t                 mFramesPerBurst = 0;
     android::AudioClient    mMmapClient; // set in open, used in MMAP start()
@@ -315,6 +399,13 @@
     aaudio_result_t sendServiceEvent(aaudio_service_event_t event,
                                      double dataDouble);
 
+    aaudio_result_t sendCommand(aaudio_command_opcode opCode,
+                                std::shared_ptr<AAudioCommandParam> param = nullptr,
+                                bool waitForReply = false,
+                                int64_t timeoutNanos = 0);
+
+    aaudio_result_t closeAndClear();
+
     /**
      * @return true if the queue is getting full.
      */
@@ -333,9 +424,13 @@
 
     bool                    mDisconnected GUARDED_BY(mLock) {false};
 
+    bool                    mStandby GUARDED_BY(mLock) = false;
+
 protected:
     // Locking order is important.
     // Acquire mLock before acquiring AAudioServiceEndpoint::mLockStreams
+    // The lock will be held by the command thread. All operations that need the lock must
+    // therefore run on the command thread.
     std::mutex              mLock; // Prevent start/stop/close etcetera from colliding
 };
 
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 57dc1ab..ec9b2e2 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -117,6 +117,35 @@
     return result;
 }
 
+aaudio_result_t AAudioServiceStreamMMAP::standby_l() {
+    sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+    if (endpoint == nullptr) {
+        ALOGE("%s() has no endpoint", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    aaudio_result_t result = endpoint->standby();
+    if (result == AAUDIO_OK) {
+        setStandby_l(true);
+    }
+    return result;
+}
+
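+// Exit standby: ask the endpoint to reallocate the MMAP buffer and clear the standby flag.
+// On failure the stream is disconnected.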
+aaudio_result_t AAudioServiceStreamMMAP::exitStandby_l(AudioEndpointParcelable* parcelable) {
+    sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+    if (endpoint == nullptr) {
+        ALOGE("%s() has no endpoint", __func__);
+        return AAUDIO_ERROR_INVALID_STATE;
+    }
+    aaudio_result_t result = endpoint->exitStandby(parcelable);
+    if (result == AAUDIO_OK) {
+        setStandby_l(false);
+    } else {
+        ALOGE("%s failed, result %d, disconnecting stream.", __func__, result);
+        disconnect_l();
+    }
+    return result;
+}
+
 aaudio_result_t AAudioServiceStreamMMAP::startClient(const android::AudioClient& client,
                                                      const audio_attributes_t *attr,
                                                      audio_port_handle_t *clientHandle) {
@@ -141,7 +170,7 @@
 }
 
 // Get free-running DSP or DMA hardware position from the HAL.
-aaudio_result_t AAudioServiceStreamMMAP::getFreeRunningPosition(int64_t *positionFrames,
+aaudio_result_t AAudioServiceStreamMMAP::getFreeRunningPosition_l(int64_t *positionFrames,
                                                                   int64_t *timeNanos) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
@@ -158,16 +187,15 @@
         *positionFrames = timestamp.getPosition();
         *timeNanos = timestamp.getNanoseconds();
     } else if (result != AAUDIO_ERROR_UNAVAILABLE) {
-        disconnect();
+        disconnect_l();
     }
     return result;
 }
 
 // Get timestamp from presentation position.
 // If it fails, get timestamp that was written by getFreeRunningPosition()
-aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp(int64_t *positionFrames,
+aaudio_result_t AAudioServiceStreamMMAP::getHardwareTimestamp_l(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
-
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -176,17 +204,17 @@
     sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
             static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
 
-    // Disable this code temporarily because the HAL is not returning
-    // a useful result.
-#if 0
     uint64_t position;
-    if (serviceEndpointMMAP->getExternalPosition(&position, timeNanos) == AAUDIO_OK) {
-        ALOGD("%s() getExternalPosition() says pos = %" PRIi64 ", time = %" PRIi64,
+    aaudio_result_t result = serviceEndpointMMAP->getExternalPosition(&position, timeNanos);
+    if (result == AAUDIO_OK) {
+        ALOGV("%s() getExternalPosition() says pos = %" PRIi64 ", time = %" PRIi64,
                 __func__, position, *timeNanos);
         *positionFrames = (int64_t) position;
         return AAUDIO_OK;
-    } else
-#endif
+    } else {
+        ALOGV("%s() getExternalPosition() returns error %d", __func__, result);
+    }
+
     if (mAtomicStreamTimestamp.isValid()) {
         Timestamp timestamp = mAtomicStreamTimestamp.read();
         *positionFrames = timestamp.getPosition();
@@ -198,8 +226,8 @@
 }
 
 // Get an immutable description of the data queue from the HAL.
-aaudio_result_t AAudioServiceStreamMMAP::getAudioDataDescription(
-        AudioEndpointParcelable &parcelable)
+aaudio_result_t AAudioServiceStreamMMAP::getAudioDataDescription_l(
+        AudioEndpointParcelable* parcelable)
 {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 667465a..cd8c91e 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -71,12 +71,18 @@
 
     aaudio_result_t stop_l() REQUIRES(mLock) override;
 
-    aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
+    aaudio_result_t standby_l() REQUIRES(mLock) override;
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames,
-            int64_t *timeNanos) EXCLUDES(mLock) override;
+    aaudio_result_t exitStandby_l(AudioEndpointParcelable* parcelable) REQUIRES(mLock) override;
 
-    aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getAudioDataDescription_l(
+            AudioEndpointParcelable* parcelable) REQUIRES(mLock) override;
+
+    aaudio_result_t getFreeRunningPosition_l(int64_t *positionFrames,
+            int64_t *timeNanos) REQUIRES(mLock) override;
+
+    aaudio_result_t getHardwareTimestamp_l(
+            int64_t *positionFrames, int64_t *timeNanos) REQUIRES(mLock) override;
 
     /**
      * Device specific startup.
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index ad06d97..04fcd6d 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -211,8 +211,8 @@
 /**
  * Get an immutable description of the data queue created by this service.
  */
-aaudio_result_t AAudioServiceStreamShared::getAudioDataDescription(
-        AudioEndpointParcelable &parcelable)
+aaudio_result_t AAudioServiceStreamShared::getAudioDataDescription_l(
+        AudioEndpointParcelable* parcelable)
 {
     std::lock_guard<std::mutex> lock(audioDataQueueLock);
     if (mAudioDataQueue == nullptr) {
@@ -221,8 +221,8 @@
     }
     // Gather information on the data queue.
     mAudioDataQueue->fillParcelable(parcelable,
-                                    parcelable.mDownDataQueueParcelable);
-    parcelable.mDownDataQueueParcelable.setFramesPerBurst(getFramesPerBurst());
+                                    parcelable->mDownDataQueueParcelable);
+    parcelable->mDownDataQueueParcelable.setFramesPerBurst(getFramesPerBurst());
     return AAUDIO_OK;
 }
 
@@ -231,8 +231,8 @@
 }
 
 // Get timestamp that was written by mixer or distributor.
-aaudio_result_t AAudioServiceStreamShared::getFreeRunningPosition(int64_t *positionFrames,
-                                                                  int64_t *timeNanos) {
+aaudio_result_t AAudioServiceStreamShared::getFreeRunningPosition_l(int64_t *positionFrames,
+                                                                    int64_t *timeNanos) {
     // TODO Get presentation timestamp from the HAL
     if (mAtomicStreamTimestamp.isValid()) {
         Timestamp timestamp = mAtomicStreamTimestamp.read();
@@ -245,8 +245,8 @@
 }
 
 // Get timestamp from lower level service.
-aaudio_result_t AAudioServiceStreamShared::getHardwareTimestamp(int64_t *positionFrames,
-                                                                int64_t *timeNanos) {
+aaudio_result_t AAudioServiceStreamShared::getHardwareTimestamp_l(int64_t *positionFrames,
+                                                                  int64_t *timeNanos) {
 
     int64_t position = 0;
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 4fae5b4..78f9787 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -88,11 +88,14 @@
 
 protected:
 
-    aaudio_result_t getAudioDataDescription(AudioEndpointParcelable &parcelable) override;
+    aaudio_result_t getAudioDataDescription_l(
+            AudioEndpointParcelable* parcelable) REQUIRES(mLock) override;
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition_l(
+            int64_t *positionFrames, int64_t *timeNanos) REQUIRES(mLock) override;
 
-    aaudio_result_t getHardwareTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getHardwareTimestamp_l(
+            int64_t *positionFrames, int64_t *timeNanos) REQUIRES(mLock) override;
 
     /**
      * @param requestedCapacityFrames
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index 68496ac..549fa59 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -16,9 +16,10 @@
 
 #define LOG_TAG "AAudioThread"
 //#define LOG_NDEBUG 0
-#include <utils/Log.h>
 
-#include <pthread.h>
+#include <system_error>
+
+#include <utils/Log.h>
 
 #include <aaudio/AAudio.h>
 #include <utility/AAudioUtilities.h>
@@ -38,7 +39,7 @@
 }
 
 AAudioThread::~AAudioThread() {
-    ALOGE_IF(pthread_equal(pthread_self(), mThread),
+    ALOGE_IF(mThread.get_id() == std::this_thread::get_id(),
             "%s() destructor running in thread", __func__);
     ALOGE_IF(mHasThread, "%s() thread never joined", __func__);
 }
@@ -60,32 +61,16 @@
     }
 }
 
-// This is the entry point for the new thread created by createThread_l().
-// It converts the 'C' function call to a C++ method call.
-static void * AAudioThread_internalThreadProc(void *arg) {
-    AAudioThread *aaudioThread = (AAudioThread *) arg;
-    aaudioThread->dispatch();
-    return nullptr;
-}
-
 aaudio_result_t AAudioThread::start(Runnable *runnable) {
     if (mHasThread) {
         ALOGE("start() - mHasThread already true");
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    // mRunnable will be read by the new thread when it starts.
-    // pthread_create() forces a memory synchronization so mRunnable does not need to be atomic.
+    // mRunnable will be read by the new thread when it starts. The std::thread constructor
+    // provides the needed memory synchronization, so mRunnable does not need to be atomic.
     mRunnable = runnable;
-    int err = pthread_create(&mThread, nullptr, AAudioThread_internalThreadProc, this);
-    if (err != 0) {
-        ALOGE("start() - pthread_create() returned %d %s", err, strerror(err));
-        return AAudioConvert_androidToAAudioResult(-err);
-    } else {
-        int err = pthread_setname_np(mThread, mName);
-        ALOGW_IF((err != 0), "Could not set name of AAudioThread. err = %d", err);
-        mHasThread = true;
-        return AAUDIO_OK;
-    }
+    mHasThread = true;
+    mThread = std::thread(&AAudioThread::dispatch, this);
+    return AAUDIO_OK;
 }
 
 aaudio_result_t AAudioThread::stop() {
@@ -93,18 +78,18 @@
         ALOGE("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
-    // Check to see if the thread is trying to stop itself.
-    if (pthread_equal(pthread_self(), mThread)) {
-        ALOGE("%s() attempt to pthread_join() from launched thread!", __func__);
-        return AAUDIO_ERROR_INTERNAL;
-    }
 
-    int err = pthread_join(mThread, nullptr);
-    if (err != 0) {
-        ALOGE("stop() - pthread_join() returned %d %s", err, strerror(err));
-        return AAudioConvert_androidToAAudioResult(-err);
-    } else {
+    if (mThread.get_id() == std::this_thread::get_id()) {
+        // The thread must not be joined by itself.
+        ALOGE("%s() attempt to join() from launched thread!", __func__);
+        return AAUDIO_ERROR_INTERNAL;
+    } else if (mThread.joinable()) {
+        // Double-check that the thread is joinable to avoid an exception from join().
+        mThread.join();
         mHasThread = false;
         return AAUDIO_OK;
+    } else {
+        ALOGE("%s() the thread is not joinable", __func__);
+        return AAUDIO_ERROR_INTERNAL;
     }
 }
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index 08a8a98..b2774e0 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -18,7 +18,7 @@
 #define AAUDIO_THREAD_H
 
 #include <atomic>
-#include <pthread.h>
+#include <thread>
 
 #include <aaudio/AAudio.h>
 
@@ -37,7 +37,6 @@
 
 /**
  * Abstraction for a host dependent thread.
- * TODO Consider using Android "Thread" class or std::thread instead.
  */
 class AAudioThread
 {
@@ -73,7 +72,7 @@
 
     Runnable    *mRunnable = nullptr;
     bool         mHasThread = false;
-    pthread_t    mThread = {};
+    std::thread  mThread;
 
     static std::atomic<uint32_t> mNextThreadIndex;
     char         mName[16]; // max length for a pthread_name
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 4c58040..80e4296 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -27,6 +27,7 @@
 
     srcs: [
         "AAudioClientTracker.cpp",
+        "AAudioCommandQueue.cpp",
         "AAudioEndpointManager.cpp",
         "AAudioMixer.cpp",
         "AAudioService.cpp",
@@ -68,6 +69,8 @@
         "aaudio-aidl-cpp",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
+        "android.media.audio.common.types-V1-cpp",
     ],
 
     export_shared_lib_headers: [
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index c1d4e16..fd2a454 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -85,9 +85,9 @@
     return AAUDIO_OK;
 }
 
-void SharedRingBuffer::fillParcelable(AudioEndpointParcelable &endpointParcelable,
+void SharedRingBuffer::fillParcelable(AudioEndpointParcelable* endpointParcelable,
                     RingBufferParcelable &ringBufferParcelable) {
-    int fdIndex = endpointParcelable.addFileDescriptor(mFileDescriptor, mSharedMemorySizeInBytes);
+    int fdIndex = endpointParcelable->addFileDescriptor(mFileDescriptor, mSharedMemorySizeInBytes);
     ringBufferParcelable.setupMemory(fdIndex,
                                      SHARED_RINGBUFFER_DATA_OFFSET,
                                      mDataMemorySizeInBytes,
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index c3a9bb7..cff1261 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -45,7 +45,7 @@
 
     aaudio_result_t allocate(android::fifo_frames_t bytesPerFrame, android::fifo_frames_t capacityInFrames);
 
-    void fillParcelable(AudioEndpointParcelable &endpointParcelable,
+    void fillParcelable(AudioEndpointParcelable* endpointParcelable,
                         RingBufferParcelable &ringBufferParcelable);
 
     /**
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index 17e8d36..5e48955 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -180,6 +180,11 @@
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
+    aaudio_result_t exitStandby(aaudio_handle_t streamHandle UNUSED_PARAM,
+                                AudioEndpointParcelable &parcelable UNUSED_PARAM) override {
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
     void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {}
 
     int getDeathCount() { return mDeathCount; }
diff --git a/services/tuner/.clang-format b/services/tuner/.clang-format
new file mode 100644
index 0000000..f14cc88
--- /dev/null
+++ b/services/tuner/.clang-format
@@ -0,0 +1,33 @@
+---
+BasedOnStyle: Google
+AllowShortFunctionsOnASingleLine: Inline
+AllowShortIfStatementsOnASingleLine: true
+AllowShortLoopsOnASingleLine: true
+BinPackArguments: true
+BinPackParameters: true
+CommentPragmas: NOLINT:.*
+ContinuationIndentWidth: 8
+DerivePointerAlignment: false
+IndentWidth: 4
+PointerAlignment: Left
+TabWidth: 4
+
+# Deviations from the above file:
+# "Don't indent the section label"
+AccessModifierOffset: -4
+# "Each line of text in your code should be at most 100 columns long."
+ColumnLimit: 100
+# "Constructor initializer lists can be all on one line or with subsequent
+# lines indented eight spaces.". clang-format does not support having the colon
+# on the same line as the constructor function name, so this is the best
+# approximation of that rule, which makes all entries in the list (except the
+# first one) have an eight space indentation.
+ConstructorInitializerIndentWidth: 6
+# There is nothing in go/droidcppstyle about case labels, but there seems to be
+# more code that does not indent the case labels in frameworks/base.
+IndentCaseLabels: false
+# There have been some bugs in which subsequent formatting operations introduce
+# weird comment jumps.
+ReflowComments: false
+# Android does support C++11 now.
+Standard: Cpp11
\ No newline at end of file
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index cd11c88..5c1dda1 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -7,33 +7,15 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-filegroup {
-    name: "tv_tuner_aidl",
-    srcs: [
-        "aidl/android/media/tv/tuner/*.aidl",
-    ],
-    path: "aidl",
-}
-
-filegroup {
-    name: "tv_tuner_frontend_info",
-    srcs: [
-        "aidl/android/media/tv/tuner/TunerFrontendInfo.aidl",
-        "aidl/android/media/tv/tuner/TunerFrontend*Capabilities.aidl",
-    ],
-    path: "aidl",
-}
-
 aidl_interface {
     name: "tv_tuner_aidl_interface",
     unstable: true,
     local_include_dir: "aidl",
-    srcs: [
-        ":tv_tuner_aidl",
-    ],
+    srcs: ["aidl/android/media/tv/tuner/*.aidl"],
     imports: [
         "android.hardware.common-V2",
         "android.hardware.common.fmq-V1",
+        "android.hardware.tv.tuner-V1",
     ],
 
     backend: {
@@ -49,37 +31,18 @@
     },
 }
 
-aidl_interface {
-    name: "tv_tuner_frontend_info_aidl_interface",
-    unstable: true,
-    local_include_dir: "aidl",
-    srcs: [
-        ":tv_tuner_frontend_info",
-    ],
-
-    backend: {
-        java: {
-            enabled: true,
-        },
-        cpp: {
-            enabled: true,
-        },
-        ndk: {
-            enabled: true,
-        },
-    },
-}
-
 cc_library {
     name: "libtunerservice",
 
     srcs: [
         "Tuner*.cpp",
+        "hidl/Tuner*.cpp",
     ],
 
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
+        "android.hardware.tv.tuner-V1-ndk",
         "libbase",
         "libbinder",
         "libbinder_ndk",
@@ -89,13 +52,13 @@
         "liblog",
         "libmedia",
         "libutils",
-        "tv_tuner_aidl_interface-ndk_platform",
-        "tv_tuner_resource_manager_aidl_interface-ndk_platform",
-        "tv_tuner_resource_manager_aidl_interface-cpp",
+        "packagemanager_aidl-cpp",
+        "tv_tuner_aidl_interface-ndk",
+        "tv_tuner_resource_manager_aidl_interface-ndk",
     ],
 
     static_libs: [
-        "android.hardware.common.fmq-V1-ndk_platform",
+        "android.hardware.common.fmq-V1-ndk",
         "libaidlcommonsupport",
     ],
 
@@ -122,18 +85,15 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
+        "android.hardware.tv.tuner-V1-ndk",
         "libbase",
         "libbinder",
         "libfmq",
         "liblog",
         "libtunerservice",
         "libutils",
-        "tv_tuner_resource_manager_aidl_interface-ndk_platform",
-        "tv_tuner_resource_manager_aidl_interface-cpp",
-    ],
-
-    static_libs: [
-        "tv_tuner_aidl_interface-ndk_platform",
+        "tv_tuner_aidl_interface-ndk",
+        "tv_tuner_resource_manager_aidl_interface-ndk",
     ],
 
     init_rc: ["mediatuner.rc"],
diff --git a/services/tuner/OWNERS b/services/tuner/OWNERS
index 0ceb8e8..bf9fe34 100644
--- a/services/tuner/OWNERS
+++ b/services/tuner/OWNERS
@@ -1,2 +1,2 @@
-nchalko@google.com
+hgchen@google.com
 quxiangfang@google.com
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index 1122368..a6f3a2c 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -16,23 +16,32 @@
 
 #define LOG_TAG "TunerDemux"
 
-#include "TunerDvr.h"
 #include "TunerDemux.h"
+
+#include <aidl/android/hardware/tv/tuner/IDvr.h>
+#include <aidl/android/hardware/tv/tuner/IDvrCallback.h>
+#include <aidl/android/hardware/tv/tuner/IFilter.h>
+#include <aidl/android/hardware/tv/tuner/IFilterCallback.h>
+#include <aidl/android/hardware/tv/tuner/ITimeFilter.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+
+#include "TunerDvr.h"
 #include "TunerTimeFilter.h"
 
-using ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxIpFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
-using ::android::hardware::tv::tuner::V1_0::DvrType;
-using ::android::hardware::tv::tuner::V1_0::Result;
+using ::aidl::android::hardware::tv::tuner::IDvr;
+using ::aidl::android::hardware::tv::tuner::IDvrCallback;
+using ::aidl::android::hardware::tv::tuner::IFilter;
+using ::aidl::android::hardware::tv::tuner::IFilterCallback;
+using ::aidl::android::hardware::tv::tuner::ITimeFilter;
+using ::aidl::android::hardware::tv::tuner::Result;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-TunerDemux::TunerDemux(sp<IDemux> demux, int id) {
+TunerDemux::TunerDemux(shared_ptr<IDemux> demux, int id) {
     mDemux = demux;
     mDemuxId = id;
 }
@@ -41,192 +50,143 @@
     mDemux = nullptr;
 }
 
-Status TunerDemux::setFrontendDataSource(const std::shared_ptr<ITunerFrontend>& frontend) {
+::ndk::ScopedAStatus TunerDemux::setFrontendDataSource(
+        const shared_ptr<ITunerFrontend>& in_frontend) {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
     int frontendId;
-    frontend->getFrontendId(&frontendId);
-    Result res = mDemux->setFrontendDataSource(frontendId);
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    in_frontend->getFrontendId(&frontendId);
+
+    return mDemux->setFrontendDataSource(frontendId);
 }
 
-Status TunerDemux::openFilter(
-        int type, int subType, int bufferSize, const std::shared_ptr<ITunerFilterCallback>& cb,
-        std::shared_ptr<ITunerFilter>* _aidl_return) {
+::ndk::ScopedAStatus TunerDemux::setFrontendDataSourceById(int frontendId) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mDemux->setFrontendDataSource(frontendId);
+}
+
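+// Open a HAL filter of the requested type and wrap it in a TunerFilter that forwards events
+// to the client callback.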
+::ndk::ScopedAStatus TunerDemux::openFilter(const DemuxFilterType& in_type, int32_t in_bufferSize,
+                                            const shared_ptr<ITunerFilterCallback>& in_cb,
+                                            shared_ptr<ITunerFilter>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    shared_ptr<IFilter> filter;
+    shared_ptr<TunerFilter::FilterCallback> filterCb =
+            ::ndk::SharedRefBase::make<TunerFilter::FilterCallback>(in_cb);
+    shared_ptr<IFilterCallback> cb = filterCb;
+    auto status = mDemux->openFilter(in_type, in_bufferSize, cb, &filter);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type);
+    }
+
+    return status;
+}
+
+::ndk::ScopedAStatus TunerDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    DemuxFilterMainType mainType = static_cast<DemuxFilterMainType>(type);
-    DemuxFilterType filterType {
-        .mainType = mainType,
-    };
-
-    switch(mainType) {
-        case DemuxFilterMainType::TS:
-            filterType.subType.tsFilterType(static_cast<DemuxTsFilterType>(subType));
-            break;
-        case DemuxFilterMainType::MMTP:
-            filterType.subType.mmtpFilterType(static_cast<DemuxMmtpFilterType>(subType));
-            break;
-        case DemuxFilterMainType::IP:
-            filterType.subType.ipFilterType(static_cast<DemuxIpFilterType>(subType));
-            break;
-        case DemuxFilterMainType::TLV:
-            filterType.subType.tlvFilterType(static_cast<DemuxTlvFilterType>(subType));
-            break;
-        case DemuxFilterMainType::ALP:
-            filterType.subType.alpFilterType(static_cast<DemuxAlpFilterType>(subType));
-            break;
-    }
-    Result status;
-    sp<IFilter> filterSp;
-    sp<IFilterCallback> cbSp = new TunerFilter::FilterCallback(cb);
-    mDemux->openFilter(filterType, bufferSize, cbSp,
-            [&](Result r, const sp<IFilter>& filter) {
-                filterSp = filter;
-                status = r;
-            });
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    shared_ptr<ITimeFilter> filter;
+    auto status = mDemux->openTimeFilter(&filter);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerTimeFilter>(filter);
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filterSp, type, subType);
-    return Status::ok();
+    return status;
 }
 
-Status TunerDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
+::ndk::ScopedAStatus TunerDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
+                                               int32_t* _aidl_return) {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    sp<ITimeFilter> filterSp;
-    mDemux->openTimeFilter([&](Result r, const sp<ITimeFilter>& filter) {
-        filterSp = filter;
-        status = r;
-    });
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerTimeFilter>(filterSp);
-    return Status::ok();
+    shared_ptr<IFilter> halFilter = (static_cast<TunerFilter*>(tunerFilter.get()))->getHalFilter();
+    return mDemux->getAvSyncHwId(halFilter, _aidl_return);
 }
 
-Status TunerDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter, int* _aidl_return) {
+::ndk::ScopedAStatus TunerDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    uint32_t avSyncHwId;
-    Result res;
-    sp<IFilter> halFilter = static_cast<TunerFilter*>(tunerFilter.get())->getHalFilter();
-    mDemux->getAvSyncHwId(halFilter,
-            [&](Result r, uint32_t id) {
-                res = r;
-                avSyncHwId = id;
-            });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    *_aidl_return = (int)avSyncHwId;
-    return Status::ok();
+    return mDemux->getAvSyncTime(avSyncHwId, _aidl_return);
 }
 
-Status TunerDemux::getAvSyncTime(int avSyncHwId, int64_t* _aidl_return) {
+::ndk::ScopedAStatus TunerDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
+                                         const shared_ptr<ITunerDvrCallback>& in_cb,
+                                         shared_ptr<ITunerDvr>* _aidl_return) {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    uint64_t time;
-    Result res;
-    mDemux->getAvSyncTime(static_cast<uint32_t>(avSyncHwId),
-            [&](Result r, uint64_t ts) {
-                res = r;
-                time = ts;
-            });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    shared_ptr<IDvrCallback> callback = ::ndk::SharedRefBase::make<TunerDvr::DvrCallback>(in_cb);
+    shared_ptr<IDvr> halDvr;
+    auto res = mDemux->openDvr(in_dvbType, in_bufferSize, callback, &halDvr);
+    if (res.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerDvr>(halDvr, in_dvbType);
     }
 
-    *_aidl_return = (int64_t)time;
-    return Status::ok();
+    return res;
 }
 
-Status TunerDemux::openDvr(int dvrType, int bufferSize, const shared_ptr<ITunerDvrCallback>& cb,
-        shared_ptr<ITunerDvr>* _aidl_return) {
+::ndk::ScopedAStatus TunerDemux::connectCiCam(int32_t ciCamId) {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res;
-    sp<IDvrCallback> callback = new TunerDvr::DvrCallback(cb);
-    sp<IDvr> hidlDvr;
-    mDemux->openDvr(static_cast<DvrType>(dvrType), bufferSize, callback,
-            [&](Result r, const sp<IDvr>& dvr) {
-                hidlDvr = dvr;
-                res = r;
-            });
-    if (res != Result::SUCCESS) {
-        *_aidl_return = NULL;
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerDvr>(hidlDvr, dvrType);
-    return Status::ok();
+    return mDemux->connectCiCam(ciCamId);
 }
 
-Status TunerDemux::connectCiCam(int ciCamId) {
+::ndk::ScopedAStatus TunerDemux::disconnectCiCam() {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDemux->connectCiCam(static_cast<uint32_t>(ciCamId));
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDemux->disconnectCiCam();
 }
 
-Status TunerDemux::disconnectCiCam() {
+::ndk::ScopedAStatus TunerDemux::close() {
     if (mDemux == nullptr) {
         ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDemux->disconnectCiCam();
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    auto res = mDemux->close();
+    mDemux = nullptr;
+
+    return res;
 }
 
-Status TunerDemux::close() {
-    if (mDemux == nullptr) {
-        ALOGE("IDemux is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    Result res = mDemux->close();
-    mDemux = NULL;
-
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
-}
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index 2a9836b..cdb3aa0 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -17,52 +17,55 @@
 #ifndef ANDROID_MEDIA_TUNERDEMUX_H
 #define ANDROID_MEDIA_TUNERDEMUX_H
 
+#include <aidl/android/hardware/tv/tuner/IDemux.h>
 #include <aidl/android/media/tv/tuner/BnTunerDemux.h>
-#include <android/hardware/tv/tuner/1.0/ITuner.h>
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::tv::tuner::BnTunerDemux;
-using ::aidl::android::media::tv::tuner::ITunerDvr;
-using ::aidl::android::media::tv::tuner::ITunerDvrCallback;
-using ::aidl::android::media::tv::tuner::ITunerFilter;
-using ::aidl::android::media::tv::tuner::ITunerFilterCallback;
-using ::aidl::android::media::tv::tuner::ITunerFrontend;
-using ::aidl::android::media::tv::tuner::ITunerTimeFilter;
-using ::android::hardware::tv::tuner::V1_0::IDemux;
-using ::android::hardware::tv::tuner::V1_0::IDvr;
-using ::android::hardware::tv::tuner::V1_0::IDvrCallback;
-using ::android::hardware::tv::tuner::V1_0::ITimeFilter;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterType;
+using ::aidl::android::hardware::tv::tuner::DvrType;
+using ::aidl::android::hardware::tv::tuner::IDemux;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
 class TunerDemux : public BnTunerDemux {
 
 public:
-    TunerDemux(sp<IDemux> demux, int demuxId);
+    TunerDemux(shared_ptr<IDemux> demux, int demuxId);
     virtual ~TunerDemux();
-    Status setFrontendDataSource(const shared_ptr<ITunerFrontend>& frontend) override;
-    Status openFilter(
-        int mainType, int subtype, int bufferSize, const shared_ptr<ITunerFilterCallback>& cb,
-        shared_ptr<ITunerFilter>* _aidl_return) override;
-    Status openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) override;
-    Status getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter, int* _aidl_return) override;
-    Status getAvSyncTime(int avSyncHwId, int64_t* _aidl_return) override;
-    Status openDvr(
-        int dvbType, int bufferSize, const shared_ptr<ITunerDvrCallback>& cb,
-        shared_ptr<ITunerDvr>* _aidl_return) override;
-    Status connectCiCam(int ciCamId) override;
-    Status disconnectCiCam() override;
-    Status close() override;
+
+    ::ndk::ScopedAStatus setFrontendDataSource(
+            const shared_ptr<ITunerFrontend>& in_frontend) override;
+    ::ndk::ScopedAStatus setFrontendDataSourceById(int frontendId) override;
+    ::ndk::ScopedAStatus openFilter(const DemuxFilterType& in_type, int32_t in_bufferSize,
+                                    const shared_ptr<ITunerFilterCallback>& in_cb,
+                                    shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus getAvSyncHwId(const shared_ptr<ITunerFilter>& in_tunerFilter,
+                                       int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getAvSyncTime(int32_t in_avSyncHwId, int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus openDvr(DvrType in_dvbType, int32_t in_bufferSize,
+                                 const shared_ptr<ITunerDvrCallback>& in_cb,
+                                 shared_ptr<ITunerDvr>* _aidl_return) override;
+    ::ndk::ScopedAStatus connectCiCam(int32_t in_ciCamId) override;
+    ::ndk::ScopedAStatus disconnectCiCam() override;
+    ::ndk::ScopedAStatus close() override;
 
     int getId() { return mDemuxId; }
 
 private:
-    sp<IDemux> mDemux;
+    shared_ptr<IDemux> mDemux;
     int mDemuxId;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERDEMUX_H
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index b7ae167..70aee20 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -16,17 +16,27 @@
 
 #define LOG_TAG "TunerDescrambler"
 
-#include "TunerFilter.h"
-#include "TunerDemux.h"
 #include "TunerDescrambler.h"
 
-using ::android::hardware::tv::tuner::V1_0::Result;
+#include <aidl/android/hardware/tv/tuner/IFilter.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <utils/Log.h>
+
+#include "TunerDemux.h"
+#include "TunerFilter.h"
+
+using ::aidl::android::hardware::tv::tuner::IFilter;
+using ::aidl::android::hardware::tv::tuner::Result;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-TunerDescrambler::TunerDescrambler(sp<IDescrambler> descrambler) {
+TunerDescrambler::TunerDescrambler(shared_ptr<IDescrambler> descrambler) {
     mDescrambler = descrambler;
 }
 
@@ -34,91 +44,74 @@
     mDescrambler = nullptr;
 }
 
-Status TunerDescrambler::setDemuxSource(const std::shared_ptr<ITunerDemux>& demux) {
+::ndk::ScopedAStatus TunerDescrambler::setDemuxSource(
+        const shared_ptr<ITunerDemux>& in_tunerDemux) {
     if (mDescrambler == nullptr) {
         ALOGE("IDescrambler is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->setDemuxSource(static_cast<TunerDemux*>(demux.get())->getId());
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDescrambler->setDemuxSource((static_cast<TunerDemux*>(in_tunerDemux.get()))->getId());
 }
 
-Status TunerDescrambler::setKeyToken(const vector<uint8_t>& keyToken) {
+::ndk::ScopedAStatus TunerDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
     if (mDescrambler == nullptr) {
         ALOGE("IDescrambler is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->setKeyToken(keyToken);
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDescrambler->setKeyToken(in_keyToken);
 }
 
-Status TunerDescrambler::addPid(const TunerDemuxPid& pid,
-        const shared_ptr<ITunerFilter>& optionalSourceFilter) {
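+// Forward the PID, and the optional source filter's HAL handle, directly to the HAL descrambler.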
+::ndk::ScopedAStatus TunerDescrambler::addPid(
+        const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
     if (mDescrambler == nullptr) {
         ALOGE("IDescrambler is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    sp<IFilter> halFilter = (optionalSourceFilter == NULL)
-            ? NULL : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
-    Result res = mDescrambler->addPid(getHidlDemuxPid(pid), halFilter);
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    shared_ptr<IFilter> halFilter =
+            (in_optionalSourceFilter == nullptr)
+                    ? nullptr
+                    : static_cast<TunerFilter*>(in_optionalSourceFilter.get())->getHalFilter();
+
+    return mDescrambler->addPid(in_pid, halFilter);
 }
 
-Status TunerDescrambler::removePid(const TunerDemuxPid& pid,
-        const shared_ptr<ITunerFilter>& optionalSourceFilter) {
+::ndk::ScopedAStatus TunerDescrambler::removePid(
+        const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
     if (mDescrambler == nullptr) {
         ALOGE("IDescrambler is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    sp<IFilter> halFilter = (optionalSourceFilter == NULL)
-            ? NULL : static_cast<TunerFilter*>(optionalSourceFilter.get())->getHalFilter();
-    Result res = mDescrambler->removePid(getHidlDemuxPid(pid), halFilter);
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    shared_ptr<IFilter> halFilter =
+            (in_optionalSourceFilter == nullptr)
+                    ? nullptr
+                    : static_cast<TunerFilter*>(in_optionalSourceFilter.get())->getHalFilter();
+
+    return mDescrambler->removePid(in_pid, halFilter);
 }
 
-Status TunerDescrambler::close() {
+::ndk::ScopedAStatus TunerDescrambler::close() {
     if (mDescrambler == nullptr) {
         ALOGE("IDescrambler is not initialized.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDescrambler->close();
-    mDescrambler = NULL;
+    auto res = mDescrambler->close();
+    mDescrambler = nullptr;
 
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return res;
 }
 
-DemuxPid TunerDescrambler::getHidlDemuxPid(const TunerDemuxPid& pid) {
-    DemuxPid hidlPid;
-    switch (pid.getTag()) {
-        case TunerDemuxPid::tPid: {
-            hidlPid.tPid((uint16_t)pid.get<TunerDemuxPid::tPid>());
-            break;
-        }
-        case TunerDemuxPid::mmtpPid: {
-            hidlPid.mmtpPid((uint16_t)pid.get<TunerDemuxPid::mmtpPid>());
-            break;
-        }
-    }
-    return hidlPid;
-}
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
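Note (illustrative, not part of the patch): the rewritten methods above forward the
AIDL HAL's ::ndk::ScopedAStatus directly instead of translating a HIDL Result. A
minimal caller-side sketch, assuming a hypothetical descrambler handle and keyToken:

    ::ndk::ScopedAStatus status = descrambler->setKeyToken(keyToken);
    if (!status.isOk()) {
        // Service-specific errors carry the tuner Result code, e.g. Result::UNAVAILABLE.
        ALOGE("setKeyToken failed: %d", status.getServiceSpecificError());
    }
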
diff --git a/services/tuner/TunerDescrambler.h b/services/tuner/TunerDescrambler.h
index 1970fb7..b1d5fb9 100644
--- a/services/tuner/TunerDescrambler.h
+++ b/services/tuner/TunerDescrambler.h
@@ -17,38 +17,43 @@
 #ifndef ANDROID_MEDIA_TUNERDESCRAMBLER_H
 #define ANDROID_MEDIA_TUNERDESCRAMBLER_H
 
+#include <aidl/android/hardware/tv/tuner/IDescrambler.h>
 #include <aidl/android/media/tv/tuner/BnTunerDescrambler.h>
-#include <android/hardware/tv/tuner/1.0/ITuner.h>
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::tv::tuner::BnTunerDescrambler;
-using ::aidl::android::media::tv::tuner::ITunerDemux;
-using ::aidl::android::media::tv::tuner::ITunerFilter;
-using ::aidl::android::media::tv::tuner::TunerDemuxPid;
-using ::android::hardware::tv::tuner::V1_0::DemuxPid;
-using ::android::hardware::tv::tuner::V1_0::IDescrambler;
+using ::aidl::android::hardware::tv::tuner::DemuxPid;
+using ::aidl::android::hardware::tv::tuner::IDescrambler;
 
+using namespace std;
+
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
 class TunerDescrambler : public BnTunerDescrambler {
 
 public:
-    TunerDescrambler(sp<IDescrambler> descrambler);
+    TunerDescrambler(shared_ptr<IDescrambler> descrambler);
     virtual ~TunerDescrambler();
-    Status setDemuxSource(const shared_ptr<ITunerDemux>& demux) override;
-    Status setKeyToken(const vector<uint8_t>& keyToken) override;
-    Status addPid(const TunerDemuxPid& pid,
-            const shared_ptr<ITunerFilter>& optionalSourceFilter) override;
-    Status removePid(const TunerDemuxPid& pid,
-            const shared_ptr<ITunerFilter>& optionalSourceFilter) override;
-    Status close() override;
+
+    ::ndk::ScopedAStatus setDemuxSource(const shared_ptr<ITunerDemux>& in_tunerDemux) override;
+    ::ndk::ScopedAStatus setKeyToken(const vector<uint8_t>& in_keyToken) override;
+    ::ndk::ScopedAStatus addPid(const DemuxPid& in_pid,
+                                const shared_ptr<ITunerFilter>& in_optionalSourceFilter) override;
+    ::ndk::ScopedAStatus removePid(
+            const DemuxPid& in_pid,
+            const shared_ptr<ITunerFilter>& in_optionalSourceFilter) override;
+    ::ndk::ScopedAStatus close() override;
 
 private:
-    DemuxPid getHidlDemuxPid(const TunerDemuxPid& pid);
-
-    sp<IDescrambler> mDescrambler;
+    shared_ptr<IDescrambler> mDescrambler;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERDESCRAMBLER_H
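Note (illustrative, not part of the patch): TunerDescrambler now wraps a
shared_ptr<IDescrambler> from the AIDL HAL and, as a BnTunerDescrambler, is created
through ndk::SharedRefBase like any NDK binder object. A minimal sketch, assuming a
hypothetical halDescrambler obtained from the AIDL ITuner:

    shared_ptr<IDescrambler> halDescrambler = /* from the AIDL ITuner HAL */ nullptr;
    shared_ptr<TunerDescrambler> descrambler =
            ::ndk::SharedRefBase::make<TunerDescrambler>(halDescrambler);
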
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index db4e07b..8776f7e 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -16,194 +16,152 @@
 
 #define LOG_TAG "TunerDvr"
 
-#include <fmq/ConvertMQDescriptors.h>
 #include "TunerDvr.h"
+
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <utils/Log.h>
+
 #include "TunerFilter.h"
 
-using ::android::hardware::tv::tuner::V1_0::DataFormat;
-using ::android::hardware::tv::tuner::V1_0::Result;
+using ::aidl::android::hardware::tv::tuner::Result;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-TunerDvr::TunerDvr(sp<IDvr> dvr, int type) {
+TunerDvr::TunerDvr(shared_ptr<IDvr> dvr, DvrType type) {
     mDvr = dvr;
-    mType = static_cast<DvrType>(type);
+    mType = type;
 }
 
 TunerDvr::~TunerDvr() {
-    mDvr = NULL;
+    mDvr = nullptr;
 }
 
-Status TunerDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    MQDesc dvrMQDesc;
-    Result res;
-    mDvr->getQueueDesc([&](Result r, const MQDesc& desc) {
-        dvrMQDesc = desc;
-        res = r;
-    });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    AidlMQDesc aidlMQDesc;
-    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
-                dvrMQDesc,  &aidlMQDesc);
-    *_aidl_return = move(aidlMQDesc);
-    return Status::ok();
+    return mDvr->getQueueDesc(_aidl_return);
 }
 
-Status TunerDvr::configure(const TunerDvrSettings& settings) {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::configure(const DvrSettings& in_settings) {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDvr->configure(getHidlDvrSettingsFromAidl(settings));
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDvr->configure(in_settings);
 }
 
-Status TunerDvr::attachFilter(const shared_ptr<ITunerFilter>& filter) {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    ITunerFilter* tunerFilter = filter.get();
-    sp<IFilter> hidlFilter = static_cast<TunerFilter*>(tunerFilter)->getHalFilter();
-    if (hidlFilter == NULL) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    if (in_filter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
 
-    Result res = mDvr->attachFilter(hidlFilter);
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    shared_ptr<IFilter> halFilter = (static_cast<TunerFilter*>(in_filter.get()))->getHalFilter();
+    if (halFilter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
-    return Status::ok();
+
+    return mDvr->attachFilter(halFilter);
 }
 
-Status TunerDvr::detachFilter(const shared_ptr<ITunerFilter>& filter) {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    ITunerFilter* tunerFilter = filter.get();
-    sp<IFilter> hidlFilter = static_cast<TunerFilter*>(tunerFilter)->getHalFilter();
-    if (hidlFilter == NULL) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    if (in_filter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
 
-    Result res = mDvr->detachFilter(hidlFilter);
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    shared_ptr<IFilter> halFilter = (static_cast<TunerFilter*>(in_filter.get()))->getHalFilter();
+    if (halFilter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
-    return Status::ok();
+
+    return mDvr->detachFilter(halFilter);
 }
 
-Status TunerDvr::start() {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::start() {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDvr->start();
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDvr->start();
 }
 
-Status TunerDvr::stop() {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::stop() {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDvr->stop();
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDvr->stop();
 }
 
-Status TunerDvr::flush() {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::flush() {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDvr->flush();
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return mDvr->flush();
 }
 
-Status TunerDvr::close() {
-    if (mDvr == NULL) {
+::ndk::ScopedAStatus TunerDvr::close() {
+    if (mDvr == nullptr) {
         ALOGE("IDvr is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mDvr->close();
-    mDvr = NULL;
+    auto status = mDvr->close();
+    mDvr = nullptr;
 
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
-}
-
-DvrSettings TunerDvr::getHidlDvrSettingsFromAidl(TunerDvrSettings settings) {
-    DvrSettings s;
-    switch (mType) {
-        case DvrType::PLAYBACK: {
-            s.playback({
-                .statusMask = static_cast<uint8_t>(settings.statusMask),
-                .lowThreshold = static_cast<uint32_t>(settings.lowThreshold),
-                .highThreshold = static_cast<uint32_t>(settings.highThreshold),
-                .dataFormat = static_cast<DataFormat>(settings.dataFormat),
-                .packetSize = static_cast<uint8_t>(settings.packetSize),
-            });
-            return s;
-        }
-        case DvrType::RECORD: {
-            s.record({
-                .statusMask = static_cast<uint8_t>(settings.statusMask),
-                .lowThreshold = static_cast<uint32_t>(settings.lowThreshold),
-                .highThreshold = static_cast<uint32_t>(settings.highThreshold),
-                .dataFormat = static_cast<DataFormat>(settings.dataFormat),
-                .packetSize = static_cast<uint8_t>(settings.packetSize),
-            });
-            return s;
-        }
-        default:
-            break;
-    }
-    return s;
+    return status;
 }
 
 /////////////// IDvrCallback ///////////////////////
-
-Return<void> TunerDvr::DvrCallback::onRecordStatus(const RecordStatus status) {
-    if (mTunerDvrCallback != NULL) {
-        mTunerDvrCallback->onRecordStatus(static_cast<int>(status));
+::ndk::ScopedAStatus TunerDvr::DvrCallback::onRecordStatus(const RecordStatus status) {
+    if (mTunerDvrCallback != nullptr) {
+        mTunerDvrCallback->onRecordStatus(status);
     }
-    return Void();
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<void> TunerDvr::DvrCallback::onPlaybackStatus(const PlaybackStatus status) {
-    if (mTunerDvrCallback != NULL) {
-        mTunerDvrCallback->onPlaybackStatus(static_cast<int>(status));
+::ndk::ScopedAStatus TunerDvr::DvrCallback::onPlaybackStatus(const PlaybackStatus status) {
+    if (mTunerDvrCallback != nullptr) {
+        mTunerDvrCallback->onPlaybackStatus(status);
     }
-    return Void();
+    return ndk::ScopedAStatus::ok();
 }
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index a508e99..1854d08 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -17,81 +17,71 @@
 #ifndef ANDROID_MEDIA_TUNERDVR_H
 #define ANDROID_MEDIA_TUNERDVR_H
 
+#include <aidl/android/hardware/tv/tuner/BnDvrCallback.h>
+#include <aidl/android/hardware/tv/tuner/DvrSettings.h>
+#include <aidl/android/hardware/tv/tuner/DvrType.h>
+#include <aidl/android/hardware/tv/tuner/IDvr.h>
+#include <aidl/android/hardware/tv/tuner/PlaybackStatus.h>
+#include <aidl/android/hardware/tv/tuner/RecordStatus.h>
 #include <aidl/android/media/tv/tuner/BnTunerDvr.h>
 #include <aidl/android/media/tv/tuner/ITunerDvrCallback.h>
-#include <android/hardware/tv/tuner/1.0/ITuner.h>
-#include <fmq/MessageQueue.h>
 
-#include <TunerFilter.h>
+#include "TunerFilter.h"
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::hardware::common::fmq::GrantorDescriptor;
 using ::aidl::android::hardware::common::fmq::MQDescriptor;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
-using ::aidl::android::media::tv::tuner::BnTunerDvr;
-using ::aidl::android::media::tv::tuner::ITunerDvrCallback;
-using ::aidl::android::media::tv::tuner::ITunerFilter;
-using ::aidl::android::media::tv::tuner::TunerDvrSettings;
-
-using ::android::hardware::MQDescriptorSync;
-using ::android::hardware::MessageQueue;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-
-using ::android::hardware::tv::tuner::V1_0::DvrSettings;
-using ::android::hardware::tv::tuner::V1_0::DvrType;
-using ::android::hardware::tv::tuner::V1_0::IDvr;
-using ::android::hardware::tv::tuner::V1_0::IDvrCallback;
-using ::android::hardware::tv::tuner::V1_0::PlaybackStatus;
-using ::android::hardware::tv::tuner::V1_0::RecordStatus;
+using ::aidl::android::hardware::tv::tuner::BnDvrCallback;
+using ::aidl::android::hardware::tv::tuner::DvrSettings;
+using ::aidl::android::hardware::tv::tuner::DvrType;
+using ::aidl::android::hardware::tv::tuner::IDvr;
+using ::aidl::android::hardware::tv::tuner::PlaybackStatus;
+using ::aidl::android::hardware::tv::tuner::RecordStatus;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-using MQDesc = MQDescriptorSync<uint8_t>;
 using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
 
 class TunerDvr : public BnTunerDvr {
 
 public:
-    TunerDvr(sp<IDvr> dvr, int type);
+    TunerDvr(shared_ptr<IDvr> dvr, DvrType type);
     ~TunerDvr();
 
-    Status getQueueDesc(AidlMQDesc* _aidl_return) override;
+    ::ndk::ScopedAStatus getQueueDesc(AidlMQDesc* _aidl_return) override;
+    ::ndk::ScopedAStatus configure(const DvrSettings& in_settings) override;
+    ::ndk::ScopedAStatus attachFilter(const shared_ptr<ITunerFilter>& in_filter) override;
+    ::ndk::ScopedAStatus detachFilter(const shared_ptr<ITunerFilter>& in_filter) override;
+    ::ndk::ScopedAStatus start() override;
+    ::ndk::ScopedAStatus stop() override;
+    ::ndk::ScopedAStatus flush() override;
+    ::ndk::ScopedAStatus close() override;
 
-    Status configure(const TunerDvrSettings& settings) override;
-
-    Status attachFilter(const shared_ptr<ITunerFilter>& filter) override;
-
-    Status detachFilter(const shared_ptr<ITunerFilter>& filter) override;
-
-    Status start() override;
-
-    Status stop() override;
-
-    Status flush() override;
-
-    Status close() override;
-
-    struct DvrCallback : public IDvrCallback {
+    struct DvrCallback : public BnDvrCallback {
         DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
-                : mTunerDvrCallback(tunerDvrCallback) {};
+              : mTunerDvrCallback(tunerDvrCallback){};
 
-        virtual Return<void> onRecordStatus(const RecordStatus status);
-        virtual Return<void> onPlaybackStatus(const PlaybackStatus status);
+        ::ndk::ScopedAStatus onRecordStatus(const RecordStatus status) override;
+        ::ndk::ScopedAStatus onPlaybackStatus(const PlaybackStatus status) override;
 
-        private:
-            shared_ptr<ITunerDvrCallback> mTunerDvrCallback;
+    private:
+        shared_ptr<ITunerDvrCallback> mTunerDvrCallback;
     };
 
 private:
-    DvrSettings getHidlDvrSettingsFromAidl(TunerDvrSettings settings);
-
-    sp<IDvr> mDvr;
+    shared_ptr<IDvr> mDvr;
     DvrType mType;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERDVR_H
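Note (illustrative, not part of the patch): DvrCallback now extends the AIDL
BnDvrCallback, so it is instantiated like any other NDK binder object and handed to
the HAL when the DVR is opened. A minimal sketch, assuming a hypothetical clientCb
received from the Tuner client:

    shared_ptr<ITunerDvrCallback> clientCb = /* from the Tuner client */ nullptr;
    shared_ptr<TunerDvr::DvrCallback> halCb =
            ::ndk::SharedRefBase::make<TunerDvr::DvrCallback>(clientCb);
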
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 039fd31..e8c7767 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -18,893 +18,469 @@
 
 #include "TunerFilter.h"
 
-using ::aidl::android::media::tv::tuner::TunerFilterSectionCondition;
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <binder/IPCThreadState.h>
 
-using ::android::hardware::hidl_handle;
-using ::android::hardware::tv::tuner::V1_0::DemuxAlpLengthType;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
-using ::android::hardware::tv::tuner::V1_0::DemuxIpAddress;
-using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxMmtpPid;
-using ::android::hardware::tv::tuner::V1_0::DemuxRecordScIndexType;
-using ::android::hardware::tv::tuner::V1_0::DemuxStreamId;
-using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
-using ::android::hardware::tv::tuner::V1_0::Result;
-using ::android::hardware::tv::tuner::V1_1::AudioStreamType;
-using ::android::hardware::tv::tuner::V1_1::Constant;
-using ::android::hardware::tv::tuner::V1_1::VideoStreamType;
+#include "TunerHelper.h"
+#include "TunerService.h"
 
+using ::aidl::android::hardware::tv::tuner::Result;
+
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+using ::android::IPCThreadState;
 
 using namespace std;
 
-TunerFilter::TunerFilter(
-        sp<IFilter> filter, int mainType, int subType) {
-    mFilter = filter;
-    mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
-    mMainType = mainType;
-    mSubType = subType;
-}
+TunerFilter::TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb,
+                         DemuxFilterType type)
+      : mFilter(filter),
+        mType(type),
+        mStarted(false),
+        mShared(false),
+        mClientPid(-1),
+        mFilterCallback(cb) {}
 
 TunerFilter::~TunerFilter() {
+    Mutex::Autolock _l(mLock);
     mFilter = nullptr;
-    mFilter_1_1 = nullptr;
 }
 
-Status TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
-    if (mFilter == NULL) {
-        ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    MQDesc filterMQDesc;
-    Result res;
-    mFilter->getQueueDesc([&](Result r, const MQDesc& desc) {
-        filterMQDesc = desc;
-        res = r;
-    });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    AidlMQDesc aidlMQDesc;
-    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(
-                filterMQDesc,  &aidlMQDesc);
-    *_aidl_return = move(aidlMQDesc);
-    return Status::ok();
-}
-
-Status TunerFilter::getId(int32_t* _aidl_return) {
+::ndk::ScopedAStatus TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res;
-    mFilter->getId([&](Result r, uint32_t filterId) {
-        res = r;
-        mId = filterId;
-    });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
     }
-    *_aidl_return = mId;
-    return Status::ok();
+
+    return mFilter->getQueueDesc(_aidl_return);
 }
 
-Status TunerFilter::getId64Bit(int64_t* _aidl_return) {
-    if (mFilter_1_1 == nullptr) {
-        ALOGE("IFilter_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    Result res;
-    mFilter_1_1->getId64Bit([&](Result r, uint64_t filterId) {
-        res = r;
-        mId64Bit = filterId;
-    });
-    if (res != Result::SUCCESS) {
-        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    *_aidl_return = mId64Bit;
-    return Status::ok();
-}
-
-Status TunerFilter::configure(const TunerFilterConfiguration& config) {
+::ndk::ScopedAStatus TunerFilter::getId(int32_t* _aidl_return) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    DemuxFilterSettings settings;
-    switch (config.getTag()) {
-        case TunerFilterConfiguration::ts: {
-            getHidlTsSettings(config, settings);
-            break;
-        }
-        case TunerFilterConfiguration::mmtp: {
-            getHidlMmtpSettings(config, settings);
-            break;
-        }
-        case TunerFilterConfiguration::ip: {
-            getHidlIpSettings(config, settings);
-            break;
-        }
-        case TunerFilterConfiguration::tlv: {
-            getHidlTlvSettings(config, settings);
-            break;
-        }
-        case TunerFilterConfiguration::alp: {
-            getHidlAlpSettings(config, settings);
-            break;
-        }
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
     }
 
-    Result res = mFilter->configure(settings);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    auto status = mFilter->getId(&mId);
+    if (status.isOk()) {
+        *_aidl_return = mId;
     }
-    return Status::ok();
+    return status;
 }
 
-Status TunerFilter::configureMonitorEvent(int monitorEventType) {
-    if (mFilter_1_1 == nullptr) {
-        ALOGE("IFilter_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    Result res = mFilter_1_1->configureMonitorEvent(monitorEventType);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
-}
-
-Status TunerFilter::configureIpFilterContextId(int cid) {
-    if (mFilter_1_1 == nullptr) {
-        ALOGE("IFilter_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    Result res = mFilter_1_1->configureIpCid(cid);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
-}
-
-Status TunerFilter::configureAvStreamType(int avStreamType) {
-    if (mFilter_1_1 == nullptr) {
-        ALOGE("IFilter_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    AvStreamType type;
-    if (!getHidlAvStreamType(avStreamType, type)) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_STATE));
-    }
-
-    Result res = mFilter_1_1->configureAvStreamType(type);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
-}
-
-Status TunerFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
+::ndk::ScopedAStatus TunerFilter::getId64Bit(int64_t* _aidl_return) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    ITunerFilter* tunerFilter = filter.get();
-    sp<IFilter> hidlFilter = static_cast<TunerFilter*>(tunerFilter)->getHalFilter();
-    Result res = mFilter->setDataSource(hidlFilter);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
-}
-
-void TunerFilter::getHidlTsSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
-    auto tsConf = config.get<TunerFilterConfiguration::ts>();
-    DemuxTsFilterSettings ts{
-        .tpid = static_cast<uint16_t>(tsConf.tpid),
-    };
-
-    TunerFilterSettings tunerSettings = tsConf.filterSettings;
-    switch (tunerSettings.getTag()) {
-        case TunerFilterSettings::av: {
-            ts.filterSettings.av(getAvSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::section: {
-            ts.filterSettings.section(getSectionSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::pesData: {
-            ts.filterSettings.pesData(getPesDataSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::record: {
-            ts.filterSettings.record(getRecordSettings(tunerSettings));
-            break;
-        }
-        default: {
-            ts.filterSettings.noinit();
-            break;
-        }
-    }
-    settings.ts(ts);
-}
-
-void TunerFilter::getHidlMmtpSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
-    auto mmtpConf = config.get<TunerFilterConfiguration::mmtp>();
-    DemuxMmtpFilterSettings mmtp{
-        .mmtpPid = static_cast<DemuxMmtpPid>(mmtpConf.mmtpPid),
-    };
-
-    TunerFilterSettings tunerSettings = mmtpConf.filterSettings;
-    switch (tunerSettings.getTag()) {
-        case TunerFilterSettings::av: {
-            mmtp.filterSettings.av(getAvSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::section: {
-            mmtp.filterSettings.section(getSectionSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::pesData: {
-            mmtp.filterSettings.pesData(getPesDataSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::record: {
-            mmtp.filterSettings.record(getRecordSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::download: {
-            mmtp.filterSettings.download(getDownloadSettings(tunerSettings));
-            break;
-        }
-        default: {
-            mmtp.filterSettings.noinit();
-            break;
-        }
-    }
-    settings.mmtp(mmtp);
-}
-
-void TunerFilter::getHidlIpSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
-    auto ipConf = config.get<TunerFilterConfiguration::ip>();
-    DemuxIpAddress ipAddr{
-        .srcPort = static_cast<uint16_t>(ipConf.ipAddr.srcPort),
-        .dstPort = static_cast<uint16_t>(ipConf.ipAddr.dstPort),
-    };
-
-    ipConf.ipAddr.srcIpAddress.isIpV6
-            ? ipAddr.srcIpAddress.v6(getIpV6Address(ipConf.ipAddr.srcIpAddress))
-            : ipAddr.srcIpAddress.v4(getIpV4Address(ipConf.ipAddr.srcIpAddress));
-    ipConf.ipAddr.dstIpAddress.isIpV6
-            ? ipAddr.dstIpAddress.v6(getIpV6Address(ipConf.ipAddr.dstIpAddress))
-            : ipAddr.dstIpAddress.v4(getIpV4Address(ipConf.ipAddr.dstIpAddress));
-    DemuxIpFilterSettings ip{
-        .ipAddr = ipAddr,
-    };
-
-    TunerFilterSettings tunerSettings = ipConf.filterSettings;
-    switch (tunerSettings.getTag()) {
-        case TunerFilterSettings::section: {
-            ip.filterSettings.section(getSectionSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::isPassthrough: {
-            ip.filterSettings.bPassthrough(tunerSettings.isPassthrough);
-            break;
-        }
-        default: {
-            ip.filterSettings.noinit();
-            break;
-        }
-    }
-    settings.ip(ip);
-}
-
-hidl_array<uint8_t, IP_V6_LENGTH> TunerFilter::getIpV6Address(TunerDemuxIpAddress addr) {
-    hidl_array<uint8_t, IP_V6_LENGTH> ip;
-    if (addr.addr.size() != IP_V6_LENGTH) {
-        return ip;
-    }
-    copy(addr.addr.begin(), addr.addr.end(), ip.data());
-    return ip;
-}
-
-hidl_array<uint8_t, IP_V4_LENGTH> TunerFilter::getIpV4Address(TunerDemuxIpAddress addr) {
-    hidl_array<uint8_t, IP_V4_LENGTH> ip;
-    if (addr.addr.size() != IP_V4_LENGTH) {
-        return ip;
-    }
-    copy(addr.addr.begin(), addr.addr.end(), ip.data());
-    return ip;
-}
-
-void TunerFilter::getHidlTlvSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
-    auto tlvConf = config.get<TunerFilterConfiguration::tlv>();
-    DemuxTlvFilterSettings tlv{
-        .packetType = static_cast<uint8_t>(tlvConf.packetType),
-        .isCompressedIpPacket = tlvConf.isCompressedIpPacket,
-    };
-
-    TunerFilterSettings tunerSettings = tlvConf.filterSettings;
-    switch (tunerSettings.getTag()) {
-        case TunerFilterSettings::section: {
-            tlv.filterSettings.section(getSectionSettings(tunerSettings));
-            break;
-        }
-        case TunerFilterSettings::isPassthrough: {
-            tlv.filterSettings.bPassthrough(tunerSettings.isPassthrough);
-            break;
-        }
-        default: {
-            tlv.filterSettings.noinit();
-            break;
-        }
-    }
-    settings.tlv(tlv);
-}
-
-void TunerFilter::getHidlAlpSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings) {
-    auto alpConf = config.get<TunerFilterConfiguration::alp>();
-    DemuxAlpFilterSettings alp{
-        .packetType = static_cast<uint8_t>(alpConf.packetType),
-        .lengthType = static_cast<DemuxAlpLengthType>(alpConf.lengthType),
-    };
-
-    TunerFilterSettings tunerSettings = alpConf.filterSettings;
-    switch (tunerSettings.getTag()) {
-        case TunerFilterSettings::section: {
-            alp.filterSettings.section(getSectionSettings(tunerSettings));
-            break;
-        }
-        default: {
-            alp.filterSettings.noinit();
-            break;
-        }
-    }
-    settings.alp(alp);
-}
-
-DemuxFilterAvSettings TunerFilter::getAvSettings(const TunerFilterSettings& settings) {
-    DemuxFilterAvSettings av {
-        .isPassthrough = settings.get<TunerFilterSettings::av>().isPassthrough,
-    };
-    return av;
-}
-
-DemuxFilterSectionSettings TunerFilter::getSectionSettings(const TunerFilterSettings& settings) {
-    auto s = settings.get<TunerFilterSettings::section>();
-    DemuxFilterSectionSettings section{
-        .isCheckCrc = s.isCheckCrc,
-        .isRepeat = s.isRepeat,
-        .isRaw = s.isRaw,
-    };
-
-    switch (s.condition.getTag()) {
-        case TunerFilterSectionCondition::sectionBits: {
-            auto sectionBits = s.condition.get<TunerFilterSectionCondition::sectionBits>();
-            vector<uint8_t> filter(sectionBits.filter.begin(), sectionBits.filter.end());
-            vector<uint8_t> mask(sectionBits.mask.begin(), sectionBits.mask.end());
-            vector<uint8_t> mode(sectionBits.mode.begin(), sectionBits.mode.end());
-            section.condition.sectionBits({
-                .filter = filter,
-                .mask = mask,
-                .mode = mode,
-            });
-            break;
-        }
-        case TunerFilterSectionCondition::tableInfo: {
-            auto tableInfo = s.condition.get<TunerFilterSectionCondition::tableInfo>();
-            section.condition.tableInfo({
-                .tableId = static_cast<uint16_t>(tableInfo.tableId),
-                .version = static_cast<uint16_t>(tableInfo.version),
-            });
-            break;
-        }
-        default: {
-            break;
-        }
-    }
-    return section;
-}
-
-DemuxFilterPesDataSettings TunerFilter::getPesDataSettings(const TunerFilterSettings& settings) {
-    DemuxFilterPesDataSettings pes{
-        .streamId = static_cast<DemuxStreamId>(
-                settings.get<TunerFilterSettings::pesData>().streamId),
-        .isRaw = settings.get<TunerFilterSettings::pesData>().isRaw,
-    };
-    return pes;
-}
-
-DemuxFilterRecordSettings TunerFilter::getRecordSettings(const TunerFilterSettings& settings) {
-    auto r = settings.get<TunerFilterSettings::record>();
-    DemuxFilterRecordSettings record{
-        .tsIndexMask = static_cast<uint32_t>(r.tsIndexMask),
-        .scIndexType = static_cast<DemuxRecordScIndexType>(r.scIndexType),
-    };
-
-    switch (r.scIndexMask.getTag()) {
-        case TunerFilterScIndexMask::sc: {
-            record.scIndexMask.sc(static_cast<uint32_t>(
-                    r.scIndexMask.get<TunerFilterScIndexMask::sc>()));
-            break;
-        }
-        case TunerFilterScIndexMask::scHevc: {
-            record.scIndexMask.scHevc(static_cast<uint32_t>(
-                    r.scIndexMask.get<TunerFilterScIndexMask::scHevc>()));
-            break;
-        }
-    }
-    return record;
-}
-
-DemuxFilterDownloadSettings TunerFilter::getDownloadSettings(const TunerFilterSettings& settings) {
-    DemuxFilterDownloadSettings download {
-        .downloadId = static_cast<uint32_t>(
-                settings.get<TunerFilterSettings::download>().downloadId),
-    };
-    return download;
-}
-
-Status TunerFilter::getAvSharedHandleInfo(TunerFilterSharedHandleInfo* _aidl_return) {
-    if (mFilter_1_1 == nullptr) {
-        ALOGE("IFilter_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
     }
 
-    Result res;
-    mFilter_1_1->getAvSharedHandle([&](Result r, hidl_handle avMemory, uint64_t avMemSize) {
-        res = r;
-        if (res == Result::SUCCESS) {
-            TunerFilterSharedHandleInfo info{
-                .handle = dupToAidl(avMemory),
-                .size = static_cast<int64_t>(avMemSize),
-            };
-            *_aidl_return = move(info);
+    auto status = mFilter->getId64Bit(&mId64Bit);
+    if (status.isOk()) {
+        *_aidl_return = mId64Bit;
+    }
+    return status;
+}
+
+::ndk::ScopedAStatus TunerFilter::configure(const DemuxFilterSettings& in_settings) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    return mFilter->configure(in_settings);
+}
+
+::ndk::ScopedAStatus TunerFilter::configureMonitorEvent(int32_t monitorEventType) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    return mFilter->configureMonitorEvent(monitorEventType);
+}
+
+::ndk::ScopedAStatus TunerFilter::configureIpFilterContextId(int32_t cid) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    return mFilter->configureIpCid(cid);
+}
+
+::ndk::ScopedAStatus TunerFilter::configureAvStreamType(const AvStreamType& in_avStreamType) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    return mFilter->configureAvStreamType(in_avStreamType);
+}
+
+::ndk::ScopedAStatus TunerFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (filter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    shared_ptr<IFilter> halFilter = static_cast<TunerFilter*>(filter.get())->getHalFilter();
+    return mFilter->setDataSource(halFilter);
+}
+
+::ndk::ScopedAStatus TunerFilter::getAvSharedHandle(NativeHandle* out_avMemory,
+                                                    int64_t* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    return mFilter->getAvSharedHandle(out_avMemory, _aidl_return);
+}
+
+::ndk::ScopedAStatus TunerFilter::releaseAvHandle(const NativeHandle& in_handle,
+                                                  int64_t in_avDataId) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    return mFilter->releaseAvHandle(in_handle, in_avDataId);
+}
+
+::ndk::ScopedAStatus TunerFilter::start() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    auto res = mFilter->start();
+    if (res.isOk()) {
+        mStarted = true;
+    }
+    return res;
+}
+
+::ndk::ScopedAStatus TunerFilter::stop() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    auto res = mFilter->stop();
+    mStarted = false;
+
+    return res;
+}
+
+::ndk::ScopedAStatus TunerFilter::flush() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    return mFilter->flush();
+}
+
+::ndk::ScopedAStatus TunerFilter::close() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            if (mFilterCallback != nullptr) {
+                mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
+                mFilterCallback->detachSharedFilterCallback();
+            }
+            TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
         } else {
-            _aidl_return = NULL;
+            // Calling from shared process, do not really close this filter.
+            if (mFilterCallback != nullptr) {
+                mFilterCallback->detachSharedFilterCallback();
+            }
+            mStarted = false;
+            return ::ndk::ScopedAStatus::ok();
         }
-    });
-
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
     }
-    return Status::ok();
+
+    if (mFilterCallback != nullptr) {
+        mFilterCallback->detachCallbacks();
+    }
+    auto res = mFilter->close();
+    mFilter = nullptr;
+    mStarted = false;
+    mShared = false;
+    mClientPid = -1;
+
+    return res;
 }
 
-Status TunerFilter::releaseAvHandle(
-        const ::aidl::android::hardware::common::NativeHandle& handle, int64_t avDataId) {
+::ndk::ScopedAStatus TunerFilter::acquireSharedFilterToken(string* _aidl_return) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mFilter->releaseAvHandle(hidl_handle(makeFromAidl(handle)), avDataId);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    if (mShared || mStarted) {
+        ALOGD("create SharedFilter in wrong state");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
     }
-    return Status::ok();
+
+    IPCThreadState* ipc = IPCThreadState::self();
+    mClientPid = ipc->getCallingPid();
+    string token = TunerService::getTunerService()->addFilterToShared(this->ref<TunerFilter>());
+    _aidl_return->assign(token);
+    mShared = true;
+
+    return ::ndk::ScopedAStatus::ok();
 }
 
-Status TunerFilter::start() {
+::ndk::ScopedAStatus TunerFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
-    Result res = mFilter->start();
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+
+    if (!mShared) {
+        // The filter is not shared or the shared filter has been closed.
+        return ::ndk::ScopedAStatus::ok();
     }
-    return Status::ok();
+
+    if (mFilterCallback != nullptr) {
+        mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
+        mFilterCallback->detachSharedFilterCallback();
+    }
+
+    TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+    mShared = false;
+
+    return ::ndk::ScopedAStatus::ok();
 }
 
-Status TunerFilter::stop() {
+::ndk::ScopedAStatus TunerFilter::getFilterType(DemuxFilterType* _aidl_return) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
-    Result res = mFilter->stop();
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+
+    *_aidl_return = mType;
+    return ::ndk::ScopedAStatus::ok();
 }
 
-Status TunerFilter::flush() {
+::ndk::ScopedAStatus TunerFilter::setDelayHint(const FilterDelayHint& in_hint) {
+    Mutex::Autolock _l(mLock);
     if (mFilter == nullptr) {
         ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
-    Result res = mFilter->flush();
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+
+    return mFilter->setDelayHint(in_hint);
 }
 
-Status TunerFilter::close() {
-    if (mFilter == nullptr) {
-        ALOGE("IFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-    Result res = mFilter->close();
-    mFilter = NULL;
-    mFilter_1_1 = NULL;
-
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+bool TunerFilter::isSharedFilterAllowed(int callingPid) {
+    return mShared && mClientPid != callingPid;
 }
 
-sp<IFilter> TunerFilter::getHalFilter() {
+void TunerFilter::attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb) {
+    if (mFilterCallback != nullptr) {
+        mFilterCallback->attachSharedFilterCallback(in_cb);
+    }
+}
+
+shared_ptr<IFilter> TunerFilter::getHalFilter() {
     return mFilter;
 }
 
-bool TunerFilter::isAudioFilter() {
-    return (mMainType == (int)DemuxFilterMainType::TS
-                    && mSubType == (int)DemuxTsFilterType::AUDIO)
-            || (mMainType == (int)DemuxFilterMainType::MMTP
-                    && mSubType == (int)DemuxMmtpFilterType::AUDIO);
-}
-
-bool TunerFilter::isVideoFilter() {
-    return (mMainType == (int)DemuxFilterMainType::TS
-                    && mSubType == (int)DemuxTsFilterType::VIDEO)
-            || (mMainType == (int)DemuxFilterMainType::MMTP
-                    && mSubType == (int)DemuxMmtpFilterType::VIDEO);
-}
-
-bool TunerFilter::getHidlAvStreamType(int avStreamType, AvStreamType& type) {
-    if (isAudioFilter()) {
-        type.audio(static_cast<AudioStreamType>(avStreamType));
-        return true;
-    }
-
-    if (isVideoFilter()) {
-        type.video(static_cast<VideoStreamType>(avStreamType));
-        return true;
-    }
-
-    return false;
-}
-
 /////////////// FilterCallback ///////////////////////
-
-Return<void> TunerFilter::FilterCallback::onFilterStatus(DemuxFilterStatus status) {
-    if (mTunerFilterCallback != NULL) {
-        mTunerFilterCallback->onFilterStatus((int)status);
+::ndk::ScopedAStatus TunerFilter::FilterCallback::onFilterStatus(DemuxFilterStatus status) {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr) {
+        mTunerFilterCallback->onFilterStatus(status);
     }
-    return Void();
+    return ::ndk::ScopedAStatus::ok();
 }
 
-Return<void> TunerFilter::FilterCallback::onFilterEvent(const DemuxFilterEvent& filterEvent) {
-    vector<DemuxFilterEventExt::Event> emptyEventsExt;
-    DemuxFilterEventExt emptyFilterEventExt {
-            .events = emptyEventsExt,
-    };
-    onFilterEvent_1_1(filterEvent, emptyFilterEventExt);
-    return Void();
+::ndk::ScopedAStatus TunerFilter::FilterCallback::onFilterEvent(
+        const vector<DemuxFilterEvent>& events) {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr) {
+        mTunerFilterCallback->onFilterEvent(events);
+    }
+    return ::ndk::ScopedAStatus::ok();
 }
 
-Return<void> TunerFilter::FilterCallback::onFilterEvent_1_1(const DemuxFilterEvent& filterEvent,
-        const DemuxFilterEventExt& filterEventExt) {
-    if (mTunerFilterCallback != NULL) {
-        vector<DemuxFilterEvent::Event> events = filterEvent.events;
-        vector<DemuxFilterEventExt::Event> eventsExt = filterEventExt.events;
-        vector<TunerFilterEvent> tunerEvent;
-
-        getAidlFilterEvent(events, eventsExt, tunerEvent);
-        mTunerFilterCallback->onFilterEvent(tunerEvent);
-    }
-    return Void();
-}
-
-/////////////// FilterCallback Helper Methods ///////////////////////
-
-void TunerFilter::FilterCallback::getAidlFilterEvent(vector<DemuxFilterEvent::Event>& events,
-        vector<DemuxFilterEventExt::Event>& eventsExt,
-        vector<TunerFilterEvent>& tunerEvent) {
-    if (events.empty() && !eventsExt.empty()) {
-        auto eventExt = eventsExt[0];
-        switch (eventExt.getDiscriminator()) {
-            case DemuxFilterEventExt::Event::hidl_discriminator::monitorEvent: {
-                getMonitorEvent(eventsExt, tunerEvent);
-                return;
-            }
-            case DemuxFilterEventExt::Event::hidl_discriminator::startId: {
-                getRestartEvent(eventsExt, tunerEvent);
-                return;
-            }
-            default: {
-                break;
-            }
-        }
-        return;
-    }
-
-    if (!events.empty()) {
-        auto event = events[0];
-        switch (event.getDiscriminator()) {
-            case DemuxFilterEvent::Event::hidl_discriminator::media: {
-                getMediaEvent(events, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::section: {
-                getSectionEvent(events, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::pes: {
-                getPesEvent(events, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::tsRecord: {
-                getTsRecordEvent(events, eventsExt, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::mmtpRecord: {
-                getMmtpRecordEvent(events, eventsExt, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::download: {
-                getDownloadEvent(events, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::ipPayload: {
-                getIpPayloadEvent(events, tunerEvent);
-                break;
-            }
-            case DemuxFilterEvent::Event::hidl_discriminator::temi: {
-                getTemiEvent(events, tunerEvent);
-                break;
-            }
-            default: {
-                break;
-            }
-        }
+void TunerFilter::FilterCallback::sendSharedFilterStatus(int32_t status) {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr && mOriginalCallback != nullptr) {
+        mTunerFilterCallback->onFilterStatus(static_cast<DemuxFilterStatus>(status));
     }
 }
 
-void TunerFilter::FilterCallback::getMediaEvent(
-        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
-    for (DemuxFilterEvent::Event e : events) {
-        DemuxFilterMediaEvent mediaEvent = e.media();
-        TunerFilterMediaEvent tunerMedia;
+void TunerFilter::FilterCallback::attachSharedFilterCallback(
+        const shared_ptr<ITunerFilterCallback>& in_cb) {
+    Mutex::Autolock _l(mCallbackLock);
+    mOriginalCallback = mTunerFilterCallback;
+    mTunerFilterCallback = in_cb;
+}
 
-        tunerMedia.streamId = static_cast<char16_t>(mediaEvent.streamId);
-        tunerMedia.isPtsPresent = mediaEvent.isPtsPresent;
-        tunerMedia.pts = static_cast<long>(mediaEvent.pts);
-        tunerMedia.dataLength = static_cast<int>(mediaEvent.dataLength);
-        tunerMedia.offset = static_cast<int>(mediaEvent.offset);
-        tunerMedia.isSecureMemory = mediaEvent.isSecureMemory;
-        tunerMedia.avDataId = static_cast<long>(mediaEvent.avDataId);
-        tunerMedia.mpuSequenceNumber = static_cast<int>(mediaEvent.mpuSequenceNumber);
-        tunerMedia.isPesPrivateData = mediaEvent.isPesPrivateData;
-
-        if (mediaEvent.extraMetaData.getDiscriminator() ==
-                DemuxFilterMediaEvent::ExtraMetaData::hidl_discriminator::audio) {
-            tunerMedia.isAudioExtraMetaData = true;
-            tunerMedia.audio = {
-                .adFade = static_cast<int8_t>(
-                        mediaEvent.extraMetaData.audio().adFade),
-                .adPan = static_cast<int8_t>(
-                        mediaEvent.extraMetaData.audio().adPan),
-                .versionTextTag = static_cast<int8_t>(
-                        mediaEvent.extraMetaData.audio().versionTextTag),
-                .adGainCenter = static_cast<int8_t>(
-                        mediaEvent.extraMetaData.audio().adGainCenter),
-                .adGainFront = static_cast<int8_t>(
-                        mediaEvent.extraMetaData.audio().adGainFront),
-                .adGainSurround = static_cast<int8_t>(
-                        mediaEvent.extraMetaData.audio().adGainSurround),
-            };
-        } else {
-            tunerMedia.isAudioExtraMetaData = false;
-        }
-
-        if (mediaEvent.avMemory.getNativeHandle() != nullptr) {
-            tunerMedia.avMemory = dupToAidl(mediaEvent.avMemory.getNativeHandle());
-        }
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::media>(move(tunerMedia));
-        res.push_back(move(tunerEvent));
+void TunerFilter::FilterCallback::detachSharedFilterCallback() {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr && mOriginalCallback != nullptr) {
+        mTunerFilterCallback = mOriginalCallback;
+        mOriginalCallback = nullptr;
     }
 }
 
-void TunerFilter::FilterCallback::getSectionEvent(
-        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
-    for (DemuxFilterEvent::Event e : events) {
-        DemuxFilterSectionEvent sectionEvent = e.section();
-        TunerFilterSectionEvent tunerSection;
-
-        tunerSection.tableId = static_cast<char16_t>(sectionEvent.tableId);
-        tunerSection.version = static_cast<char16_t>(sectionEvent.version);
-        tunerSection.sectionNum = static_cast<char16_t>(sectionEvent.sectionNum);
-        tunerSection.dataLength = static_cast<char16_t>(sectionEvent.dataLength);
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::section>(move(tunerSection));
-        res.push_back(move(tunerEvent));
-    }
+void TunerFilter::FilterCallback::detachCallbacks() {
+    Mutex::Autolock _l(mCallbackLock);
+    mOriginalCallback = nullptr;
+    mTunerFilterCallback = nullptr;
 }
 
-void TunerFilter::FilterCallback::getPesEvent(
-        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
-    for (DemuxFilterEvent::Event e : events) {
-        DemuxFilterPesEvent pesEvent = e.pes();
-        TunerFilterPesEvent tunerPes;
-
-        tunerPes.streamId = static_cast<char16_t>(pesEvent.streamId);
-        tunerPes.dataLength = static_cast<char16_t>(pesEvent.dataLength);
-        tunerPes.mpuSequenceNumber = static_cast<int>(pesEvent.mpuSequenceNumber);
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::pes>(move(tunerPes));
-        res.push_back(move(tunerEvent));
-    }
-}
-
-void TunerFilter::FilterCallback::getTsRecordEvent(vector<DemuxFilterEvent::Event>& events,
-        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
-    for (int i = 0; i < events.size(); i++) {
-        TunerFilterTsRecordEvent tunerTsRecord;
-        DemuxFilterTsRecordEvent tsRecordEvent = events[i].tsRecord();
-
-        TunerFilterScIndexMask scIndexMask;
-        if (tsRecordEvent.scIndexMask.getDiscriminator()
-                == DemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::sc) {
-            scIndexMask.set<TunerFilterScIndexMask::sc>(
-                    static_cast<int>(tsRecordEvent.scIndexMask.sc()));
-        } else if (tsRecordEvent.scIndexMask.getDiscriminator()
-                == DemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::scHevc) {
-            scIndexMask.set<TunerFilterScIndexMask::scHevc>(
-                    static_cast<int>(tsRecordEvent.scIndexMask.scHevc()));
-        }
-
-        if (tsRecordEvent.pid.getDiscriminator() == DemuxPid::hidl_discriminator::tPid) {
-            tunerTsRecord.pid = static_cast<char16_t>(tsRecordEvent.pid.tPid());
-        } else {
-            tunerTsRecord.pid = static_cast<char16_t>(Constant::INVALID_TS_PID);
-        }
-
-        tunerTsRecord.scIndexMask = scIndexMask;
-        tunerTsRecord.tsIndexMask = static_cast<int>(tsRecordEvent.tsIndexMask);
-        tunerTsRecord.byteNumber = static_cast<long>(tsRecordEvent.byteNumber);
-
-        if (eventsExt.size() > i && eventsExt[i].getDiscriminator() ==
-                    DemuxFilterEventExt::Event::hidl_discriminator::tsRecord) {
-            tunerTsRecord.isExtended = true;
-            tunerTsRecord.pts = static_cast<long>(eventsExt[i].tsRecord().pts);
-            tunerTsRecord.firstMbInSlice = static_cast<int>(eventsExt[i].tsRecord().firstMbInSlice);
-        } else {
-            tunerTsRecord.isExtended = false;
-        }
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::tsRecord>(move(tunerTsRecord));
-        res.push_back(move(tunerEvent));
-    }
-}
-
-void TunerFilter::FilterCallback::getMmtpRecordEvent(vector<DemuxFilterEvent::Event>& events,
-        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
-    for (int i = 0; i < events.size(); i++) {
-        TunerFilterMmtpRecordEvent tunerMmtpRecord;
-        DemuxFilterMmtpRecordEvent mmtpRecordEvent = events[i].mmtpRecord();
-
-        tunerMmtpRecord.scHevcIndexMask = static_cast<int>(mmtpRecordEvent.scHevcIndexMask);
-        tunerMmtpRecord.byteNumber = static_cast<long>(mmtpRecordEvent.byteNumber);
-
-        if (eventsExt.size() > i && eventsExt[i].getDiscriminator() ==
-                    DemuxFilterEventExt::Event::hidl_discriminator::mmtpRecord) {
-            tunerMmtpRecord.isExtended = true;
-            tunerMmtpRecord.pts = static_cast<long>(eventsExt[i].mmtpRecord().pts);
-            tunerMmtpRecord.mpuSequenceNumber =
-                    static_cast<int>(eventsExt[i].mmtpRecord().mpuSequenceNumber);
-            tunerMmtpRecord.firstMbInSlice =
-                    static_cast<int>(eventsExt[i].mmtpRecord().firstMbInSlice);
-            tunerMmtpRecord.tsIndexMask = static_cast<int>(eventsExt[i].mmtpRecord().tsIndexMask);
-        } else {
-            tunerMmtpRecord.isExtended = false;
-        }
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::mmtpRecord>(move(tunerMmtpRecord));
-        res.push_back(move(tunerEvent));
-    }
-}
-
-void TunerFilter::FilterCallback::getDownloadEvent(
-        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
-    for (DemuxFilterEvent::Event e : events) {
-        DemuxFilterDownloadEvent downloadEvent = e.download();
-        TunerFilterDownloadEvent tunerDownload;
-
-        tunerDownload.itemId = static_cast<int>(downloadEvent.itemId);
-        tunerDownload.itemFragmentIndex = static_cast<int>(downloadEvent.itemFragmentIndex);
-        tunerDownload.mpuSequenceNumber = static_cast<int>(downloadEvent.mpuSequenceNumber);
-        tunerDownload.lastItemFragmentIndex = static_cast<int>(downloadEvent.lastItemFragmentIndex);
-        tunerDownload.dataLength = static_cast<char16_t>(downloadEvent.dataLength);
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::download>(move(tunerDownload));
-        res.push_back(move(tunerEvent));
-    }
-}
-
-void TunerFilter::FilterCallback::getIpPayloadEvent(
-        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
-    for (DemuxFilterEvent::Event e : events) {
-        DemuxFilterIpPayloadEvent ipPayloadEvent = e.ipPayload();
-        TunerFilterIpPayloadEvent tunerIpPayload;
-
-        tunerIpPayload.dataLength = static_cast<char16_t>(ipPayloadEvent.dataLength);
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::ipPayload>(move(tunerIpPayload));
-        res.push_back(move(tunerEvent));
-    }
-}
-
-void TunerFilter::FilterCallback::getTemiEvent(
-        vector<DemuxFilterEvent::Event>& events, vector<TunerFilterEvent>& res) {
-    for (DemuxFilterEvent::Event e : events) {
-        DemuxFilterTemiEvent temiEvent = e.temi();
-        TunerFilterTemiEvent tunerTemi;
-
-        tunerTemi.pts = static_cast<long>(temiEvent.pts);
-        tunerTemi.descrTag = static_cast<int8_t>(temiEvent.descrTag);
-        vector<uint8_t> descrData = temiEvent.descrData;
-        tunerTemi.descrData.resize(descrData.size());
-        copy(descrData.begin(), descrData.end(), tunerTemi.descrData.begin());
-
-        TunerFilterEvent tunerEvent;
-        tunerEvent.set<TunerFilterEvent::temi>(move(tunerTemi));
-        res.push_back(move(tunerEvent));
-    }
-}
-
-void TunerFilter::FilterCallback::getMonitorEvent(
-        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
-    DemuxFilterMonitorEvent monitorEvent = eventsExt[0].monitorEvent();
-    TunerFilterMonitorEvent tunerMonitor;
-
-    switch (monitorEvent.getDiscriminator()) {
-        case DemuxFilterMonitorEvent::hidl_discriminator::scramblingStatus: {
-            tunerMonitor.set<TunerFilterMonitorEvent::scramblingStatus>(
-                    static_cast<int>(monitorEvent.scramblingStatus()));
-            break;
-        }
-        case DemuxFilterMonitorEvent::hidl_discriminator::cid: {
-            tunerMonitor.set<TunerFilterMonitorEvent::cid>(static_cast<int>(monitorEvent.cid()));
-            break;
-        }
-    }
-
-    TunerFilterEvent tunerEvent;
-    tunerEvent.set<TunerFilterEvent::monitor>(move(tunerMonitor));
-    res.push_back(move(tunerEvent));
-}
-
-void TunerFilter::FilterCallback::getRestartEvent(
-        vector<DemuxFilterEventExt::Event>& eventsExt, vector<TunerFilterEvent>& res) {
-    TunerFilterEvent tunerEvent;
-    tunerEvent.set<TunerFilterEvent::startId>(static_cast<int>(eventsExt[0].startId()));
-    res.push_back(move(tunerEvent));
-}
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
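
The FilterCallback changes above replace the HIDL event-translation helpers with a small shared-filter callback switch: attachSharedFilterCallback() parks the original client callback and routes events to the shared-filter client, detachSharedFilterCallback() restores it, and both run under mCallbackLock. sendSharedFilterStatus() relies on the same park/restore state, forwarding a status only while a shared callback is attached (both pointers non-null). A minimal self-contained sketch of that pattern, using hypothetical names rather than the Tuner types, looks like this:

    #include <memory>
    #include <mutex>

    // Callback is a stand-in for ITunerFilterCallback in this sketch.
    struct Callback {
        virtual ~Callback() = default;
        virtual void onStatus(int status) = 0;
    };

    class CallbackSwitch {
    public:
        // Park the original client callback and route events to the shared client.
        void attach(std::shared_ptr<Callback> shared) {
            std::lock_guard<std::mutex> lock(mLock);
            mOriginal = mCurrent;
            mCurrent = std::move(shared);
        }

        // Restore the parked callback, if any.
        void detach() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mOriginal != nullptr) {
                mCurrent = mOriginal;
                mOriginal = nullptr;
            }
        }

        // Deliver an event to whichever callback is currently active.
        void onStatus(int status) {
            std::lock_guard<std::mutex> lock(mLock);
            if (mCurrent != nullptr) {
                mCurrent->onStatus(status);
            }
        }

    private:
        std::mutex mLock;
        std::shared_ptr<Callback> mCurrent;
        std::shared_ptr<Callback> mOriginal;
    };
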
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index ff4728c..93d8898 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -17,176 +17,108 @@
 #ifndef ANDROID_MEDIA_TUNERFILTER_H
 #define ANDROID_MEDIA_TUNERFILTER_H
 
+#include <aidl/android/hardware/tv/tuner/AvStreamType.h>
+#include <aidl/android/hardware/tv/tuner/BnFilterCallback.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterEvent.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterStatus.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterType.h>
+#include <aidl/android/hardware/tv/tuner/FilterDelayHint.h>
+#include <aidl/android/hardware/tv/tuner/IFilter.h>
 #include <aidl/android/media/tv/tuner/BnTunerFilter.h>
 #include <aidl/android/media/tv/tuner/ITunerFilterCallback.h>
-#include <aidlcommonsupport/NativeHandle.h>
-#include <android/hardware/tv/tuner/1.0/ITuner.h>
-#include <android/hardware/tv/tuner/1.1/IFilter.h>
-#include <android/hardware/tv/tuner/1.1/IFilterCallback.h>
-#include <android/hardware/tv/tuner/1.1/types.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <fmq/ConvertMQDescriptors.h>
-#include <fmq/MessageQueue.h>
+#include <utils/Mutex.h>
 
-using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::hardware::common::NativeHandle;
 using ::aidl::android::hardware::common::fmq::MQDescriptor;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::hardware::tv::tuner::AvStreamType;
+using ::aidl::android::hardware::tv::tuner::BnFilterCallback;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterType;
+using ::aidl::android::hardware::tv::tuner::FilterDelayHint;
+using ::aidl::android::hardware::tv::tuner::IFilter;
 using ::aidl::android::media::tv::tuner::BnTunerFilter;
-using ::aidl::android::media::tv::tuner::ITunerFilterCallback;
-using ::aidl::android::media::tv::tuner::TunerDemuxIpAddress;
-using ::aidl::android::media::tv::tuner::TunerFilterConfiguration;
-using ::aidl::android::media::tv::tuner::TunerFilterDownloadEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterIpPayloadEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterMediaEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterMmtpRecordEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterMonitorEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterPesEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterScIndexMask;
-using ::aidl::android::media::tv::tuner::TunerFilterSectionEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterSharedHandleInfo;
-using ::aidl::android::media::tv::tuner::TunerFilterSettings;
-using ::aidl::android::media::tv::tuner::TunerFilterTemiEvent;
-using ::aidl::android::media::tv::tuner::TunerFilterTsRecordEvent;
-using ::android::hardware::MQDescriptorSync;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hardware::hidl_array;
-using ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterIpPayloadEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterMediaEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterMmtpRecordEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterPesDataSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterPesEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterRecordSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterTemiEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterTsRecordEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxIpFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxPid;
-using ::android::hardware::tv::tuner::V1_0::IFilter;
-using ::android::hardware::tv::tuner::V1_1::AvStreamType;
-using ::android::hardware::tv::tuner::V1_1::DemuxFilterEventExt;
-using ::android::hardware::tv::tuner::V1_1::DemuxFilterMonitorEvent;
-using ::android::hardware::tv::tuner::V1_1::DemuxFilterTsRecordEventExt;
-using ::android::hardware::tv::tuner::V1_1::IFilterCallback;
+using ::android::Mutex;
 
+using namespace std;
+
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-using MQDesc = MQDescriptorSync<uint8_t>;
 using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
 
-const static int IP_V4_LENGTH = 4;
-const static int IP_V6_LENGTH = 16;
-
 class TunerFilter : public BnTunerFilter {
 
 public:
-    TunerFilter(sp<IFilter> filter, int mainType, int subTyp);
-    virtual ~TunerFilter();
-    Status getId(int32_t* _aidl_return) override;
-    Status getId64Bit(int64_t* _aidl_return) override;
-    Status getQueueDesc(AidlMQDesc* _aidl_return) override;
-    Status configure(const TunerFilterConfiguration& config) override;
-    Status configureMonitorEvent(int monitorEventType) override;
-    Status configureIpFilterContextId(int cid) override;
-    Status configureAvStreamType(int avStreamType) override;
-    Status getAvSharedHandleInfo(TunerFilterSharedHandleInfo* _aidl_return) override;
-    Status releaseAvHandle(const ::aidl::android::hardware::common::NativeHandle& handle,
-            int64_t avDataId) override;
-    Status setDataSource(const std::shared_ptr<ITunerFilter>& filter) override;
-    Status start() override;
-    Status stop() override;
-    Status flush() override;
-    Status close() override;
-    sp<IFilter> getHalFilter();
+    class FilterCallback : public BnFilterCallback {
+    public:
+        FilterCallback(const shared_ptr<ITunerFilterCallback>& tunerFilterCallback)
+              : mTunerFilterCallback(tunerFilterCallback), mOriginalCallback(nullptr){};
 
-    struct FilterCallback : public IFilterCallback {
-        FilterCallback(const std::shared_ptr<ITunerFilterCallback> tunerFilterCallback)
-                : mTunerFilterCallback(tunerFilterCallback) {};
+        ::ndk::ScopedAStatus onFilterEvent(const vector<DemuxFilterEvent>& events) override;
+        ::ndk::ScopedAStatus onFilterStatus(DemuxFilterStatus status) override;
 
-        virtual Return<void> onFilterEvent(const DemuxFilterEvent& filterEvent);
-        virtual Return<void> onFilterEvent_1_1(const DemuxFilterEvent& filterEvent,
-                const DemuxFilterEventExt& filterEventExt);
-        virtual Return<void> onFilterStatus(DemuxFilterStatus status);
+        void sendSharedFilterStatus(int32_t status);
+        void attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb);
+        void detachSharedFilterCallback();
+        void detachCallbacks();
 
-        void getAidlFilterEvent(std::vector<DemuxFilterEvent::Event>& events,
-                std::vector<DemuxFilterEventExt::Event>& eventsExt,
-                std::vector<TunerFilterEvent>& tunerEvent);
-
-        void getMediaEvent(
-                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
-        void getSectionEvent(
-                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
-        void getPesEvent(
-                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
-        void getTsRecordEvent(
-                std::vector<DemuxFilterEvent::Event>& events,
-                std::vector<DemuxFilterEventExt::Event>& eventsExt,
-                std::vector<TunerFilterEvent>& res);
-        void getMmtpRecordEvent(
-                std::vector<DemuxFilterEvent::Event>& events,
-                std::vector<DemuxFilterEventExt::Event>& eventsExt,
-                std::vector<TunerFilterEvent>& res);
-        void getDownloadEvent(
-                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
-        void getIpPayloadEvent(
-                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
-        void getTemiEvent(
-                std::vector<DemuxFilterEvent::Event>& events, std::vector<TunerFilterEvent>& res);
-        void getMonitorEvent(
-                std::vector<DemuxFilterEventExt::Event>& eventsExt,
-                std::vector<TunerFilterEvent>& res);
-        void getRestartEvent(
-                std::vector<DemuxFilterEventExt::Event>& eventsExt,
-                std::vector<TunerFilterEvent>& res);
-
-        std::shared_ptr<ITunerFilterCallback> mTunerFilterCallback;
+    private:
+        shared_ptr<ITunerFilterCallback> mTunerFilterCallback;
+        shared_ptr<ITunerFilterCallback> mOriginalCallback;
+        Mutex mCallbackLock;
     };
 
+    TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb, DemuxFilterType type);
+    virtual ~TunerFilter();
+
+    ::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getId64Bit(int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getQueueDesc(AidlMQDesc* _aidl_return) override;
+    ::ndk::ScopedAStatus configure(const DemuxFilterSettings& in_settings) override;
+    ::ndk::ScopedAStatus configureMonitorEvent(int32_t in_monitorEventTypes) override;
+    ::ndk::ScopedAStatus configureIpFilterContextId(int32_t in_cid) override;
+    ::ndk::ScopedAStatus configureAvStreamType(const AvStreamType& in_avStreamType) override;
+    ::ndk::ScopedAStatus getAvSharedHandle(NativeHandle* out_avMemory,
+                                           int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus releaseAvHandle(const NativeHandle& in_handle,
+                                         int64_t in_avDataId) override;
+    ::ndk::ScopedAStatus setDataSource(const shared_ptr<ITunerFilter>& in_filter) override;
+    ::ndk::ScopedAStatus start() override;
+    ::ndk::ScopedAStatus stop() override;
+    ::ndk::ScopedAStatus flush() override;
+    ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus acquireSharedFilterToken(string* _aidl_return) override;
+    ::ndk::ScopedAStatus freeSharedFilterToken(const string& in_filterToken) override;
+    ::ndk::ScopedAStatus getFilterType(DemuxFilterType* _aidl_return) override;
+    ::ndk::ScopedAStatus setDelayHint(const FilterDelayHint& in_hint) override;
+
+    bool isSharedFilterAllowed(int32_t pid);
+    void attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb);
+    shared_ptr<IFilter> getHalFilter();
+
 private:
-    DemuxFilterAvSettings getAvSettings(const TunerFilterSettings& settings);
-    DemuxFilterSectionSettings getSectionSettings(const TunerFilterSettings& settings);
-    DemuxFilterPesDataSettings getPesDataSettings(const TunerFilterSettings& settings);
-    DemuxFilterRecordSettings getRecordSettings(const TunerFilterSettings& settings);
-    DemuxFilterDownloadSettings getDownloadSettings(const TunerFilterSettings& settings);
-
-    bool isAudioFilter();
-    bool isVideoFilter();
-    bool getHidlAvStreamType(int avStreamType, AvStreamType& type);
-
-    void getHidlTsSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
-    void getHidlMmtpSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
-    void getHidlIpSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
-    void getHidlTlvSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
-    void getHidlAlpSettings(
-        const TunerFilterConfiguration& config, DemuxFilterSettings& settings);
-
-    hidl_array<uint8_t, IP_V4_LENGTH> getIpV4Address(TunerDemuxIpAddress addr);
-    hidl_array<uint8_t, IP_V6_LENGTH> getIpV6Address(TunerDemuxIpAddress addr);
-
-    sp<IFilter> mFilter;
-    sp<::android::hardware::tv::tuner::V1_1::IFilter> mFilter_1_1;
+    shared_ptr<IFilter> mFilter;
     int32_t mId;
     int64_t mId64Bit;
-    int mMainType;
-    int mSubType;
+    DemuxFilterType mType;
+    bool mStarted;
+    bool mShared;
+    int32_t mClientPid;
+    shared_ptr<FilterCallback> mFilterCallback;
+    Mutex mLock;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERFILTER_H
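
The rewritten header drops the HIDL 1.0/1.1 IFilter/IFilterCallback plumbing in favour of the AIDL IFilter and BnFilterCallback, and every ITunerFilter method now returns ::ndk::ScopedAStatus directly. The TunerFrontend changes that follow use the same guard-and-forward shape for each call: if the HAL object is gone, report Result::UNAVAILABLE as a service-specific error, otherwise hand the call straight to the HAL and return its status. A minimal sketch of that shape (forwardToHal is a hypothetical helper, not part of the change):

    #include <memory>

    #include <aidl/android/hardware/tv/tuner/Result.h>
    #include <android/binder_auto_utils.h>

    using ::aidl::android::hardware::tv::tuner::Result;

    // Guard-and-forward: report UNAVAILABLE when the HAL handle is gone,
    // otherwise invoke the call and pass its ScopedAStatus back unchanged.
    template <typename Hal, typename Fn>
    ::ndk::ScopedAStatus forwardToHal(const std::shared_ptr<Hal>& hal, Fn&& fn) {
        if (hal == nullptr) {
            return ::ndk::ScopedAStatus::fromServiceSpecificError(
                    static_cast<int32_t>(Result::UNAVAILABLE));
        }
        return fn(hal);
    }

With such a helper, a call like stopTune() reduces to forwardToHal(mFrontend, [](auto& f) { return f->stopTune(); }); the bodies in the diff below spell the same two steps out inline.
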
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 74b5519..5116305 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -1,5 +1,5 @@
 /**
- * Copyright 2020, The Android Open Source Project
+ * Copyright 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,1081 +14,208 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
 #define LOG_TAG "TunerFrontend"
 
 #include "TunerFrontend.h"
+
+#include <aidl/android/hardware/tv/tuner/Result.h>
+
 #include "TunerLnb.h"
 
-using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3PlpSettings;
-using ::aidl::android::media::tv::tuner::TunerFrontendScanAtsc3PlpInfo;
-using ::aidl::android::media::tv::tuner::TunerFrontendStatusAtsc3PlpInfo;
-using ::aidl::android::media::tv::tuner::TunerFrontendUnionSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendAnalogSifStandard;
-using ::android::hardware::tv::tuner::V1_0::FrontendAnalogType;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtscModulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Bandwidth;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3CodeRate;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3DemodOutputFormat;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Fec;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Modulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3TimeInterleaveMode;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbcAnnex;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbcModulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbcOuterFec;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbcSpectralInversion;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsModulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsPilot;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsRolloff;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsStandard;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsVcmMode;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtBandwidth;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtCoderate;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtConstellation;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtGuardInterval;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtHierarchy;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtPlpMode;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtStandard;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbtTransmissionMode;
-using ::android::hardware::tv::tuner::V1_0::FrontendInnerFec;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Coderate;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Modulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Rolloff;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Settings;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsCoderate;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsModulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsRolloff;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsStreamIdType;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtBandwidth;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtCoderate;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtGuardInterval;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtMode;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtModulation;
-using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendModulationStatus;
-using ::android::hardware::tv::tuner::V1_0::FrontendScanAtsc3PlpInfo;
-using ::android::hardware::tv::tuner::V1_0::FrontendScanType;
-using ::android::hardware::tv::tuner::V1_0::FrontendStatusType;
-using ::android::hardware::tv::tuner::V1_0::Result;
-using ::android::hardware::tv::tuner::V1_1::FrontendAnalogAftFlag;
-using ::android::hardware::tv::tuner::V1_1::FrontendBandwidth;
-using ::android::hardware::tv::tuner::V1_1::FrontendCableTimeInterleaveMode;
-using ::android::hardware::tv::tuner::V1_1::FrontendDvbcBandwidth;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbBandwidth;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbCodeRate;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbGuardInterval;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbModulation;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbTimeInterleaveMode;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbTransmissionMode;
-using ::android::hardware::tv::tuner::V1_1::FrontendDvbsScanType;
-using ::android::hardware::tv::tuner::V1_1::FrontendGuardInterval;
-using ::android::hardware::tv::tuner::V1_1::FrontendInterleaveMode;
-using ::android::hardware::tv::tuner::V1_1::FrontendModulation;
-using ::android::hardware::tv::tuner::V1_1::FrontendRollOff;
-using ::android::hardware::tv::tuner::V1_1::FrontendTransmissionMode;
-using ::android::hardware::tv::tuner::V1_1::FrontendSpectralInversion;
-using ::android::hardware::tv::tuner::V1_1::FrontendStatusTypeExt1_1;
+using ::aidl::android::hardware::tv::tuner::Result;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-TunerFrontend::TunerFrontend(sp<IFrontend> frontend, int id) {
+TunerFrontend::TunerFrontend(shared_ptr<IFrontend> frontend, int id) {
     mFrontend = frontend;
-    mFrontend_1_1 = ::android::hardware::tv::tuner::V1_1::IFrontend::castFrom(mFrontend);
     mId = id;
 }
 
 TunerFrontend::~TunerFrontend() {
-    mFrontend = NULL;
-    mFrontend_1_1 = NULL;
+    mFrontend = nullptr;
     mId = -1;
 }
 
-Status TunerFrontend::setCallback(
+::ndk::ScopedAStatus TunerFrontend::setCallback(
         const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
-    if (mFrontend == NULL) {
+    if (mFrontend == nullptr) {
         ALOGE("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    if (tunerFrontendCallback == NULL) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    if (tunerFrontendCallback == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
 
-    sp<IFrontendCallback> frontendCallback = new FrontendCallback(tunerFrontendCallback);
-    Result status = mFrontend->setCallback(frontendCallback);
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    shared_ptr<IFrontendCallback> frontendCallback =
+            ::ndk::SharedRefBase::make<FrontendCallback>(tunerFrontendCallback);
+    return mFrontend->setCallback(frontendCallback);
 }
 
-Status TunerFrontend::tune(const TunerFrontendSettings& settings) {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::tune(const FrontendSettings& settings) {
+    if (mFrontend == nullptr) {
         ALOGE("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    FrontendSettings frontendSettings = getHidlFrontendSettings(settings);
-    if (settings.isExtended) {
-        if (mFrontend_1_1 == NULL) {
-            ALOGE("IFrontend_1_1 is not initialized");
-            return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-        }
-        FrontendSettingsExt1_1 frontendSettingsExt = getHidlFrontendSettingsExt(settings);
-        status = mFrontend_1_1->tune_1_1(frontendSettings, frontendSettingsExt);
-    } else {
-        status = mFrontend->tune(frontendSettings);
-    }
-
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return mFrontend->tune(settings);
 }
 
-Status TunerFrontend::stopTune() {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::stopTune() {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mFrontend->stopTune();
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return mFrontend->stopTune();
 }
 
-Status TunerFrontend::scan(const TunerFrontendSettings& settings, int frontendScanType) {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::scan(const FrontendSettings& settings,
+                                         FrontendScanType frontendScanType) {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    FrontendSettings frontendSettings = getHidlFrontendSettings(settings);
-    if (settings.isExtended) {
-        if (mFrontend_1_1 == NULL) {
-            ALOGE("IFrontend_1_1 is not initialized");
-            return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-        }
-        FrontendSettingsExt1_1 frontendSettingsExt = getHidlFrontendSettingsExt(settings);
-        status = mFrontend_1_1->scan_1_1(frontendSettings,
-                static_cast<FrontendScanType>(frontendScanType), frontendSettingsExt);
-    } else {
-        status = mFrontend->scan(
-                frontendSettings, static_cast<FrontendScanType>(frontendScanType));
-    }
-
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return mFrontend->scan(settings, frontendScanType);
 }
 
-Status TunerFrontend::stopScan() {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::stopScan() {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mFrontend->stopScan();
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return mFrontend->stopScan();
 }
 
-Status TunerFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mFrontend->setLnb(static_cast<TunerLnb*>(lnb.get())->getId());
-    if (status == Result::SUCCESS) {
-        return Status::ok();
+    if (lnb == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
 
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return mFrontend->setLnb(static_cast<TunerLnb*>(lnb.get())->getId());
 }
 
-Status TunerFrontend::setLna(bool bEnable) {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::linkCiCamToFrontend(int32_t ciCamId, int32_t* _aidl_return) {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mFrontend->setLna(bEnable);
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    return mFrontend->linkCiCam(ciCamId, _aidl_return);
 }
 
-Status TunerFrontend::linkCiCamToFrontend(int ciCamId, int32_t* _aidl_return) {
-    if (mFrontend_1_1 == NULL) {
-        ALOGD("IFrontend_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    int ltsId;
-    Result status;
-    mFrontend_1_1->linkCiCam(static_cast<uint32_t>(ciCamId),
-            [&](Result r, uint32_t id) {
-                status = r;
-                ltsId = id;
-            });
-
-    if (status == Result::SUCCESS) {
-        *_aidl_return = ltsId;
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-}
-
-Status TunerFrontend::unlinkCiCamToFrontend(int ciCamId) {
-    if (mFrontend_1_1 == NULL) {
-        ALOGD("IFrontend_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    Result status = mFrontend_1_1->unlinkCiCam(ciCamId);
-    if (status == Result::SUCCESS) {
-        return Status::ok();
-    }
-
-    return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-}
-
-Status TunerFrontend::close() {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::unlinkCiCamToFrontend(int32_t ciCamId) {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mFrontend->close();
-    mFrontend = NULL;
-    mFrontend_1_1 = NULL;
-
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mFrontend->unlinkCiCam(ciCamId);
 }
 
-Status TunerFrontend::getStatus(const vector<int32_t>& statusTypes,
-        vector<TunerFrontendStatus>* _aidl_return) {
-    if (mFrontend == NULL) {
+::ndk::ScopedAStatus TunerFrontend::close() {
+    if (mFrontend == nullptr) {
         ALOGD("IFrontend is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res;
-    vector<FrontendStatus> status;
-    vector<FrontendStatusType> types;
-    for (auto s : statusTypes) {
-        types.push_back(static_cast<FrontendStatusType>(s));
-    }
+    auto res = mFrontend->close();
+    mFrontend = nullptr;
 
-    mFrontend->getStatus(types, [&](Result r, const hidl_vec<FrontendStatus>& s) {
-        res = r;
-        status = s;
-    });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    getAidlFrontendStatus(status, *_aidl_return);
-    return Status::ok();
+    return res;
 }
 
-Status TunerFrontend::getStatusExtended_1_1(const vector<int32_t>& statusTypes,
-        vector<TunerFrontendStatus>* _aidl_return) {
-    if (mFrontend_1_1 == NULL) {
-        ALOGD("IFrontend_1_1 is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+::ndk::ScopedAStatus TunerFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
+                                              vector<FrontendStatus>* _aidl_return) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res;
-    vector<FrontendStatusExt1_1> status;
-    vector<FrontendStatusTypeExt1_1> types;
-    for (auto s : statusTypes) {
-        types.push_back(static_cast<FrontendStatusTypeExt1_1>(s));
-    }
-
-    mFrontend_1_1->getStatusExt1_1(types, [&](Result r, const hidl_vec<FrontendStatusExt1_1>& s) {
-        res = r;
-        status = s;
-    });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    getAidlFrontendStatusExt(status, *_aidl_return);
-    return Status::ok();
+    return mFrontend->getStatus(in_statusTypes, _aidl_return);
 }
 
-Status TunerFrontend::getFrontendId(int* _aidl_return) {
+::ndk::ScopedAStatus TunerFrontend::getFrontendId(int32_t* _aidl_return) {
     *_aidl_return = mId;
-    return Status::ok();
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerFrontend::getHardwareInfo(std::string* _aidl_return) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mFrontend->getHardwareInfo(_aidl_return);
+}
+
+::ndk::ScopedAStatus TunerFrontend::removeOutputPid(int32_t in_pid) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mFrontend->removeOutputPid(in_pid);
+}
+
+::ndk::ScopedAStatus TunerFrontend::getFrontendStatusReadiness(
+        const std::vector<FrontendStatusType>& in_statusTypes,
+        std::vector<FrontendStatusReadiness>* _aidl_return) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mFrontend->getFrontendStatusReadiness(in_statusTypes, _aidl_return);
 }
 
 /////////////// FrontendCallback ///////////////////////
-
-Return<void> TunerFrontend::FrontendCallback::onEvent(FrontendEventType frontendEventType) {
-    ALOGD("FrontendCallback::onEvent, type=%d", frontendEventType);
-    mTunerFrontendCallback->onEvent((int)frontendEventType);
-    return Void();
+::ndk::ScopedAStatus TunerFrontend::FrontendCallback::onEvent(FrontendEventType frontendEventType) {
+    ALOGV("FrontendCallback::onEvent, type=%d", frontendEventType);
+    if (mTunerFrontendCallback != nullptr) {
+        mTunerFrontendCallback->onEvent(frontendEventType);
+    }
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<void> TunerFrontend::FrontendCallback::onScanMessage(
+::ndk::ScopedAStatus TunerFrontend::FrontendCallback::onScanMessage(
         FrontendScanMessageType type, const FrontendScanMessage& message) {
-    ALOGD("FrontendCallback::onScanMessage, type=%d", type);
-    TunerFrontendScanMessage scanMessage;
-    switch(type) {
-        case FrontendScanMessageType::LOCKED: {
-            scanMessage.set<TunerFrontendScanMessage::isLocked>(message.isLocked());
-            break;
-        }
-        case FrontendScanMessageType::END: {
-            scanMessage.set<TunerFrontendScanMessage::isEnd>(message.isEnd());
-            break;
-        }
-        case FrontendScanMessageType::PROGRESS_PERCENT: {
-            scanMessage.set<TunerFrontendScanMessage::progressPercent>(message.progressPercent());
-            break;
-        }
-        case FrontendScanMessageType::FREQUENCY: {
-            auto f = message.frequencies();
-            vector<int> frequencies(begin(f), end(f));
-            scanMessage.set<TunerFrontendScanMessage::frequencies>(frequencies);
-            break;
-        }
-        case FrontendScanMessageType::SYMBOL_RATE: {
-            auto s = message.symbolRates();
-            vector<int> symbolRates(begin(s), end(s));
-            scanMessage.set<TunerFrontendScanMessage::symbolRates>(symbolRates);
-            break;
-        }
-        case FrontendScanMessageType::HIERARCHY: {
-            scanMessage.set<TunerFrontendScanMessage::hierarchy>((int)message.hierarchy());
-            break;
-        }
-        case FrontendScanMessageType::ANALOG_TYPE: {
-            scanMessage.set<TunerFrontendScanMessage::analogType>((int)message.analogType());
-            break;
-        }
-        case FrontendScanMessageType::PLP_IDS: {
-            auto p = message.plpIds();
-            vector<uint8_t> plpIds(begin(p), end(p));
-            scanMessage.set<TunerFrontendScanMessage::plpIds>(plpIds);
-            break;
-        }
-        case FrontendScanMessageType::GROUP_IDS: {
-            auto g = message.groupIds();
-            vector<uint8_t> groupIds(begin(g), end(g));
-            scanMessage.set<TunerFrontendScanMessage::groupIds>(groupIds);
-            break;
-        }
-        case FrontendScanMessageType::INPUT_STREAM_IDS: {
-            auto i = message.inputStreamIds();
-            vector<char16_t> streamIds(begin(i), end(i));
-            scanMessage.set<TunerFrontendScanMessage::inputStreamIds>(streamIds);
-            break;
-        }
-        case FrontendScanMessageType::STANDARD: {
-            FrontendScanMessage::Standard std = message.std();
-            int standard;
-            if (std.getDiscriminator() == FrontendScanMessage::Standard::hidl_discriminator::sStd) {
-                standard = (int) std.sStd();
-            } else if (std.getDiscriminator() ==
-                    FrontendScanMessage::Standard::hidl_discriminator::tStd) {
-                standard = (int) std.tStd();
-            } else if (std.getDiscriminator() ==
-                    FrontendScanMessage::Standard::hidl_discriminator::sifStd) {
-                standard = (int) std.sifStd();
-            }
-            scanMessage.set<TunerFrontendScanMessage::std>(standard);
-            break;
-        }
-        case FrontendScanMessageType::ATSC3_PLP_INFO: {
-            vector<FrontendScanAtsc3PlpInfo> plpInfos = message.atsc3PlpInfos();
-            vector<TunerFrontendScanAtsc3PlpInfo> tunerPlpInfos;
-            for (int i = 0; i < plpInfos.size(); i++) {
-                auto info = plpInfos[i];
-                int8_t plpId = (int8_t) info.plpId;
-                bool lls = (bool) info.bLlsFlag;
-                TunerFrontendScanAtsc3PlpInfo plpInfo{
-                    .plpId = plpId,
-                    .llsFlag = lls,
-                };
-                tunerPlpInfos.push_back(plpInfo);
-            }
-            scanMessage.set<TunerFrontendScanMessage::atsc3PlpInfos>(tunerPlpInfos);
-            break;
-        }
-        default:
-            break;
+    ALOGV("FrontendCallback::onScanMessage, type=%d", type);
+    if (mTunerFrontendCallback != nullptr) {
+        mTunerFrontendCallback->onScanMessage(type, message);
     }
-    mTunerFrontendCallback->onScanMessage((int)type, scanMessage);
-    return Void();
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<void> TunerFrontend::FrontendCallback::onScanMessageExt1_1(
-        FrontendScanMessageTypeExt1_1 type, const FrontendScanMessageExt1_1& message) {
-    ALOGD("onScanMessageExt1_1::onScanMessage, type=%d", type);
-    TunerFrontendScanMessage scanMessage;
-    switch(type) {
-        case FrontendScanMessageTypeExt1_1::MODULATION: {
-            FrontendModulation m = message.modulation();
-            int modulation;
-            switch (m.getDiscriminator()) {
-                case FrontendModulation::hidl_discriminator::dvbc:
-                    modulation = (int) m.dvbc();
-                    break;
-                case FrontendModulation::hidl_discriminator::dvbt:
-                    modulation = (int) m.dvbt();
-                    break;
-                case FrontendModulation::hidl_discriminator::dvbs:
-                    modulation = (int) m.dvbs();
-                    break;
-                case FrontendModulation::hidl_discriminator::isdbs:
-                    modulation = (int) m.isdbs();
-                    break;
-                case FrontendModulation::hidl_discriminator::isdbs3:
-                    modulation = (int) m.isdbs3();
-                    break;
-                case FrontendModulation::hidl_discriminator::isdbt:
-                    modulation = (int) m.isdbt();
-                    break;
-                case FrontendModulation::hidl_discriminator::atsc:
-                    modulation = (int) m.atsc();
-                    break;
-                case FrontendModulation::hidl_discriminator::atsc3:
-                    modulation = (int) m.atsc3();
-                    break;
-                case FrontendModulation::hidl_discriminator::dtmb:
-                    modulation = (int) m.dtmb();
-                    break;
-            }
-            scanMessage.set<TunerFrontendScanMessage::modulation>(modulation);
-            break;
-        }
-        case FrontendScanMessageTypeExt1_1::DVBC_ANNEX: {
-            scanMessage.set<TunerFrontendScanMessage::annex>((int)message.annex());
-            break;
-        }
-        case FrontendScanMessageTypeExt1_1::HIGH_PRIORITY: {
-            scanMessage.set<TunerFrontendScanMessage::isHighPriority>(message.isHighPriority());
-            break;
-        }
-        default:
-            break;
-    }
-    mTunerFrontendCallback->onScanMessage((int)type, scanMessage);
-    return Void();
-}
-
-/////////////// TunerFrontend Helper Methods ///////////////////////
-
-void TunerFrontend::getAidlFrontendStatus(
-        vector<FrontendStatus>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus) {
-    for (FrontendStatus s : hidlStatus) {
-        TunerFrontendStatus status;
-        switch (s.getDiscriminator()) {
-            case FrontendStatus::hidl_discriminator::isDemodLocked: {
-                status.set<TunerFrontendStatus::isDemodLocked>(s.isDemodLocked());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::snr: {
-                status.set<TunerFrontendStatus::snr>((int)s.snr());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::ber: {
-                status.set<TunerFrontendStatus::ber>((int)s.ber());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::per: {
-                status.set<TunerFrontendStatus::per>((int)s.per());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::preBer: {
-                status.set<TunerFrontendStatus::preBer>((int)s.preBer());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::signalQuality: {
-                status.set<TunerFrontendStatus::signalQuality>((int)s.signalQuality());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::signalStrength: {
-                status.set<TunerFrontendStatus::signalStrength>((int)s.signalStrength());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::symbolRate: {
-                status.set<TunerFrontendStatus::symbolRate>((int)s.symbolRate());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::innerFec: {
-                status.set<TunerFrontendStatus::innerFec>((long)s.innerFec());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::modulation: {
-                switch (s.modulation().getDiscriminator()) {
-                    case FrontendModulationStatus::hidl_discriminator::dvbc:
-                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().dvbc());
-                        aidlStatus.push_back(status);
-                        break;
-                    case FrontendModulationStatus::hidl_discriminator::dvbs:
-                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().dvbs());
-                        aidlStatus.push_back(status);
-                        break;
-                    case FrontendModulationStatus::hidl_discriminator::isdbs:
-                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().isdbs());
-                        aidlStatus.push_back(status);
-                        break;
-                    case FrontendModulationStatus::hidl_discriminator::isdbs3:
-                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().isdbs3());
-                        aidlStatus.push_back(status);
-                        break;
-                    case FrontendModulationStatus::hidl_discriminator::isdbt:
-                        status.set<TunerFrontendStatus::modulation>((int)s.modulation().isdbt());
-                        aidlStatus.push_back(status);
-                        break;
-                }
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::inversion: {
-                status.set<TunerFrontendStatus::inversion>((int)s.inversion());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::lnbVoltage: {
-                status.set<TunerFrontendStatus::lnbVoltage>((int)s.lnbVoltage());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::plpId: {
-                status.set<TunerFrontendStatus::plpId>((int8_t)s.plpId());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::isEWBS: {
-                status.set<TunerFrontendStatus::isEWBS>(s.isEWBS());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::agc: {
-                status.set<TunerFrontendStatus::agc>((int8_t)s.agc());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::isLnaOn: {
-                status.set<TunerFrontendStatus::isLnaOn>(s.isLnaOn());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::isLayerError: {
-                vector<bool> e(s.isLayerError().begin(), s.isLayerError().end());
-                status.set<TunerFrontendStatus::isLayerError>(e);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::mer: {
-                status.set<TunerFrontendStatus::mer>((int)s.mer());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::freqOffset: {
-                status.set<TunerFrontendStatus::freqOffset>((int)s.freqOffset());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::hierarchy: {
-                status.set<TunerFrontendStatus::hierarchy>((int)s.hierarchy());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::isRfLocked: {
-                status.set<TunerFrontendStatus::isRfLocked>(s.isRfLocked());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatus::hidl_discriminator::plpInfo: {
-                vector<TunerFrontendStatusAtsc3PlpInfo> info;
-                for (auto i : s.plpInfo()) {
-                    info.push_back({
-                        .plpId = (int8_t)i.plpId,
-                        .isLocked = i.isLocked,
-                        .uec = (int)i.uec,
-                    });
-                }
-                status.set<TunerFrontendStatus::plpInfo>(info);
-                aidlStatus.push_back(status);
-                break;
-            }
-        }
-    }
-}
-
-void TunerFrontend::getAidlFrontendStatusExt(
-        vector<FrontendStatusExt1_1>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus) {
-    for (FrontendStatusExt1_1 s : hidlStatus) {
-        TunerFrontendStatus status;
-        switch (s.getDiscriminator()) {
-            case FrontendStatusExt1_1::hidl_discriminator::modulations: {
-                vector<int> aidlMod;
-                for (auto m : s.modulations()) {
-                    switch (m.getDiscriminator()) {
-                        case FrontendModulation::hidl_discriminator::dvbc:
-                            aidlMod.push_back((int)m.dvbc());
-                            break;
-                        case FrontendModulation::hidl_discriminator::dvbs:
-                            aidlMod.push_back((int)m.dvbs());
-                            break;
-                        case FrontendModulation::hidl_discriminator::dvbt:
-                            aidlMod.push_back((int)m.dvbt());
-                            break;
-                        case FrontendModulation::hidl_discriminator::isdbs:
-                            aidlMod.push_back((int)m.isdbs());
-                            break;
-                        case FrontendModulation::hidl_discriminator::isdbs3:
-                            aidlMod.push_back((int)m.isdbs3());
-                            break;
-                        case FrontendModulation::hidl_discriminator::isdbt:
-                            aidlMod.push_back((int)m.isdbt());
-                            break;
-                        case FrontendModulation::hidl_discriminator::atsc:
-                            aidlMod.push_back((int)m.atsc());
-                            break;
-                        case FrontendModulation::hidl_discriminator::atsc3:
-                            aidlMod.push_back((int)m.atsc3());
-                            break;
-                        case FrontendModulation::hidl_discriminator::dtmb:
-                            aidlMod.push_back((int)m.dtmb());
-                            break;
-                    }
-                }
-                status.set<TunerFrontendStatus::modulations>(aidlMod);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::bers: {
-                vector<int> b(s.bers().begin(), s.bers().end());
-                status.set<TunerFrontendStatus::bers>(b);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::codeRates: {
-                vector<int64_t> codeRates;
-                for (auto c : s.codeRates()) {
-                    codeRates.push_back((long)c);
-                }
-                status.set<TunerFrontendStatus::codeRates>(codeRates);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::bandwidth: {
-                switch (s.bandwidth().getDiscriminator()) {
-                    case FrontendBandwidth::hidl_discriminator::atsc3:
-                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().atsc3());
-                        break;
-                    case FrontendBandwidth::hidl_discriminator::dvbc:
-                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().dvbc());
-                        break;
-                    case FrontendBandwidth::hidl_discriminator::dvbt:
-                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().dvbt());
-                        break;
-                    case FrontendBandwidth::hidl_discriminator::isdbt:
-                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().isdbt());
-                        break;
-                    case FrontendBandwidth::hidl_discriminator::dtmb:
-                        status.set<TunerFrontendStatus::bandwidth>((int)s.bandwidth().dtmb());
-                        break;
-                }
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::interval: {
-                switch (s.interval().getDiscriminator()) {
-                    case FrontendGuardInterval::hidl_discriminator::dvbt:
-                        status.set<TunerFrontendStatus::interval>((int)s.interval().dvbt());
-                        break;
-                    case FrontendGuardInterval::hidl_discriminator::isdbt:
-                        status.set<TunerFrontendStatus::interval>((int)s.interval().isdbt());
-                        break;
-                    case FrontendGuardInterval::hidl_discriminator::dtmb:
-                        status.set<TunerFrontendStatus::interval>((int)s.interval().dtmb());
-                        break;
-                }
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::transmissionMode: {
-                switch (s.transmissionMode().getDiscriminator()) {
-                    case FrontendTransmissionMode::hidl_discriminator::dvbt:
-                        status.set<TunerFrontendStatus::transmissionMode>(
-                                (int)s.transmissionMode().dvbt());
-                        break;
-                    case FrontendTransmissionMode::hidl_discriminator::isdbt:
-                        status.set<TunerFrontendStatus::transmissionMode>(
-                                (int)s.transmissionMode().isdbt());
-                        break;
-                    case FrontendTransmissionMode::hidl_discriminator::dtmb:
-                        status.set<TunerFrontendStatus::transmissionMode>(
-                                (int)s.transmissionMode().dtmb());
-                        break;
-                }
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::uec: {
-                status.set<TunerFrontendStatus::uec>((int)s.uec());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::systemId: {
-                status.set<TunerFrontendStatus::systemId>((char16_t)s.systemId());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::interleaving: {
-                vector<int> aidlInter;
-                for (auto i : s.interleaving()) {
-                    switch (i.getDiscriminator()) {
-                        case FrontendInterleaveMode::hidl_discriminator::atsc3:
-                            aidlInter.push_back((int)i.atsc3());
-                            break;
-                        case FrontendInterleaveMode::hidl_discriminator::dvbc:
-                            aidlInter.push_back((int)i.dvbc());
-                            break;
-                        case FrontendInterleaveMode::hidl_discriminator::dtmb:
-                            aidlInter.push_back((int)i.dtmb());
-                            break;
-                    }
-                }
-                status.set<TunerFrontendStatus::interleaving>(aidlInter);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::isdbtSegment: {
-                auto seg = s.isdbtSegment();
-                vector<uint8_t> i(seg.begin(), seg.end());
-                status.set<TunerFrontendStatus::isdbtSegment>(i);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::tsDataRate: {
-                vector<int> ts(s.tsDataRate().begin(), s.tsDataRate().end());
-                status.set<TunerFrontendStatus::tsDataRate>(ts);
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::rollOff: {
-                switch (s.rollOff().getDiscriminator()) {
-                    case FrontendRollOff::hidl_discriminator::dvbs:
-                        status.set<TunerFrontendStatus::rollOff>((int)s.rollOff().dvbs());
-                        break;
-                    case FrontendRollOff::hidl_discriminator::isdbs:
-                        status.set<TunerFrontendStatus::rollOff>((int)s.rollOff().isdbs());
-                        break;
-                    case FrontendRollOff::hidl_discriminator::isdbs3:
-                        status.set<TunerFrontendStatus::rollOff>((int)s.rollOff().isdbs3());
-                        break;
-                }
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::isMiso: {
-                status.set<TunerFrontendStatus::isMiso>(s.isMiso());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::isLinear: {
-                status.set<TunerFrontendStatus::isLinear>(s.isLinear());
-                aidlStatus.push_back(status);
-                break;
-            }
-            case FrontendStatusExt1_1::hidl_discriminator::isShortFrames: {
-                status.set<TunerFrontendStatus::isShortFrames>(s.isShortFrames());
-                aidlStatus.push_back(status);
-                break;
-            }
-        }
-    }
-}
-
-hidl_vec<FrontendAtsc3PlpSettings> TunerFrontend::getAtsc3PlpSettings(
-        const TunerFrontendAtsc3Settings& settings) {
-    int len = settings.plpSettings.size();
-    hidl_vec<FrontendAtsc3PlpSettings> plps = hidl_vec<FrontendAtsc3PlpSettings>(len);
-    // parse PLP settings
-    for (int i = 0; i < len; i++) {
-        uint8_t plpId = static_cast<uint8_t>(settings.plpSettings[i].plpId);
-        FrontendAtsc3Modulation modulation =
-                static_cast<FrontendAtsc3Modulation>(settings.plpSettings[i].modulation);
-        FrontendAtsc3TimeInterleaveMode interleaveMode =
-                static_cast<FrontendAtsc3TimeInterleaveMode>(
-                        settings.plpSettings[i].interleaveMode);
-        FrontendAtsc3CodeRate codeRate =
-                static_cast<FrontendAtsc3CodeRate>(settings.plpSettings[i].codeRate);
-        FrontendAtsc3Fec fec =
-                static_cast<FrontendAtsc3Fec>(settings.plpSettings[i].fec);
-        FrontendAtsc3PlpSettings frontendAtsc3PlpSettings {
-                .plpId = plpId,
-                .modulation = modulation,
-                .interleaveMode = interleaveMode,
-                .codeRate = codeRate,
-                .fec = fec,
-        };
-        plps[i] = frontendAtsc3PlpSettings;
-    }
-    return plps;
-}
-
-FrontendDvbsCodeRate TunerFrontend::getDvbsCodeRate(const TunerFrontendDvbsCodeRate& codeRate) {
-    FrontendInnerFec innerFec = static_cast<FrontendInnerFec>(codeRate.fec);
-    bool isLinear = codeRate.isLinear;
-    bool isShortFrames = codeRate.isShortFrames;
-    uint32_t bitsPer1000Symbol = static_cast<uint32_t>(codeRate.bitsPer1000Symbol);
-    FrontendDvbsCodeRate coderate {
-            .fec = innerFec,
-            .isLinear = isLinear,
-            .isShortFrames = isShortFrames,
-            .bitsPer1000Symbol = bitsPer1000Symbol,
-    };
-    return coderate;
-}
-
-FrontendSettings TunerFrontend::getHidlFrontendSettings(const TunerFrontendSettings& aidlSettings) {
-    auto settings = aidlSettings.settings;
-    FrontendSettings frontendSettings;
-
-    switch (settings.getTag()) {
-        case TunerFrontendUnionSettings::analog: {
-            auto analog = settings.get<TunerFrontendUnionSettings::analog>();
-            frontendSettings.analog({
-                .frequency = static_cast<uint32_t>(analog.frequency),
-                .type = static_cast<FrontendAnalogType>(analog.signalType),
-                .sifStandard = static_cast<FrontendAnalogSifStandard>(analog.sifStandard),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::atsc: {
-            auto atsc = settings.get<TunerFrontendUnionSettings::atsc>();
-            frontendSettings.atsc({
-                .frequency = static_cast<uint32_t>(atsc.frequency),
-                .modulation = static_cast<FrontendAtscModulation>(atsc.modulation),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::atsc3: {
-            auto atsc3 = settings.get<TunerFrontendUnionSettings::atsc3>();
-            frontendSettings.atsc3({
-                .frequency = static_cast<uint32_t>(atsc3.frequency),
-                .bandwidth = static_cast<FrontendAtsc3Bandwidth>(atsc3.bandwidth),
-                .demodOutputFormat = static_cast<FrontendAtsc3DemodOutputFormat>(
-                        atsc3.demodOutputFormat),
-                .plpSettings = getAtsc3PlpSettings(atsc3),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::cable: {
-            auto dvbc = settings.get<TunerFrontendUnionSettings::cable>();
-            frontendSettings.dvbc({
-                .frequency = static_cast<uint32_t>(dvbc.frequency),
-                .modulation = static_cast<FrontendDvbcModulation>(dvbc.modulation),
-                .fec = static_cast<FrontendInnerFec>(dvbc.innerFec),
-                .symbolRate = static_cast<uint32_t>(dvbc.symbolRate),
-                .outerFec = static_cast<FrontendDvbcOuterFec>(dvbc.outerFec),
-                .annex = static_cast<FrontendDvbcAnnex>(dvbc.annex),
-                .spectralInversion = static_cast<FrontendDvbcSpectralInversion>(
-                        dvbc.spectralInversion),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::dvbs: {
-            auto dvbs = settings.get<TunerFrontendUnionSettings::dvbs>();
-            frontendSettings.dvbs({
-                .frequency = static_cast<uint32_t>(dvbs.frequency),
-                .modulation = static_cast<FrontendDvbsModulation>(dvbs.modulation),
-                .coderate = getDvbsCodeRate(dvbs.codeRate),
-                .symbolRate = static_cast<uint32_t>(dvbs.symbolRate),
-                .rolloff = static_cast<FrontendDvbsRolloff>(dvbs.rolloff),
-                .pilot = static_cast<FrontendDvbsPilot>(dvbs.pilot),
-                .inputStreamId = static_cast<uint32_t>(dvbs.inputStreamId),
-                .standard = static_cast<FrontendDvbsStandard>(dvbs.standard),
-                .vcmMode = static_cast<FrontendDvbsVcmMode>(dvbs.vcm),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::dvbt: {
-            auto dvbt = settings.get<TunerFrontendUnionSettings::dvbt>();
-            frontendSettings.dvbt({
-                .frequency = static_cast<uint32_t>(dvbt.frequency),
-                .transmissionMode = static_cast<FrontendDvbtTransmissionMode>(
-                        dvbt.transmissionMode),
-                .bandwidth = static_cast<FrontendDvbtBandwidth>(dvbt.bandwidth),
-                .constellation = static_cast<FrontendDvbtConstellation>(dvbt.constellation),
-                .hierarchy = static_cast<FrontendDvbtHierarchy>(dvbt.hierarchy),
-                .hpCoderate = static_cast<FrontendDvbtCoderate>(dvbt.hpCodeRate),
-                .lpCoderate = static_cast<FrontendDvbtCoderate>(dvbt.lpCodeRate),
-                .guardInterval = static_cast<FrontendDvbtGuardInterval>(dvbt.guardInterval),
-                .isHighPriority = dvbt.isHighPriority,
-                .standard = static_cast<FrontendDvbtStandard>(dvbt.standard),
-                .isMiso = dvbt.isMiso,
-                .plpMode = static_cast<FrontendDvbtPlpMode>(dvbt.plpMode),
-                .plpId = static_cast<uint8_t>(dvbt.plpId),
-                .plpGroupId = static_cast<uint8_t>(dvbt.plpGroupId),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::isdbs: {
-            auto isdbs = settings.get<TunerFrontendUnionSettings::isdbs>();
-            frontendSettings.isdbs({
-                .frequency = static_cast<uint32_t>(isdbs.frequency),
-                .streamId = static_cast<uint16_t>(isdbs.streamId),
-                .streamIdType = static_cast<FrontendIsdbsStreamIdType>(isdbs.streamIdType),
-                .modulation = static_cast<FrontendIsdbsModulation>(isdbs.modulation),
-                .coderate = static_cast<FrontendIsdbsCoderate>(isdbs.codeRate),
-                .symbolRate = static_cast<uint32_t>(isdbs.symbolRate),
-                .rolloff = static_cast<FrontendIsdbsRolloff>(isdbs.rolloff),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::isdbs3: {
-            auto isdbs3 = settings.get<TunerFrontendUnionSettings::isdbs3>();
-            frontendSettings.isdbs3({
-                .frequency = static_cast<uint32_t>(isdbs3.frequency),
-                .streamId = static_cast<uint16_t>(isdbs3.streamId),
-                .streamIdType = static_cast<FrontendIsdbsStreamIdType>(isdbs3.streamIdType),
-                .modulation = static_cast<FrontendIsdbs3Modulation>(isdbs3.modulation),
-                .coderate = static_cast<FrontendIsdbs3Coderate>(isdbs3.codeRate),
-                .symbolRate = static_cast<uint32_t>(isdbs3.symbolRate),
-                .rolloff = static_cast<FrontendIsdbs3Rolloff>(isdbs3.rolloff),
-            });
-            break;
-        }
-        case TunerFrontendUnionSettings::isdbt: {
-            auto isdbt = settings.get<TunerFrontendUnionSettings::isdbt>();
-            frontendSettings.isdbt({
-                .frequency = static_cast<uint32_t>(isdbt.frequency),
-                .modulation = static_cast<FrontendIsdbtModulation>(isdbt.modulation),
-                .bandwidth = static_cast<FrontendIsdbtBandwidth>(isdbt.bandwidth),
-                .mode = static_cast<FrontendIsdbtMode>(isdbt.mode),
-                .coderate = static_cast<FrontendIsdbtCoderate>(isdbt.codeRate),
-                .guardInterval = static_cast<FrontendIsdbtGuardInterval>(isdbt.guardInterval),
-                .serviceAreaId = static_cast<uint32_t>(isdbt.serviceAreaId),
-            });
-            break;
-        }
-        default:
-            break;
-    }
-
-    return frontendSettings;
-}
-
-FrontendSettingsExt1_1 TunerFrontend::getHidlFrontendSettingsExt(
-        const TunerFrontendSettings& aidlSettings) {
-    FrontendSettingsExt1_1 frontendSettingsExt{
-        .endFrequency = static_cast<uint32_t>(aidlSettings.endFrequency),
-        .inversion = static_cast<FrontendSpectralInversion>(aidlSettings.inversion),
-    };
-
-    auto settings = aidlSettings.settings;
-    switch (settings.getTag()) {
-        case TunerFrontendUnionSettings::analog: {
-            auto analog = settings.get<TunerFrontendUnionSettings::analog>();
-            if (analog.isExtended) {
-                frontendSettingsExt.settingExt.analog({
-                    .aftFlag = static_cast<FrontendAnalogAftFlag>(analog.aftFlag),
-                });
-            } else {
-                frontendSettingsExt.settingExt.noinit();
-            }
-            break;
-        }
-        case TunerFrontendUnionSettings::cable: {
-            auto dvbc = settings.get<TunerFrontendUnionSettings::cable>();
-            if (dvbc.isExtended) {
-                frontendSettingsExt.settingExt.dvbc({
-                    .interleaveMode = static_cast<FrontendCableTimeInterleaveMode>(
-                            dvbc.interleaveMode),
-                    .bandwidth = static_cast<FrontendDvbcBandwidth>(
-                            dvbc.bandwidth),
-                });
-            } else {
-                frontendSettingsExt.settingExt.noinit();
-            }
-            break;
-        }
-        case TunerFrontendUnionSettings::dvbs: {
-            auto dvbs = settings.get<TunerFrontendUnionSettings::dvbs>();
-            if (dvbs.isExtended) {
-                frontendSettingsExt.settingExt.dvbs({
-                    .scanType = static_cast<FrontendDvbsScanType>(dvbs.scanType),
-                    .isDiseqcRxMessage = dvbs.isDiseqcRxMessage,
-                });
-            } else {
-                frontendSettingsExt.settingExt.noinit();
-            }
-            break;
-        }
-        case TunerFrontendUnionSettings::dvbt: {
-            auto dvbt = settings.get<TunerFrontendUnionSettings::dvbt>();
-            if (dvbt.isExtended) {
-                frontendSettingsExt.settingExt.dvbt({
-                    .constellation =
-                            static_cast<hardware::tv::tuner::V1_1::FrontendDvbtConstellation>(
-                                    dvbt.constellation),
-                    .transmissionMode =
-                            static_cast<hardware::tv::tuner::V1_1::FrontendDvbtTransmissionMode>(
-                                    dvbt.transmissionMode),
-                });
-            } else {
-                frontendSettingsExt.settingExt.noinit();
-            }
-            break;
-        }
-        case TunerFrontendUnionSettings::dtmb: {
-            auto dtmb = settings.get<TunerFrontendUnionSettings::dtmb>();
-            frontendSettingsExt.settingExt.dtmb({
-                .frequency = static_cast<uint32_t>(dtmb.frequency),
-                .transmissionMode = static_cast<FrontendDtmbTransmissionMode>(
-                        dtmb.transmissionMode),
-                .bandwidth = static_cast<FrontendDtmbBandwidth>(dtmb.bandwidth),
-                .modulation = static_cast<FrontendDtmbModulation>(dtmb.modulation),
-                .codeRate = static_cast<FrontendDtmbCodeRate>(dtmb.codeRate),
-                .guardInterval = static_cast<FrontendDtmbGuardInterval>(dtmb.guardInterval),
-                .interleaveMode = static_cast<FrontendDtmbTimeInterleaveMode>(dtmb.interleaveMode),
-            });
-            break;
-        }
-        default:
-            frontendSettingsExt.settingExt.noinit();
-            break;
-    }
-
-    return frontendSettingsExt;
-}
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
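The hand-written converters deleted above (the frontend status unions, the settings unions, and the ATSC3 PLP / DVB-S code-rate helpers) existed only to bridge the HIDL 1.0/1.1 types and the tuner AIDL parcelables. With the Tuner HAL itself now speaking aidl::android::hardware::tv::tuner, the wrapper can hand the same parcelable straight to the HAL. A minimal sketch of the resulting pass-through shape, assuming the AIDL mFrontend member declared in TunerFrontend.h below and the AIDL Result error code used elsewhere in this change (illustrative, not the literal TunerFrontend.cpp body):

    ::ndk::ScopedAStatus TunerFrontend::tune(const FrontendSettings& in_settings) {
        if (mFrontend == nullptr) {
            ALOGE("IFrontend is not initialized");
            return ::ndk::ScopedAStatus::fromServiceSpecificError(
                    static_cast<int32_t>(Result::UNAVAILABLE));
        }
        // FrontendSettings is the shared AIDL parcelable; no per-field translation needed.
        return mFrontend->tune(in_settings);
    }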
diff --git a/services/tuner/TunerFrontend.h b/services/tuner/TunerFrontend.h
index 22fd509..da471fb 100644
--- a/services/tuner/TunerFrontend.h
+++ b/services/tuner/TunerFrontend.h
@@ -1,5 +1,5 @@
 /**
- * Copyright 2020, The Android Open Source Project
+ * Copyright 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,95 +17,78 @@
 #ifndef ANDROID_MEDIA_TUNERFRONTEND_H
 #define ANDROID_MEDIA_TUNERFRONTEND_H
 
+#include <aidl/android/hardware/tv/tuner/BnFrontendCallback.h>
+#include <aidl/android/hardware/tv/tuner/IFrontend.h>
+#include <aidl/android/hardware/tv/tuner/IFrontendCallback.h>
 #include <aidl/android/media/tv/tuner/BnTunerFrontend.h>
-#include <android/hardware/tv/tuner/1.0/ITuner.h>
-#include <android/hardware/tv/tuner/1.1/IFrontend.h>
-#include <android/hardware/tv/tuner/1.1/IFrontendCallback.h>
-#include <media/stagefright/foundation/ADebug.h>
 #include <utils/Log.h>
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::tv::tuner::BnTunerFrontend;
-using ::aidl::android::media::tv::tuner::ITunerFrontendCallback;
-using ::aidl::android::media::tv::tuner::ITunerLnb;
-using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Settings;
-using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCodeRate;
-using ::aidl::android::media::tv::tuner::TunerFrontendScanMessage;
-using ::aidl::android::media::tv::tuner::TunerFrontendSettings;
-using ::aidl::android::media::tv::tuner::TunerFrontendStatus;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3PlpSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendDvbsCodeRate;
-using ::android::hardware::tv::tuner::V1_0::FrontendEventType;
-using ::android::hardware::tv::tuner::V1_0::FrontendId;
-using ::android::hardware::tv::tuner::V1_0::FrontendScanMessage;
-using ::android::hardware::tv::tuner::V1_0::FrontendScanMessageType;
-using ::android::hardware::tv::tuner::V1_0::FrontendSettings;
-using ::android::hardware::tv::tuner::V1_0::FrontendStatus;
-using ::android::hardware::tv::tuner::V1_0::IFrontend;
-using ::android::hardware::tv::tuner::V1_1::IFrontendCallback;
-using ::android::hardware::tv::tuner::V1_1::FrontendScanMessageExt1_1;
-using ::android::hardware::tv::tuner::V1_1::FrontendScanMessageTypeExt1_1;
-using ::android::hardware::tv::tuner::V1_1::FrontendSettingsExt1_1;
-using ::android::hardware::tv::tuner::V1_1::FrontendStatusExt1_1;
+using ::aidl::android::hardware::tv::tuner::BnFrontendCallback;
+using ::aidl::android::hardware::tv::tuner::FrontendEventType;
+using ::aidl::android::hardware::tv::tuner::FrontendScanMessage;
+using ::aidl::android::hardware::tv::tuner::FrontendScanMessageType;
+using ::aidl::android::hardware::tv::tuner::FrontendScanType;
+using ::aidl::android::hardware::tv::tuner::FrontendSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendStatus;
+using ::aidl::android::hardware::tv::tuner::FrontendStatusReadiness;
+using ::aidl::android::hardware::tv::tuner::FrontendStatusType;
+using ::aidl::android::hardware::tv::tuner::IFrontend;
+using ::aidl::android::hardware::tv::tuner::IFrontendCallback;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
 class TunerFrontend : public BnTunerFrontend {
 
 public:
-    TunerFrontend(sp<IFrontend> frontend, int id);
+    TunerFrontend(shared_ptr<IFrontend> frontend, int id);
     virtual ~TunerFrontend();
-    Status setCallback(
-            const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) override;
-    Status tune(const TunerFrontendSettings& settings) override;
-    Status stopTune() override;
-    Status scan(const TunerFrontendSettings& settings, int frontendScanType) override;
-    Status stopScan() override;
-    Status setLnb(const shared_ptr<ITunerLnb>& lnb) override;
-    Status setLna(bool bEnable) override;
-    Status linkCiCamToFrontend(int ciCamId, int32_t* _aidl_return) override;
-    Status unlinkCiCamToFrontend(int ciCamId) override;
-    Status close() override;
-    Status getStatus(const vector<int32_t>& statusTypes,
-            vector<TunerFrontendStatus>* _aidl_return) override;
-    Status getStatusExtended_1_1(const vector<int32_t>& statusTypes,
-            vector<TunerFrontendStatus>* _aidl_return) override;
-    Status getFrontendId(int* _aidl_return) override;
 
-    struct FrontendCallback : public IFrontendCallback {
+    ::ndk::ScopedAStatus setCallback(
+            const shared_ptr<ITunerFrontendCallback>& in_tunerFrontendCallback) override;
+    ::ndk::ScopedAStatus tune(const FrontendSettings& in_settings) override;
+    ::ndk::ScopedAStatus stopTune() override;
+    ::ndk::ScopedAStatus scan(const FrontendSettings& in_settings,
+                              FrontendScanType in_frontendScanType) override;
+    ::ndk::ScopedAStatus stopScan() override;
+    ::ndk::ScopedAStatus setLnb(const shared_ptr<ITunerLnb>& in_lnb) override;
+    ::ndk::ScopedAStatus linkCiCamToFrontend(int32_t in_ciCamId, int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus unlinkCiCamToFrontend(int32_t in_ciCamId) override;
+    ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus getStatus(const vector<FrontendStatusType>& in_statusTypes,
+                                   vector<FrontendStatus>* _aidl_return) override;
+    ::ndk::ScopedAStatus getFrontendId(int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getHardwareInfo(std::string* _aidl_return) override;
+    ::ndk::ScopedAStatus removeOutputPid(int32_t in_pid) override;
+    ::ndk::ScopedAStatus getFrontendStatusReadiness(
+            const std::vector<FrontendStatusType>& in_statusTypes,
+            std::vector<FrontendStatusReadiness>* _aidl_return) override;
+
+    struct FrontendCallback : public BnFrontendCallback {
         FrontendCallback(const shared_ptr<ITunerFrontendCallback> tunerFrontendCallback)
-                : mTunerFrontendCallback(tunerFrontendCallback) {};
+              : mTunerFrontendCallback(tunerFrontendCallback){};
 
-        virtual Return<void> onEvent(FrontendEventType frontendEventType);
-        virtual Return<void> onScanMessage(
-                FrontendScanMessageType type, const FrontendScanMessage& message);
-        virtual Return<void> onScanMessageExt1_1(
-                FrontendScanMessageTypeExt1_1 type, const FrontendScanMessageExt1_1& message);
+        ::ndk::ScopedAStatus onEvent(FrontendEventType frontendEventType) override;
+        ::ndk::ScopedAStatus onScanMessage(FrontendScanMessageType type,
+                                           const FrontendScanMessage& message) override;
 
         shared_ptr<ITunerFrontendCallback> mTunerFrontendCallback;
     };
 
 private:
-    hidl_vec<FrontendAtsc3PlpSettings> getAtsc3PlpSettings(
-            const TunerFrontendAtsc3Settings& settings);
-    FrontendDvbsCodeRate getDvbsCodeRate(const TunerFrontendDvbsCodeRate& codeRate);
-    FrontendSettings getHidlFrontendSettings(const TunerFrontendSettings& aidlSettings);
-    FrontendSettingsExt1_1 getHidlFrontendSettingsExt(const TunerFrontendSettings& aidlSettings);
-    void getAidlFrontendStatus(
-            vector<FrontendStatus>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus);
-    void getAidlFrontendStatusExt(
-            vector<FrontendStatusExt1_1>& hidlStatus, vector<TunerFrontendStatus>& aidlStatus);
-
     int mId;
-    sp<IFrontend> mFrontend;
-    sp<::android::hardware::tv::tuner::V1_1::IFrontend> mFrontend_1_1;
+    shared_ptr<IFrontend> mFrontend;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERFRONTEND_H
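Because the callback struct now derives from the generated BnFrontendCallback, it has to be instantiated through ::ndk::SharedRefBase::make before it is handed to the HAL, mirroring what TunerLnb::setCallback does further down in this change. A minimal sketch, assuming the same null checks and AIDL Result codes used there (not the literal TunerFrontend.cpp body):

    ::ndk::ScopedAStatus TunerFrontend::setCallback(
            const shared_ptr<ITunerFrontendCallback>& in_tunerFrontendCallback) {
        if (mFrontend == nullptr || in_tunerFrontendCallback == nullptr) {
            return ::ndk::ScopedAStatus::fromServiceSpecificError(
                    static_cast<int32_t>(Result::INVALID_ARGUMENT));
        }
        // Bn* implementations must be created via SharedRefBase::make, never with new.
        shared_ptr<IFrontendCallback> cb =
                ::ndk::SharedRefBase::make<FrontendCallback>(in_tunerFrontendCallback);
        return mFrontend->setCallback(cb);
    }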
diff --git a/services/tuner/TunerHelper.cpp b/services/tuner/TunerHelper.cpp
new file mode 100644
index 0000000..dc67110
--- /dev/null
+++ b/services/tuner/TunerHelper.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TunerHelper.h"
+
+#include <aidl/android/media/tv/tunerresourcemanager/ITunerResourceManager.h>
+#include <android/binder_manager.h>
+#include <android/content/pm/IPackageManagerNative.h>
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+
+using ::aidl::android::media::tv::tunerresourcemanager::ITunerResourceManager;
+using ::android::defaultServiceManager;
+using ::android::IBinder;
+using ::android::interface_cast;
+using ::android::IServiceManager;
+using ::android::sp;
+using ::android::binder::Status;
+using ::android::content::pm::IPackageManagerNative;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+// System Feature defined in PackageManager
+static const ::android::String16 FEATURE_TUNER(::android::String16("android.hardware.tv.tuner"));
+
+int32_t TunerHelper::sResourceRequestCount = 0;
+
+bool TunerHelper::checkTunerFeature() {
+    sp<IServiceManager> serviceMgr = defaultServiceManager();
+    sp<IPackageManagerNative> packageMgr;
+    if (serviceMgr.get() == nullptr) {
+        ALOGE("%s: Cannot find service manager", __func__);
+        return false;
+    }
+
+    sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+    packageMgr = interface_cast<IPackageManagerNative>(binder);
+    if (packageMgr != nullptr) {
+        bool hasFeature = false;
+        Status status = packageMgr->hasSystemFeature(FEATURE_TUNER, 0, &hasFeature);
+        if (!status.isOk()) {
+            ALOGE("%s: hasSystemFeature failed: %s", __func__, status.exceptionMessage().c_str());
+            return false;
+        }
+        if (!hasFeature) {
+            ALOGD("Current device does not support tuner feaure.");
+            return false;
+        }
+    } else {
+        ALOGD("%s: Cannot find package manager.", __func__);
+        return false;
+    }
+
+    return true;
+}
+
+// TODO: update Demux, Descrambler.
+void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
+                                       const vector<int32_t>& lnbHandles) {
+    ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
+    shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
+    if (tunerRM == nullptr) {
+        return;
+    }
+
+    tunerRM->setFrontendInfoList(feInfos);
+    tunerRM->setLnbInfoList(lnbHandles);
+}
+
+// TODO: create a map between resource id and handles.
+int TunerHelper::getResourceIdFromHandle(int resourceHandle, int /*type*/) {
+    return (resourceHandle & 0x00ff0000) >> 16;
+}
+
+int TunerHelper::getResourceHandleFromId(int id, int resourceType) {
+    // TODO: build up randomly generated id to handle mapping
+    return (resourceType & 0x000000ff) << 24 | (id << 16) | (sResourceRequestCount++ & 0xffff);
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
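The handle helpers above pack the resource type into bits 31-24, the resource id into bits 23-16, and a per-process request counter into the low 16 bits. A worked example, assuming this is the first request in the process (sResourceRequestCount still 0) and that FRONTEND from TunerHelper.h evaluates to 0:

    // type = FRONTEND (0), id = 3, count = 0  ->  handle = 0x00030000
    int handle = TunerHelper::getResourceHandleFromId(3 /* id */, FRONTEND);
    // (0x00030000 & 0x00ff0000) >> 16 == 3, recovering the frontend id
    int id = TunerHelper::getResourceIdFromHandle(handle, FRONTEND);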
diff --git a/services/tuner/TunerHelper.h b/services/tuner/TunerHelper.h
new file mode 100644
index 0000000..755df57
--- /dev/null
+++ b/services/tuner/TunerHelper.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERDVRHELPER_H
+#define ANDROID_MEDIA_TUNERDVRHELPER_H
+
+#include <aidl/android/media/tv/tunerresourcemanager/TunerFrontendInfo.h>
+#include <utils/String16.h>
+
+using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
+using ::android::String16;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+const static int TUNER_HAL_VERSION_UNKNOWN = 0;
+const static int TUNER_HAL_VERSION_1_0 = 1 << 16;
+const static int TUNER_HAL_VERSION_1_1 = (1 << 16) | 1;
+const static int TUNER_HAL_VERSION_2_0 = 2 << 16;
+
+// Keep in sync with ShareFilter.java
+const static int STATUS_INACCESSIBLE = 1 << 7;
+
+const static String16 sSharedFilterPermission("android.permission.ACCESS_TV_SHARED_FILTER");
+
+typedef enum {
+    FRONTEND,
+    DEMUX,
+    DESCRAMBLER,
+    LNB
+} TunerResourceType;
+
+class TunerHelper {
+public:
+    static bool checkTunerFeature();
+
+    // TODO: update Demux, Descrambler.
+    static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
+                                     const vector<int32_t>& lnbHandles);
+    // TODO: create a map between resource id and handles.
+    static int getResourceIdFromHandle(int resourceHandle, int type);
+    static int getResourceHandleFromId(int id, int resourceType);
+
+private:
+    static int32_t sResourceRequestCount;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERDVRHELPER_H
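The TUNER_HAL_VERSION_* constants above encode (major, minor) as (major << 16) | minor, which is also why the commented-out TODO in TunerService.cpp below maps AIDL interface version 1 to (1 + 1) << 16, i.e. Tuner HAL 2.0. A small sketch, using a hypothetical helper inside the same namespace to make the packing explicit:

    constexpr int makeHalVersion(int major, int minor) {  // hypothetical helper
        return (major << 16) | minor;
    }
    static_assert(makeHalVersion(1, 0) == TUNER_HAL_VERSION_1_0);
    static_assert(makeHalVersion(1, 1) == TUNER_HAL_VERSION_1_1);
    static_assert(makeHalVersion(2, 0) == TUNER_HAL_VERSION_2_0);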
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 77248d4..1e143c3 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -18,123 +18,116 @@
 
 #include "TunerLnb.h"
 
-using ::android::hardware::tv::tuner::V1_0::LnbPosition;
-using ::android::hardware::tv::tuner::V1_0::LnbTone;
-using ::android::hardware::tv::tuner::V1_0::LnbVoltage;
-using ::android::hardware::tv::tuner::V1_0::Result;
+#include <aidl/android/hardware/tv/tuner/ILnbCallback.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
 
+using ::aidl::android::hardware::tv::tuner::ILnbCallback;
+using ::aidl::android::hardware::tv::tuner::Result;
+
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-TunerLnb::TunerLnb(sp<ILnb> lnb, int id) {
+TunerLnb::TunerLnb(shared_ptr<ILnb> lnb, int id) {
     mLnb = lnb;
     mId = id;
 }
 
 TunerLnb::~TunerLnb() {
-    mLnb = NULL;
+    mLnb = nullptr;
     mId = -1;
 }
 
-Status TunerLnb::setCallback(
-        const shared_ptr<ITunerLnbCallback>& tunerLnbCallback) {
-    if (mLnb == NULL) {
+::ndk::ScopedAStatus TunerLnb::setCallback(
+        const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) {
+    if (mLnb == nullptr) {
         ALOGE("ILnb is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    if (tunerLnbCallback == NULL) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    if (in_tunerLnbCallback == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
     }
 
-    sp<ILnbCallback> lnbCallback = new LnbCallback(tunerLnbCallback);
-    Result status = mLnb->setCallback(lnbCallback);
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    shared_ptr<ILnbCallback> lnbCallback =
+            ::ndk::SharedRefBase::make<LnbCallback>(in_tunerLnbCallback);
+    return mLnb->setCallback(lnbCallback);
 }
 
-Status TunerLnb::setVoltage(int voltage) {
-    if (mLnb == NULL) {
+::ndk::ScopedAStatus TunerLnb::setVoltage(LnbVoltage in_voltage) {
+    if (mLnb == nullptr) {
         ALOGE("ILnb is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mLnb->setVoltage(static_cast<LnbVoltage>(voltage));
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mLnb->setVoltage(in_voltage);
 }
 
-Status TunerLnb::setTone(int tone) {
-    if (mLnb == NULL) {
+::ndk::ScopedAStatus TunerLnb::setTone(LnbTone in_tone) {
+    if (mLnb == nullptr) {
         ALOGE("ILnb is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mLnb->setTone(static_cast<LnbTone>(tone));
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mLnb->setTone(in_tone);
 }
 
-Status TunerLnb::setSatellitePosition(int position) {
-    if (mLnb == NULL) {
+::ndk::ScopedAStatus TunerLnb::setSatellitePosition(LnbPosition in_position) {
+    if (mLnb == nullptr) {
         ALOGE("ILnb is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mLnb->setSatellitePosition(static_cast<LnbPosition>(position));
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mLnb->setSatellitePosition(in_position);
 }
 
-Status TunerLnb::sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) {
-    if (mLnb == NULL) {
+::ndk::ScopedAStatus TunerLnb::sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) {
+    if (mLnb == nullptr) {
         ALOGE("ILnb is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mLnb->sendDiseqcMessage(diseqcMessage);
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mLnb->sendDiseqcMessage(in_diseqcMessage);
 }
 
-Status TunerLnb::close() {
-    if (mLnb == NULL) {
+::ndk::ScopedAStatus TunerLnb::close() {
+    if (mLnb == nullptr) {
         ALOGE("ILnb is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mLnb->close();
-    mLnb = NULL;
+    auto res = mLnb->close();
+    mLnb = nullptr;
 
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return res;
 }
 
 /////////////// ILnbCallback ///////////////////////
-
-Return<void> TunerLnb::LnbCallback::onEvent(const LnbEventType lnbEventType) {
-    if (mTunerLnbCallback != NULL) {
-        mTunerLnbCallback->onEvent((int)lnbEventType);
+::ndk::ScopedAStatus TunerLnb::LnbCallback::onEvent(const LnbEventType lnbEventType) {
+    if (mTunerLnbCallback != nullptr) {
+        mTunerLnbCallback->onEvent(lnbEventType);
     }
-    return Void();
+    return ndk::ScopedAStatus::ok();
 }
 
-Return<void> TunerLnb::LnbCallback::onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage) {
-    if (mTunerLnbCallback != NULL && diseqcMessage != NULL) {
-        vector<uint8_t> msg(begin(diseqcMessage), end(diseqcMessage));
-        mTunerLnbCallback->onDiseqcMessage(msg);
+::ndk::ScopedAStatus TunerLnb::LnbCallback::onDiseqcMessage(const vector<uint8_t>& diseqcMessage) {
+    if (mTunerLnbCallback != nullptr) {
+        mTunerLnbCallback->onDiseqcMessage(diseqcMessage);
     }
-    return Void();
+    return ndk::ScopedAStatus::ok();
 }
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
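Because the AIDL ILnb methods already return ::ndk::ScopedAStatus, the Result-to-Status mapping the HIDL wrapper needed is gone and the status object is forwarded as-is. A minimal caller-side sketch, where tunerLnb and voltage are assumed names for illustration:

    ::ndk::ScopedAStatus s = tunerLnb->setVoltage(voltage);
    if (!s.isOk()) {
        // Service-specific errors carry the HAL's Result code.
        ALOGE("setVoltage failed: %d", s.getServiceSpecificError());
    }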
diff --git a/services/tuner/TunerLnb.h b/services/tuner/TunerLnb.h
index 500d072..72988a6 100644
--- a/services/tuner/TunerLnb.h
+++ b/services/tuner/TunerLnb.h
@@ -17,55 +17,61 @@
 #ifndef ANDROID_MEDIA_TUNERFLNB_H
 #define ANDROID_MEDIA_TUNERFLNB_H
 
+#include <aidl/android/hardware/tv/tuner/BnLnbCallback.h>
+#include <aidl/android/hardware/tv/tuner/ILnb.h>
 #include <aidl/android/media/tv/tuner/BnTunerLnb.h>
-#include <android/hardware/tv/tuner/1.0/ILnb.h>
-#include <android/hardware/tv/tuner/1.0/ILnbCallback.h>
-#include <media/stagefright/foundation/ADebug.h>
 #include <utils/Log.h>
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::tv::tuner::BnTunerLnb;
-using ::aidl::android::media::tv::tuner::ITunerLnbCallback;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::tv::tuner::V1_0::ILnb;
-using ::android::hardware::tv::tuner::V1_0::ILnbCallback;
-using ::android::hardware::tv::tuner::V1_0::LnbEventType;
+using ::aidl::android::hardware::tv::tuner::BnLnbCallback;
+using ::aidl::android::hardware::tv::tuner::ILnb;
+using ::aidl::android::hardware::tv::tuner::LnbEventType;
+using ::aidl::android::hardware::tv::tuner::LnbPosition;
+using ::aidl::android::hardware::tv::tuner::LnbTone;
+using ::aidl::android::hardware::tv::tuner::LnbVoltage;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
 class TunerLnb : public BnTunerLnb {
 
 public:
-    TunerLnb(sp<ILnb> lnb, int id);
+    TunerLnb(shared_ptr<ILnb> lnb, int id);
     virtual ~TunerLnb();
-    Status setCallback(const shared_ptr<ITunerLnbCallback>& tunerLnbCallback) override;
-    Status setVoltage(int voltage) override;
-    Status setTone(int tone) override;
-    Status setSatellitePosition(int position) override;
-    Status sendDiseqcMessage(const vector<uint8_t>& diseqcMessage) override;
-    Status close() override;
+
+    ::ndk::ScopedAStatus setCallback(
+            const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) override;
+    ::ndk::ScopedAStatus setVoltage(LnbVoltage in_voltage) override;
+    ::ndk::ScopedAStatus setTone(LnbTone in_tone) override;
+    ::ndk::ScopedAStatus setSatellitePosition(LnbPosition in_position) override;
+    ::ndk::ScopedAStatus sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) override;
+    ::ndk::ScopedAStatus close() override;
 
     int getId() { return mId; }
 
-    struct LnbCallback : public ILnbCallback {
+    struct LnbCallback : public BnLnbCallback {
         LnbCallback(const shared_ptr<ITunerLnbCallback> tunerLnbCallback)
-                : mTunerLnbCallback(tunerLnbCallback) {};
+              : mTunerLnbCallback(tunerLnbCallback){};
 
-        virtual Return<void> onEvent(const LnbEventType lnbEventType);
-        virtual Return<void> onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage);
+        ::ndk::ScopedAStatus onEvent(const LnbEventType lnbEventType) override;
+        ::ndk::ScopedAStatus onDiseqcMessage(const vector<uint8_t>& diseqcMessage) override;
 
         shared_ptr<ITunerLnbCallback> mTunerLnbCallback;
     };
 
 private:
     int mId;
-    sp<ILnb> mLnb;
+    shared_ptr<ILnb> mLnb;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERFLNB_H
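With the typed AIDL signatures above, the raw int parameters and the matching static_casts in the old implementation disappear; callers pass the LnbVoltage/LnbTone/LnbPosition enums directly. A one-line usage sketch (the enumerator name is an assumption about the AIDL LnbVoltage definition):

    tunerLnb->setVoltage(LnbVoltage::VOLTAGE_13V);  // strongly typed, no cast needed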
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index 5b4129a..4833aaf 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2020, The Android Open Source Project
+ * Copyright (c) 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,369 +14,342 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
 #define LOG_TAG "TunerService"
 
-#include <android/binder_manager.h>
-#include <android/content/pm/IPackageManagerNative.h>
-#include <binder/IServiceManager.h>
-#include <utils/Log.h>
 #include "TunerService.h"
-#include "TunerFrontend.h"
-#include "TunerLnb.h"
+
+#include <aidl/android/hardware/tv/tuner/IDemux.h>
+#include <aidl/android/hardware/tv/tuner/IDescrambler.h>
+#include <aidl/android/hardware/tv/tuner/IFrontend.h>
+#include <aidl/android/hardware/tv/tuner/ILnb.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <android/binder_manager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/PermissionCache.h>
+#include <utils/Log.h>
+
+#include <string>
+
 #include "TunerDemux.h"
 #include "TunerDescrambler.h"
+#include "TunerFrontend.h"
+#include "TunerHelper.h"
+#include "TunerLnb.h"
 
-using ::aidl::android::media::tv::tuner::TunerFrontendAnalogCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendAtsc3Capabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendAtscCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendCableCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendDvbsCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendDvbtCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendIsdbs3Capabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendIsdbsCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendIsdbtCapabilities;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
-using ::android::hardware::tv::tuner::V1_0::FrontendId;
-using ::android::hardware::tv::tuner::V1_0::FrontendType;
-using ::android::hardware::tv::tuner::V1_0::IFrontend;
-using ::android::hardware::tv::tuner::V1_0::ILnb;
-using ::android::hardware::tv::tuner::V1_0::LnbId;
-using ::android::hardware::tv::tuner::V1_0::Result;
-using ::android::hardware::tv::tuner::V1_1::FrontendDtmbCapabilities;
+using ::aidl::android::hardware::tv::tuner::IDemux;
+using ::aidl::android::hardware::tv::tuner::IDescrambler;
+using ::aidl::android::hardware::tv::tuner::IFrontend;
+using ::aidl::android::hardware::tv::tuner::Result;
+using ::android::IPCThreadState;
+using ::android::PermissionCache;
+using ::android::sp;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+shared_ptr<TunerService> TunerService::sTunerService = nullptr;
 
 TunerService::TunerService() {
-    sp<IServiceManager> serviceMgr = defaultServiceManager();
-    sp<content::pm::IPackageManagerNative> packageMgr;
-    if (serviceMgr.get() == nullptr) {
-        ALOGE("%s: Cannot find service manager", __func__);
-        return;
-    } else {
-        sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
-        packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
-    }
-
-    bool hasFeature = false;
-    if (packageMgr != nullptr) {
-        binder::Status status = packageMgr->hasSystemFeature(FEATURE_TUNER, 0, &hasFeature);
-        if (!status.isOk()) {
-            ALOGE("%s: hasSystemFeature failed: %s",
-                    __func__, status.exceptionMessage().c_str());
-            return;
-        }
-        if (!hasFeature) {
-            ALOGD("Current device does not support tuner feaure.");
-            return;
-        }
-    } else {
-        ALOGD("%s: Cannot find package manager.", __func__);
+    if (!TunerHelper::checkTunerFeature()) {
+        ALOGD("Device doesn't have tuner hardware.");
         return;
     }
 
-    ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
-    mTunerResourceManager = ITunerResourceManager::fromBinder(binder);
     updateTunerResources();
 }
 
 TunerService::~TunerService() {}
 
 binder_status_t TunerService::instantiate() {
-    shared_ptr<TunerService> service =
-            ::ndk::SharedRefBase::make<TunerService>();
-    return AServiceManager_addService(service->asBinder().get(), getServiceName());
+    sTunerService = ::ndk::SharedRefBase::make<TunerService>();
+    return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
+}
+
+shared_ptr<TunerService> TunerService::getTunerService() {
+    return sTunerService;
 }
 
 bool TunerService::hasITuner() {
-    ALOGD("hasITuner");
+    ALOGV("hasITuner");
     if (mTuner != nullptr) {
         return true;
     }
-    mTuner = ITuner::getService();
-    if (mTuner == nullptr) {
-        ALOGE("Failed to get ITuner service");
+    const string tunerServiceName = string() + ITuner::descriptor + "/default";
+    if (AServiceManager_isDeclared(tunerServiceName.c_str())) {
+        ::ndk::SpAIBinder binder(AServiceManager_waitForService(tunerServiceName.c_str()));
+        mTuner = ITuner::fromBinder(binder);
+    } else {
+        mTuner = nullptr;
+        ALOGE("Failed to get Tuner HAL Service");
         return false;
     }
-    mTunerVersion = TUNER_HAL_VERSION_1_0;
-    mTuner_1_1 = ::android::hardware::tv::tuner::V1_1::ITuner::castFrom(mTuner);
-    if (mTuner_1_1 != nullptr) {
-        mTunerVersion = TUNER_HAL_VERSION_1_1;
-    } else {
-        ALOGE("Failed to get ITuner_1_1 service");
-    }
+
+    mTunerVersion = TUNER_HAL_VERSION_2_0;
+    // TODO: Enable this after Tuner HAL is frozen.
+    // if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
+    //  // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
+    //  mTunerVersion = (mTunerVersion + 1) << 16;
+    //}
+
     return true;
 }
 
-bool TunerService::hasITuner_1_1() {
-    ALOGD("hasITuner_1_1");
-    hasITuner();
-    return (mTunerVersion == TUNER_HAL_VERSION_1_1);
-}
-
-Status TunerService::openDemux(
-        int /* demuxHandle */, std::shared_ptr<ITunerDemux>* _aidl_return) {
-    ALOGD("openDemux");
+::ndk::ScopedAStatus TunerService::openDemux(int32_t /* in_demuxHandle */,
+                                             shared_ptr<ITunerDemux>* _aidl_return) {
+    ALOGV("openDemux");
     if (!hasITuner()) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::NOT_INITIALIZED));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
-    Result res;
-    uint32_t id;
-    sp<IDemux> demuxSp = nullptr;
-    shared_ptr<ITunerDemux> tunerDemux = nullptr;
-    mTuner->openDemux([&](Result r, uint32_t demuxId, const sp<IDemux>& demux) {
-        demuxSp = demux;
-        id = demuxId;
-        res = r;
-        ALOGD("open demux, id = %d", demuxId);
-    });
-    if (res == Result::SUCCESS) {
-        tunerDemux = ::ndk::SharedRefBase::make<TunerDemux>(demuxSp, id);
-        *_aidl_return = tunerDemux->ref<ITunerDemux>();
-        return Status::ok();
+    vector<int32_t> id;
+    shared_ptr<IDemux> demux;
+    auto status = mTuner->openDemux(&id, &demux);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, id[0]);
     }
 
-    ALOGW("open demux failed, res = %d", res);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    return status;
 }
 
-Status TunerService::getDemuxCaps(TunerDemuxCapabilities* _aidl_return) {
-    ALOGD("getDemuxCaps");
+::ndk::ScopedAStatus TunerService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
+    ALOGV("getDemuxCaps");
     if (!hasITuner()) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::NOT_INITIALIZED));
-    }
-    Result res;
-    DemuxCapabilities caps;
-    mTuner->getDemuxCaps([&](Result r, const DemuxCapabilities& demuxCaps) {
-        caps = demuxCaps;
-        res = r;
-    });
-    if (res == Result::SUCCESS) {
-        *_aidl_return = getAidlDemuxCaps(caps);
-        return Status::ok();
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    ALOGW("Get demux caps failed, res = %d", res);
-    return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    return mTuner->getDemuxCaps(_aidl_return);
 }
 
-Status TunerService::getFrontendIds(vector<int32_t>* ids) {
+::ndk::ScopedAStatus TunerService::getFrontendIds(vector<int32_t>* ids) {
     if (!hasITuner()) {
-        return Status::fromServiceSpecificError(
-                static_cast<int32_t>(Result::NOT_INITIALIZED));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
-    hidl_vec<FrontendId> feIds;
-    Result res = getHidlFrontendIds(feIds);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    ids->resize(feIds.size());
-    copy(feIds.begin(), feIds.end(), ids->begin());
 
-    return Status::ok();
+    return mTuner->getFrontendIds(ids);
 }
 
-Status TunerService::getFrontendInfo(int32_t id, TunerFrontendInfo* _aidl_return) {
+::ndk::ScopedAStatus TunerService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
     if (!hasITuner()) {
         ALOGE("ITuner service is not init.");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    FrontendInfo info;
-    Result res = getHidlFrontendInfo(id, info);
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-
-    TunerFrontendInfo tunerInfo = convertToAidlFrontendInfo(info);
-    *_aidl_return = tunerInfo;
-    return Status::ok();
+    return mTuner->getFrontendInfo(id, _aidl_return);
 }
 
-Status TunerService::getFrontendDtmbCapabilities(
-        int32_t id, TunerFrontendDtmbCapabilities* _aidl_return) {
-    if (!hasITuner_1_1()) {
-        ALOGE("ITuner_1_1 service is not init.");
+::ndk::ScopedAStatus TunerService::openFrontend(int32_t frontendHandle,
+                                                shared_ptr<ITunerFrontend>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("ITuner service is not init.");
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res;
-    FrontendDtmbCapabilities dtmbCaps;
-    mTuner_1_1->getFrontendDtmbCapabilities(id,
-            [&](Result r, const FrontendDtmbCapabilities& caps) {
-        dtmbCaps = caps;
-        res = r;
-    });
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
+    int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
+    shared_ptr<IFrontend> frontend;
+    auto status = mTuner->openFrontendById(id, &frontend);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerFrontend>(frontend, id);
     }
 
-    TunerFrontendDtmbCapabilities aidlDtmbCaps{
-        .transmissionModeCap = (int)dtmbCaps.transmissionModeCap,
-        .bandwidthCap = (int)dtmbCaps.bandwidthCap,
-        .modulationCap = (int)dtmbCaps.modulationCap,
-        .codeRateCap = (int)dtmbCaps.codeRateCap,
-        .guardIntervalCap = (int)dtmbCaps.guardIntervalCap,
-        .interleaveModeCap = (int)dtmbCaps.interleaveModeCap,
-    };
-
-    *_aidl_return = aidlDtmbCaps;
-    return Status::ok();
+    return status;
 }
 
-Status TunerService::openFrontend(
-        int32_t frontendHandle, shared_ptr<ITunerFrontend>* _aidl_return) {
-    if (!hasITuner()) {
-        ALOGE("ITuner service is not init.");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
-    }
-
-    Result status;
-    sp<IFrontend> frontend;
-    int id = getResourceIdFromHandle(frontendHandle, FRONTEND);
-    mTuner->openFrontendById(id, [&](Result result, const sp<IFrontend>& fe) {
-        frontend = fe;
-        status = result;
-    });
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerFrontend>(frontend, id);
-    return Status::ok();
-}
-
-Status TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+::ndk::ScopedAStatus TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
     if (!hasITuner()) {
         ALOGD("get ITuner failed");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    sp<ILnb> lnb;
-    int id = getResourceIdFromHandle(lnbHandle, LNB);
-    mTuner->openLnbById(id, [&](Result result, const sp<ILnb>& lnbSp){
-        lnb = lnbSp;
-        status = result;
-    });
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    shared_ptr<ILnb> lnb;
+    int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
+    auto status = mTuner->openLnbById(id, &lnb);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerLnb>(lnb, id);
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerLnb>(lnb, id);
-    return Status::ok();
+    return status;
 }
 
-Status TunerService::openLnbByName(const string& lnbName, shared_ptr<ITunerLnb>* _aidl_return) {
+::ndk::ScopedAStatus TunerService::openLnbByName(const string& lnbName,
+                                                 shared_ptr<ITunerLnb>* _aidl_return) {
     if (!hasITuner()) {
         ALOGE("get ITuner failed");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    int lnbId;
-    Result status;
-    sp<ILnb> lnb;
-    mTuner->openLnbByName(lnbName, [&](Result r, LnbId id, const sp<ILnb>& lnbSp) {
-        status = r;
-        lnb = lnbSp;
-        lnbId = (int)id;
-    });
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    vector<int32_t> id;
+    shared_ptr<ILnb> lnb;
+    auto status = mTuner->openLnbByName(lnbName, &id, &lnb);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerLnb>(lnb, id[0]);
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerLnb>(lnb, lnbId);
-    return Status::ok();
+    return ::ndk::ScopedAStatus::ok();
 }
 
-Status TunerService::openDescrambler(int32_t /*descramblerHandle*/,
-            std::shared_ptr<ITunerDescrambler>* _aidl_return) {
+::ndk::ScopedAStatus TunerService::openDescrambler(int32_t /*descramblerHandle*/,
+                                                   shared_ptr<ITunerDescrambler>* _aidl_return) {
     if (!hasITuner()) {
         ALOGD("get ITuner failed");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    sp<IDescrambler> descrambler;
-    //int id = getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
-    mTuner->openDescrambler([&](Result r, const sp<IDescrambler>& descramblerSp) {
-        status = r;
-        descrambler = descramblerSp;
-    });
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
+    shared_ptr<IDescrambler> descrambler;
+    // int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
+    auto status = mTuner->openDescrambler(&descrambler);
+    if (status.isOk()) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerDescrambler>(descrambler);
     }
 
-    *_aidl_return = ::ndk::SharedRefBase::make<TunerDescrambler>(descrambler);
-    return Status::ok();
+    return status;
+}
+
+::ndk::ScopedAStatus TunerService::getTunerHalVersion(int* _aidl_return) {
+    hasITuner();
+    *_aidl_return = mTunerVersion;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerService::openSharedFilter(const string& in_filterToken,
+                                                    const shared_ptr<ITunerFilterCallback>& in_cb,
+                                                    shared_ptr<ITunerFilter>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (!PermissionCache::checkCallingPermission(sSharedFilterPermission)) {
+        ALOGE("Request requires android.permission.ACCESS_TV_SHARED_FILTER");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Mutex::Autolock _l(mSharedFiltersLock);
+    if (mSharedFilters.find(in_filterToken) == mSharedFilters.end()) {
+        *_aidl_return = nullptr;
+        ALOGD("fail to find %s", in_filterToken.c_str());
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    shared_ptr<TunerFilter> filter = mSharedFilters.at(in_filterToken);
+    IPCThreadState* ipc = IPCThreadState::self();
+    const int pid = ipc->getCallingPid();
+    if (!filter->isSharedFilterAllowed(pid)) {
+        *_aidl_return = nullptr;
+        ALOGD("shared filter %s is opened in the same process", in_filterToken.c_str());
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    filter->attachSharedFilterCallback(in_cb);
+
+    *_aidl_return = filter;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mTuner->setLna(bEnable);
+}
+
+::ndk::ScopedAStatus TunerService::setMaxNumberOfFrontends(FrontendType in_frontendType,
+                                                           int32_t in_maxNumber) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mTuner->setMaxNumberOfFrontends(in_frontendType, in_maxNumber);
+}
+
+::ndk::ScopedAStatus TunerService::getMaxNumberOfFrontends(FrontendType in_frontendType,
+                                                           int32_t* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    return mTuner->getMaxNumberOfFrontends(in_frontendType, _aidl_return);
+}
+
+string TunerService::addFilterToShared(const shared_ptr<TunerFilter>& sharedFilter) {
+    Mutex::Autolock _l(mSharedFiltersLock);
+
+    // Use sharedFilter address as token.
+    string token = to_string(reinterpret_cast<std::uintptr_t>(sharedFilter.get()));
+    mSharedFilters[token] = sharedFilter;
+    return token;
+}
+
+void TunerService::removeSharedFilter(const shared_ptr<TunerFilter>& sharedFilter) {
+    Mutex::Autolock _l(mSharedFiltersLock);
+
+    // Use sharedFilter address as token.
+    mSharedFilters.erase(to_string(reinterpret_cast<std::uintptr_t>(sharedFilter.get())));
 }
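
As a side note, the shared-filter bookkeeping above amounts to a mutex-guarded map keyed by the filter object's address rendered as a decimal string. A minimal self-contained sketch of that scheme follows; the names FakeFilter and SharedFilterRegistry are stand-ins, not part of the source.

    #include <cstdint>
    #include <map>
    #include <memory>
    #include <mutex>
    #include <string>

    struct FakeFilter {};  // stand-in for TunerFilter

    class SharedFilterRegistry {
    public:
        // Mirrors addFilterToShared(): the pointer value becomes the token.
        std::string add(const std::shared_ptr<FakeFilter>& filter) {
            std::lock_guard<std::mutex> lock(mLock);
            std::string token =
                    std::to_string(reinterpret_cast<std::uintptr_t>(filter.get()));
            mFilters[token] = filter;
            return token;
        }
        // Mirrors the lookup in openSharedFilter(): unknown tokens yield nullptr.
        std::shared_ptr<FakeFilter> find(const std::string& token) {
            std::lock_guard<std::mutex> lock(mLock);
            auto it = mFilters.find(token);
            return it == mFilters.end() ? nullptr : it->second;
        }
        // Mirrors removeSharedFilter().
        void remove(const std::shared_ptr<FakeFilter>& filter) {
            std::lock_guard<std::mutex> lock(mLock);
            mFilters.erase(std::to_string(reinterpret_cast<std::uintptr_t>(filter.get())));
        }
    private:
        std::mutex mLock;
        std::map<std::string, std::shared_ptr<FakeFilter>> mFilters;
    };

    int main() {
        SharedFilterRegistry registry;
        auto filter = std::make_shared<FakeFilter>();
        std::string token = registry.add(filter);      // owner publishes the token
        bool found = registry.find(token) != nullptr;  // another client redeems it
        registry.remove(filter);
        return found ? 0 : 1;
    }
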
 
 void TunerService::updateTunerResources() {
-    if (!hasITuner() || mTunerResourceManager == NULL) {
+    if (!hasITuner()) {
         ALOGE("Failed to updateTunerResources");
         return;
     }
 
-    updateFrontendResources();
-    updateLnbResources();
-    // TODO: update Demux, Descrambler.
+    TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
 }
 
-Status TunerService::getTunerHalVersion(int* _aidl_return) {
-    hasITuner();
-    *_aidl_return = mTunerVersion;
-    return Status::ok();
-}
-
-void TunerService::updateFrontendResources() {
-    hidl_vec<FrontendId> ids;
-    Result res = getHidlFrontendIds(ids);
-    if (res != Result::SUCCESS) {
-        return;
-    }
+vector<TunerFrontendInfo> TunerService::getTRMFrontendInfos() {
     vector<TunerFrontendInfo> infos;
+    vector<int32_t> ids;
+    auto status = mTuner->getFrontendIds(&ids);
+    if (!status.isOk()) {
+        return infos;
+    }
+
     for (int i = 0; i < ids.size(); i++) {
         FrontendInfo frontendInfo;
-        Result res = getHidlFrontendInfo((int)ids[i], frontendInfo);
-        if (res != Result::SUCCESS) {
+        auto res = mTuner->getFrontendInfo(ids[i], &frontendInfo);
+        if (!res.isOk()) {
             continue;
         }
         TunerFrontendInfo tunerFrontendInfo{
-            .handle = getResourceHandleFromId((int)ids[i], FRONTEND),
-            .type = static_cast<int>(frontendInfo.type),
-            .exclusiveGroupId = static_cast<int>(frontendInfo.exclusiveGroupId),
+                .handle = TunerHelper::getResourceHandleFromId((int)ids[i], FRONTEND),
+                .type = static_cast<int>(frontendInfo.type),
+                .exclusiveGroupId = frontendInfo.exclusiveGroupId,
         };
         infos.push_back(tunerFrontendInfo);
     }
-    mTunerResourceManager->setFrontendInfoList(infos);
+
+    return infos;
 }
 
-void TunerService::updateLnbResources() {
-    vector<int> handles = getLnbHandles();
-    if (handles.size() == 0) {
-        return;
-    }
-    mTunerResourceManager->setLnbInfoList(handles);
-}
-
-vector<int> TunerService::getLnbHandles() {
-    vector<int> lnbHandles;
-    if (mTuner != NULL) {
-        Result res;
-        vector<LnbId> lnbIds;
-        mTuner->getLnbIds([&](Result r, const hardware::hidl_vec<LnbId>& ids) {
-            lnbIds = ids;
-            res = r;
-        });
-        if (res != Result::SUCCESS || lnbIds.size() == 0) {
-        } else {
+vector<int32_t> TunerService::getTRMLnbHandles() {
+    vector<int32_t> lnbHandles;
+    if (mTuner != nullptr) {
+        vector<int32_t> lnbIds;
+        auto res = mTuner->getLnbIds(&lnbIds);
+        if (res.isOk()) {
             for (int i = 0; i < lnbIds.size(); i++) {
-                lnbHandles.push_back(getResourceHandleFromId((int)lnbIds[i], LNB));
+                lnbHandles.push_back(TunerHelper::getResourceHandleFromId(lnbIds[i], LNB));
             }
         }
     }
@@ -384,186 +357,8 @@
     return lnbHandles;
 }
 
-Result TunerService::getHidlFrontendIds(hidl_vec<FrontendId>& ids) {
-    if (mTuner == NULL) {
-        return Result::NOT_INITIALIZED;
-    }
-    Result res;
-    mTuner->getFrontendIds([&](Result r, const hidl_vec<FrontendId>& frontendIds) {
-        ids = frontendIds;
-        res = r;
-    });
-    return res;
-}
-
-Result TunerService::getHidlFrontendInfo(int id, FrontendInfo& info) {
-    if (mTuner == NULL) {
-        return Result::NOT_INITIALIZED;
-    }
-    Result res;
-    mTuner->getFrontendInfo(id, [&](Result r, const FrontendInfo& feInfo) {
-        info = feInfo;
-        res = r;
-    });
-    return res;
-}
-
-TunerDemuxCapabilities TunerService::getAidlDemuxCaps(DemuxCapabilities caps) {
-    TunerDemuxCapabilities aidlCaps{
-        .numDemux = (int)caps.numDemux,
-        .numRecord = (int)caps.numRecord,
-        .numPlayback = (int)caps.numPlayback,
-        .numTsFilter = (int)caps.numTsFilter,
-        .numSectionFilter = (int)caps.numSectionFilter,
-        .numAudioFilter = (int)caps.numAudioFilter,
-        .numVideoFilter = (int)caps.numVideoFilter,
-        .numPesFilter = (int)caps.numPesFilter,
-        .numPcrFilter = (int)caps.numPcrFilter,
-        .numBytesInSectionFilter = (int)caps.numBytesInSectionFilter,
-        .filterCaps = (int)caps.filterCaps,
-        .bTimeFilter = caps.bTimeFilter,
-    };
-    aidlCaps.linkCaps.resize(caps.linkCaps.size());
-    copy(caps.linkCaps.begin(), caps.linkCaps.end(), aidlCaps.linkCaps.begin());
-    return aidlCaps;
-}
-
-TunerFrontendInfo TunerService::convertToAidlFrontendInfo(FrontendInfo halInfo) {
-    TunerFrontendInfo info{
-        .type = (int)halInfo.type,
-        .minFrequency = (int)halInfo.minFrequency,
-        .maxFrequency = (int)halInfo.maxFrequency,
-        .minSymbolRate = (int)halInfo.minSymbolRate,
-        .maxSymbolRate = (int)halInfo.maxSymbolRate,
-        .acquireRange = (int)halInfo.acquireRange,
-        .exclusiveGroupId = (int)halInfo.exclusiveGroupId,
-    };
-    for (int i = 0; i < halInfo.statusCaps.size(); i++) {
-        info.statusCaps.push_back((int)halInfo.statusCaps[i]);
-    }
-
-    TunerFrontendCapabilities caps;
-    switch (halInfo.type) {
-        case FrontendType::ANALOG: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::analogCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendAnalogCapabilities analogCaps{
-                    .typeCap = (int)halInfo.frontendCaps.analogCaps().typeCap,
-                    .sifStandardCap = (int)halInfo.frontendCaps.analogCaps().sifStandardCap,
-                };
-                caps.set<TunerFrontendCapabilities::analogCaps>(analogCaps);
-            }
-            break;
-        }
-        case FrontendType::ATSC: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::atscCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendAtscCapabilities atscCaps{
-                    .modulationCap = (int)halInfo.frontendCaps.atscCaps().modulationCap,
-                };
-                caps.set<TunerFrontendCapabilities::atscCaps>(atscCaps);
-            }
-            break;
-        }
-        case FrontendType::ATSC3: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::atsc3Caps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendAtsc3Capabilities atsc3Caps{
-                    .bandwidthCap = (int)halInfo.frontendCaps.atsc3Caps().bandwidthCap,
-                    .modulationCap = (int)halInfo.frontendCaps.atsc3Caps().modulationCap,
-                    .timeInterleaveModeCap =
-                            (int)halInfo.frontendCaps.atsc3Caps().timeInterleaveModeCap,
-                    .codeRateCap = (int)halInfo.frontendCaps.atsc3Caps().codeRateCap,
-                    .demodOutputFormatCap
-                        = (int)halInfo.frontendCaps.atsc3Caps().demodOutputFormatCap,
-                    .fecCap = (int)halInfo.frontendCaps.atsc3Caps().fecCap,
-                };
-                caps.set<TunerFrontendCapabilities::atsc3Caps>(atsc3Caps);
-            }
-            break;
-        }
-        case FrontendType::DVBC: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbcCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendCableCapabilities cableCaps{
-                    .modulationCap = (int)halInfo.frontendCaps.dvbcCaps().modulationCap,
-                    .codeRateCap = (int64_t)halInfo.frontendCaps.dvbcCaps().fecCap,
-                    .annexCap = (int)halInfo.frontendCaps.dvbcCaps().annexCap,
-                };
-                caps.set<TunerFrontendCapabilities::cableCaps>(cableCaps);
-            }
-            break;
-        }
-        case FrontendType::DVBS: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbsCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendDvbsCapabilities dvbsCaps{
-                    .modulationCap = (int)halInfo.frontendCaps.dvbsCaps().modulationCap,
-                    .codeRateCap = (long)halInfo.frontendCaps.dvbsCaps().innerfecCap,
-                    .standard = (int)halInfo.frontendCaps.dvbsCaps().standard,
-                };
-                caps.set<TunerFrontendCapabilities::dvbsCaps>(dvbsCaps);
-            }
-            break;
-        }
-        case FrontendType::DVBT: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbtCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendDvbtCapabilities dvbtCaps{
-                    .transmissionModeCap = (int)halInfo.frontendCaps.dvbtCaps().transmissionModeCap,
-                    .bandwidthCap = (int)halInfo.frontendCaps.dvbtCaps().bandwidthCap,
-                    .constellationCap = (int)halInfo.frontendCaps.dvbtCaps().constellationCap,
-                    .codeRateCap = (int)halInfo.frontendCaps.dvbtCaps().coderateCap,
-                    .hierarchyCap = (int)halInfo.frontendCaps.dvbtCaps().hierarchyCap,
-                    .guardIntervalCap = (int)halInfo.frontendCaps.dvbtCaps().guardIntervalCap,
-                    .isT2Supported = (bool)halInfo.frontendCaps.dvbtCaps().isT2Supported,
-                    .isMisoSupported = (bool)halInfo.frontendCaps.dvbtCaps().isMisoSupported,
-                };
-                caps.set<TunerFrontendCapabilities::dvbtCaps>(dvbtCaps);
-            }
-            break;
-        }
-        case FrontendType::ISDBS: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbsCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendIsdbsCapabilities isdbsCaps{
-                    .modulationCap = (int)halInfo.frontendCaps.isdbsCaps().modulationCap,
-                    .codeRateCap = (int)halInfo.frontendCaps.isdbsCaps().coderateCap,
-                };
-                caps.set<TunerFrontendCapabilities::isdbsCaps>(isdbsCaps);
-            }
-            break;
-        }
-        case FrontendType::ISDBS3: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbs3Caps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendIsdbs3Capabilities isdbs3Caps{
-                    .modulationCap = (int)halInfo.frontendCaps.isdbs3Caps().modulationCap,
-                    .codeRateCap = (int)halInfo.frontendCaps.isdbs3Caps().coderateCap,
-                };
-                caps.set<TunerFrontendCapabilities::isdbs3Caps>(isdbs3Caps);
-            }
-            break;
-        }
-        case FrontendType::ISDBT: {
-            if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbtCaps
-                    == halInfo.frontendCaps.getDiscriminator()) {
-                TunerFrontendIsdbtCapabilities isdbtCaps{
-                    .modeCap = (int)halInfo.frontendCaps.isdbtCaps().modeCap,
-                    .bandwidthCap = (int)halInfo.frontendCaps.isdbtCaps().bandwidthCap,
-                    .modulationCap = (int)halInfo.frontendCaps.isdbtCaps().modulationCap,
-                    .codeRateCap = (int)halInfo.frontendCaps.isdbtCaps().coderateCap,
-                    .guardIntervalCap = (int)halInfo.frontendCaps.isdbtCaps().guardIntervalCap,
-                };
-                caps.set<TunerFrontendCapabilities::isdbtCaps>(isdbtCaps);
-            }
-            break;
-        }
-        default:
-            break;
-    }
-
-    info.caps = caps;
-    return info;
-}
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
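
To make the mechanical change in this file easier to follow: the HIDL proxies delivered results through synchronous lambdas, while the AIDL proxies return a status object and fill out-parameters. A compilable stand-in comparison, using placeholder types (FakeResult, FakeDemux) rather than the real generated classes:

    #include <functional>
    #include <memory>

    // Placeholders; the real code uses Result / ndk::ScopedAStatus and generated proxies.
    enum class FakeResult { SUCCESS, UNAVAILABLE };
    struct FakeDemux {};

    // Old HIDL style: result, id, and object arrive through a synchronous callback.
    void openDemuxHidl(const std::function<void(FakeResult, int,
                                                std::shared_ptr<FakeDemux>)>& cb) {
        cb(FakeResult::SUCCESS, /*demuxId=*/0, std::make_shared<FakeDemux>());
    }

    // New AIDL style: out-parameters plus a status return, as in the rewritten openDemux().
    FakeResult openDemuxAidl(int* outId, std::shared_ptr<FakeDemux>* outDemux) {
        *outId = 0;
        *outDemux = std::make_shared<FakeDemux>();
        return FakeResult::SUCCESS;
    }

    int main() {
        openDemuxHidl([](FakeResult, int, std::shared_ptr<FakeDemux>) {});
        int id;
        std::shared_ptr<FakeDemux> demux;
        return openDemuxAidl(&id, &demux) == FakeResult::SUCCESS ? 0 : 1;
    }
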
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index f8e2ee6..7fc2aa4 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2020, The Android Open Source Project
+ * Copyright (c) 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,141 +17,95 @@
 #ifndef ANDROID_MEDIA_TUNERSERVICE_H
 #define ANDROID_MEDIA_TUNERSERVICE_H
 
-#include <aidl/android/media/tv/tunerresourcemanager/ITunerResourceManager.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterEvent.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterStatus.h>
+#include <aidl/android/hardware/tv/tuner/ITuner.h>
 #include <aidl/android/media/tv/tuner/BnTunerService.h>
-#include <android/hardware/tv/tuner/1.1/ITuner.h>
-#include <fmq/AidlMessageQueue.h>
-#include <fmq/EventFlag.h>
-#include <fmq/MessageQueue.h>
+#include <aidl/android/media/tv/tunerresourcemanager/TunerFrontendInfo.h>
+#include <utils/Mutex.h>
 
-using ::aidl::android::hardware::common::fmq::GrantorDescriptor;
-using ::aidl::android::hardware::common::fmq::MQDescriptor;
-using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+#include <map>
+
+#include "TunerFilter.h"
+#include "TunerHelper.h"
+
+using ::aidl::android::hardware::tv::tuner::DemuxCapabilities;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::FrontendInfo;
+using ::aidl::android::hardware::tv::tuner::FrontendType;
+using ::aidl::android::hardware::tv::tuner::ITuner;
 using ::aidl::android::media::tv::tuner::BnTunerService;
 using ::aidl::android::media::tv::tuner::ITunerDemux;
-using ::aidl::android::media::tv::tuner::ITunerDescrambler;
+using ::aidl::android::media::tv::tuner::ITunerFilter;
+using ::aidl::android::media::tv::tuner::ITunerFilterCallback;
 using ::aidl::android::media::tv::tuner::ITunerFrontend;
 using ::aidl::android::media::tv::tuner::ITunerLnb;
-using ::aidl::android::media::tv::tuner::TunerDemuxCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendDtmbCapabilities;
-using ::aidl::android::media::tv::tuner::TunerFrontendInfo;
-using ::aidl::android::media::tv::tunerresourcemanager::ITunerResourceManager;
-
-using ::android::hardware::details::logError;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::kSynchronizedReadWrite;
-using ::android::hardware::EventFlag;
-using ::android::hardware::MessageQueue;
-using ::android::hardware::MQDescriptorSync;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hardware::tv::tuner::V1_0::DemuxCapabilities;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
-using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
-using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
-using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
-using ::android::hardware::tv::tuner::V1_0::FrontendId;
-using ::android::hardware::tv::tuner::V1_0::FrontendInfo;
-using ::android::hardware::tv::tuner::V1_0::IDemux;
-using ::android::hardware::tv::tuner::V1_0::IDescrambler;
-using ::android::hardware::tv::tuner::V1_0::IFilter;
-using ::android::hardware::tv::tuner::V1_0::IFilterCallback;
-using ::android::hardware::tv::tuner::V1_0::ITuner;
-using ::android::hardware::tv::tuner::V1_0::Result;
-
-using Status = ::ndk::ScopedAStatus;
+using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
+using ::android::Mutex;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
-
-const static int TUNER_HAL_VERSION_UNKNOWN = 0;
-const static int TUNER_HAL_VERSION_1_0 = 1 << 16;
-const static int TUNER_HAL_VERSION_1_1 = (1 << 16) | 1;
-// System Feature defined in PackageManager
-static const ::android::String16 FEATURE_TUNER(::android::String16("android.hardware.tv.tuner"));
-
-typedef enum {
-    FRONTEND,
-    LNB,
-    DEMUX,
-    DESCRAMBLER,
-} TunerResourceType;
-
-struct FilterCallback : public IFilterCallback {
-    ~FilterCallback() {}
-    Return<void> onFilterEvent(const DemuxFilterEvent&) {
-        return Void();
-    }
-    Return<void> onFilterStatus(const DemuxFilterStatus) {
-        return Void();
-    }
-};
+namespace media {
+namespace tv {
+namespace tuner {
 
 class TunerService : public BnTunerService {
-    typedef AidlMessageQueue<int8_t, SynchronizedReadWrite> AidlMessageQueue;
-    typedef MessageQueue<uint8_t, kSynchronizedReadWrite> HidlMessageQueue;
-    typedef MQDescriptor<int8_t, SynchronizedReadWrite> AidlMQDesc;
-
 public:
     static char const *getServiceName() { return "media.tuner"; }
     static binder_status_t instantiate();
     TunerService();
     virtual ~TunerService();
 
-    Status getFrontendIds(vector<int32_t>* ids) override;
-    Status getFrontendInfo(int32_t id, TunerFrontendInfo* _aidl_return) override;
-    Status getFrontendDtmbCapabilities(
-            int32_t id, TunerFrontendDtmbCapabilities* _aidl_return) override;
-    Status openFrontend(
-            int32_t frontendHandle, shared_ptr<ITunerFrontend>* _aidl_return) override;
-    Status openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) override;
-    Status openLnbByName(const string& lnbName, shared_ptr<ITunerLnb>* _aidl_return) override;
-    Status openDemux(int32_t demuxHandle, std::shared_ptr<ITunerDemux>* _aidl_return) override;
-    Status getDemuxCaps(TunerDemuxCapabilities* _aidl_return) override;
-    Status openDescrambler(int32_t descramblerHandle,
-            std::shared_ptr<ITunerDescrambler>* _aidl_return) override;
-    Status getTunerHalVersion(int* _aidl_return) override;
+    ::ndk::ScopedAStatus getFrontendIds(vector<int32_t>* out_ids) override;
+    ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendHandle,
+                                         FrontendInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus openFrontend(int32_t in_frontendHandle,
+                                      shared_ptr<ITunerFrontend>* _aidl_return) override;
+    ::ndk::ScopedAStatus openLnb(int32_t in_lnbHandle,
+                                 shared_ptr<ITunerLnb>* _aidl_return) override;
+    ::ndk::ScopedAStatus openLnbByName(const string& in_lnbName,
+                                       shared_ptr<ITunerLnb>* _aidl_return) override;
+    ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
+                                   shared_ptr<ITunerDemux>* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
+    ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
+                                         shared_ptr<ITunerDescrambler>* _aidl_return) override;
+    ::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
+                                          const shared_ptr<ITunerFilterCallback>& in_cb,
+                                          shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus setLna(bool in_bEnable) override;
+    ::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
+                                                 int32_t in_maxNumber) override;
+    ::ndk::ScopedAStatus getMaxNumberOfFrontends(FrontendType in_frontendType,
+                                                 int32_t* _aidl_return) override;
 
-    // TODO: create a map between resource id and handles.
-    static int getResourceIdFromHandle(int resourceHandle, int /*type*/) {
-        return (resourceHandle & 0x00ff0000) >> 16;
-    }
+    string addFilterToShared(const shared_ptr<TunerFilter>& sharedFilter);
+    void removeSharedFilter(const shared_ptr<TunerFilter>& sharedFilter);
 
-    int getResourceHandleFromId(int id, int resourceType) {
-        // TODO: build up randomly generated id to handle mapping
-        return (resourceType & 0x000000ff) << 24
-                | (id << 16)
-                | (mResourceRequestCount++ & 0xffff);
-    }
+    static shared_ptr<TunerService> getTunerService();
 
 private:
     bool hasITuner();
-    bool hasITuner_1_1();
     void updateTunerResources();
+    vector<TunerFrontendInfo> getTRMFrontendInfos();
+    vector<int32_t> getTRMLnbHandles();
 
-    void updateFrontendResources();
-    void updateLnbResources();
-    Result getHidlFrontendIds(hidl_vec<FrontendId>& ids);
-    Result getHidlFrontendInfo(int id, FrontendInfo& info);
-    vector<int> getLnbHandles();
-
-    TunerDemuxCapabilities getAidlDemuxCaps(DemuxCapabilities caps);
-    TunerFrontendInfo convertToAidlFrontendInfo(FrontendInfo halInfo);
-
-    sp<ITuner> mTuner;
-    sp<::android::hardware::tv::tuner::V1_1::ITuner> mTuner_1_1;
-
-    shared_ptr<ITunerResourceManager> mTunerResourceManager;
-    int mResourceRequestCount = 0;
-
+    shared_ptr<ITuner> mTuner;
     int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
+    Mutex mSharedFiltersLock;
+    map<string, shared_ptr<TunerFilter>> mSharedFilters;
+
+    static shared_ptr<TunerService> sTunerService;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERSERVICE_H
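
For reference, a worked round-trip of the resource-handle layout used by the inline helpers removed from this header (and now reached through TunerHelper): byte 3 carries the resource type, byte 2 the resource id, and the low 16 bits a per-request counter. This sketch is reconstructed from the removed code, not from the TunerHelper implementation itself.

    #include <cassert>
    #include <cstdio>

    int getResourceHandleFromId(int id, int resourceType, int requestCount) {
        return (resourceType & 0x000000ff) << 24 | (id << 16) | (requestCount & 0xffff);
    }

    int getResourceIdFromHandle(int resourceHandle) {
        return (resourceHandle & 0x00ff0000) >> 16;
    }

    int main() {
        const int FRONTEND = 0;  // first entry of the removed TunerResourceType enum
        int handle = getResourceHandleFromId(/*id=*/3, FRONTEND, /*requestCount=*/7);
        printf("handle=0x%08x id=%d\n", static_cast<unsigned>(handle),
               getResourceIdFromHandle(handle));
        assert(getResourceIdFromHandle(handle) == 3);
        return 0;
    }
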
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index ea9da30..73cd6b4 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -18,97 +18,91 @@
 
 #include "TunerTimeFilter.h"
 
-using ::android::hardware::tv::tuner::V1_0::Result;
-using ::android::hardware::tv::tuner::V1_1::Constant64Bit;
+#include <aidl/android/hardware/tv/tuner/Constant64Bit.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
 
+using ::aidl::android::hardware::tv::tuner::Constant64Bit;
+using ::aidl::android::hardware::tv::tuner::Result;
+
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
-TunerTimeFilter::TunerTimeFilter(sp<ITimeFilter> timeFilter) {
+TunerTimeFilter::TunerTimeFilter(shared_ptr<ITimeFilter> timeFilter) {
     mTimeFilter = timeFilter;
 }
 
 TunerTimeFilter::~TunerTimeFilter() {
-    mTimeFilter = NULL;
+    mTimeFilter = nullptr;
 }
 
-Status TunerTimeFilter::setTimeStamp(int64_t timeStamp) {
-    if (mTimeFilter == NULL) {
+::ndk::ScopedAStatus TunerTimeFilter::setTimeStamp(int64_t timeStamp) {
+    if (mTimeFilter == nullptr) {
         ALOGE("ITimeFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mTimeFilter->setTimeStamp(timeStamp);
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mTimeFilter->setTimeStamp(timeStamp);
 }
 
-Status TunerTimeFilter::clearTimeStamp() {
-    if (mTimeFilter == NULL) {
+::ndk::ScopedAStatus TunerTimeFilter::clearTimeStamp() {
+    if (mTimeFilter == nullptr) {
         ALOGE("ITimeFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status = mTimeFilter->clearTimeStamp();
-    if (status != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
-    }
-    return Status::ok();
+    return mTimeFilter->clearTimeStamp();
 }
 
-Status TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
-    if (mTimeFilter == NULL) {
+::ndk::ScopedAStatus TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
+    if (mTimeFilter == nullptr) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
         ALOGE("ITimeFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    mTimeFilter->getSourceTime(
-            [&](Result r, uint64_t t) {
-                status = r;
-                *_aidl_return = t;
-            });
-    if (status != Result::SUCCESS) {
+    auto status = mTimeFilter->getSourceTime(_aidl_return);
+    if (!status.isOk()) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::ok();
+    return status;
 }
 
-Status TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
-    if (mTimeFilter == NULL) {
+::ndk::ScopedAStatus TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
+    if (mTimeFilter == nullptr) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
         ALOGE("ITimeFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result status;
-    mTimeFilter->getTimeStamp(
-            [&](Result r, uint64_t t) {
-                status = r;
-                *_aidl_return = t;
-            });
-    if (status != Result::SUCCESS) {
+    auto status = mTimeFilter->getTimeStamp(_aidl_return);
+    if (!status.isOk()) {
         *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
-        return Status::fromServiceSpecificError(static_cast<int32_t>(status));
     }
-    return Status::ok();
+    return status;
 }
 
-Status TunerTimeFilter::close() {
-    if (mTimeFilter == NULL) {
+::ndk::ScopedAStatus TunerTimeFilter::close() {
+    if (mTimeFilter == nullptr) {
         ALOGE("ITimeFilter is not initialized");
-        return Status::fromServiceSpecificError(static_cast<int32_t>(Result::UNAVAILABLE));
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
     }
 
-    Result res = mTimeFilter->close();
-    mTimeFilter = NULL;
+    auto status = mTimeFilter->close();
+    mTimeFilter = nullptr;
 
-    if (res != Result::SUCCESS) {
-        return Status::fromServiceSpecificError(static_cast<int32_t>(res));
-    }
-    return Status::ok();
+    return status;
 }
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
 }  // namespace android
+}  // namespace aidl
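
One detail worth calling out in the file above: getSourceTime() and getTimeStamp() force the out-parameter to INVALID_PRESENTATION_TIME_STAMP whenever the HAL call fails, so callers never read an uninitialized timestamp. A trivial stand-alone sketch of that pattern, with a plain bool standing in for the binder status type and -1 standing in for the sentinel:

    #include <cstdint>
    #include <cstdio>

    constexpr int64_t kInvalidPts = -1;  // stand-in for INVALID_PRESENTATION_TIME_STAMP

    bool halGetTimeStamp(int64_t* out) {  // pretend HAL call
        *out = 123456789;
        return true;
    }

    bool getTimeStamp(int64_t* out) {
        bool ok = halGetTimeStamp(out);
        if (!ok) {
            *out = kInvalidPts;  // never return an undefined value on failure
        }
        return ok;
    }

    int main() {
        int64_t ts = 0;
        printf("%s %lld\n", getTimeStamp(&ts) ? "ok" : "err", (long long)ts);
        return 0;
    }
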
diff --git a/services/tuner/TunerTimeFilter.h b/services/tuner/TunerTimeFilter.h
index d675319..31a47cd 100644
--- a/services/tuner/TunerTimeFilter.h
+++ b/services/tuner/TunerTimeFilter.h
@@ -17,38 +17,40 @@
 #ifndef ANDROID_MEDIA_TUNERFTIMEFILTER_H
 #define ANDROID_MEDIA_TUNERFTIMEFILTER_H
 
+#include <aidl/android/hardware/tv/tuner/ITimeFilter.h>
 #include <aidl/android/media/tv/tuner/BnTunerTimeFilter.h>
-#include <android/hardware/tv/tuner/1.0/ITimeFilter.h>
-#include <android/hardware/tv/tuner/1.1/types.h>
-#include <media/stagefright/foundation/ADebug.h>
 #include <utils/Log.h>
 
-using Status = ::ndk::ScopedAStatus;
-using ::aidl::android::media::tv::tuner::BnTunerTimeFilter;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::tv::tuner::V1_0::ITimeFilter;
+using ::aidl::android::hardware::tv::tuner::ITimeFilter;
 
 using namespace std;
 
+namespace aidl {
 namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
 
 class TunerTimeFilter : public BnTunerTimeFilter {
 
 public:
-    TunerTimeFilter(sp<ITimeFilter> timeFilter);
+    TunerTimeFilter(shared_ptr<ITimeFilter> timeFilter);
     virtual ~TunerTimeFilter();
-    Status setTimeStamp(int64_t timeStamp) override;
-    Status clearTimeStamp() override;
-    Status getSourceTime(int64_t* _aidl_return) override;
-    Status getTimeStamp(int64_t* _aidl_return) override;
-    Status close() override;
+
+    ::ndk::ScopedAStatus setTimeStamp(int64_t in_timeStamp) override;
+    ::ndk::ScopedAStatus clearTimeStamp() override;
+    ::ndk::ScopedAStatus getSourceTime(int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getTimeStamp(int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus close() override;
 
 private:
-    sp<ITimeFilter> mTimeFilter;
+    shared_ptr<ITimeFilter> mTimeFilter;
 };
 
-} // namespace android
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
 
 #endif // ANDROID_MEDIA_TUNERFTIMEFILTER_H
diff --git a/services/tuner/aidl/android/media/tv/OWNERS b/services/tuner/aidl/android/media/tv/OWNERS
index 0ceb8e8..bf9fe34 100644
--- a/services/tuner/aidl/android/media/tv/OWNERS
+++ b/services/tuner/aidl/android/media/tv/OWNERS
@@ -1,2 +1,2 @@
-nchalko@google.com
+hgchen@google.com
 quxiangfang@google.com
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl
index 73b00ae..fa326b2 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDemux.aidl
@@ -16,6 +16,8 @@
 
 package android.media.tv.tuner;
 
+import android.hardware.tv.tuner.DemuxFilterType;
+import android.hardware.tv.tuner.DvrType;
 import android.media.tv.tuner.ITunerDvr;
 import android.media.tv.tuner.ITunerDvrCallback;
 import android.media.tv.tuner.ITunerFilter;
@@ -36,10 +38,15 @@
     void setFrontendDataSource(in ITunerFrontend frontend);
 
     /**
+     * Set a frontend resource by ID as data input of the demux
+     */
+    void setFrontendDataSourceById(in int frontendId);
+
+    /**
      * Open a new filter in the demux
      */
-    ITunerFilter openFilter(
-        in int mainType, in int subtype, in int bufferSize, in ITunerFilterCallback cb);
+    ITunerFilter openFilter(in DemuxFilterType type, in int bufferSize,
+        in ITunerFilterCallback cb);
 
     /**
      * Open time filter of the demux.
@@ -59,7 +66,7 @@
     /**
      * Open a DVR (Digital Video Record) instance in the demux.
      */
-    ITunerDvr openDvr(in int dvbType, in int bufferSize, in ITunerDvrCallback cb);
+    ITunerDvr openDvr(in DvrType dvbType, in int bufferSize, in ITunerDvrCallback cb);
 
     /**
      * Connect Conditional Access Modules (CAM) through Common Interface (CI).
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl
index 7370eee..39d193c 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDescrambler.aidl
@@ -16,9 +16,9 @@
 
 package android.media.tv.tuner;
 
+import android.hardware.tv.tuner.DemuxPid;
 import android.media.tv.tuner.ITunerDemux;
 import android.media.tv.tuner.ITunerFilter;
-import android.media.tv.tuner.TunerDemuxPid;
 
 /**
  * Tuner Demux interface handles tuner related operations.
@@ -39,12 +39,12 @@
     /**
      * Add packets' PID to the descrambler for descrambling.
      */
-    void addPid(in TunerDemuxPid pid, in ITunerFilter optionalSourceFilter);
+    void addPid(in DemuxPid pid, in ITunerFilter optionalSourceFilter);
 
     /**
      * Remove packets' PID from the descrambler.
      */
-    void removePid(in TunerDemuxPid pid, in ITunerFilter optionalSourceFilter);
+    void removePid(in DemuxPid pid, in ITunerFilter optionalSourceFilter);
 
     /**
      * Close a new interface of ITunerDescrambler.
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
index 8f1601b..2c01c4e 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
@@ -18,8 +18,8 @@
 
 import android.hardware.common.fmq.MQDescriptor;
 import android.hardware.common.fmq.SynchronizedReadWrite;
+import android.hardware.tv.tuner.DvrSettings;
 import android.media.tv.tuner.ITunerFilter;
-import android.media.tv.tuner.TunerDvrSettings;
 
 /**
  * Tuner Dvr interface handles tuner related operations.
@@ -35,7 +35,7 @@
     /**
      * Configure the DVR.
      */
-    void configure(in TunerDvrSettings settings);
+    void configure(in DvrSettings settings);
 
     /**
      * Attach one filter to DVR interface for recording.
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl
index e234fe5..3043d24 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvrCallback.aidl
@@ -16,6 +16,9 @@
 
 package android.media.tv.tuner;
 
+import android.hardware.tv.tuner.PlaybackStatus;
+import android.hardware.tv.tuner.RecordStatus;
+
 /**
  * TunerDvrCallback interface handles tuner dvr related callbacks.
  *
@@ -25,10 +28,10 @@
     /**
      * Notify the client a new status of the demux's record.
      */
-    void onRecordStatus(in int status);
+    void onRecordStatus(in RecordStatus status);
 
     /**
      * Notify the client a new status of the demux's playback.
      */
-    void onPlaybackStatus(in int status);
+    void onPlaybackStatus(in PlaybackStatus status);
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl
index 10d4c3b..dc40f03 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFilter.aidl
@@ -19,8 +19,11 @@
 import android.hardware.common.fmq.MQDescriptor;
 import android.hardware.common.fmq.SynchronizedReadWrite;
 import android.hardware.common.NativeHandle;
-import android.media.tv.tuner.TunerFilterConfiguration;
-import android.media.tv.tuner.TunerFilterSharedHandleInfo;
+import android.hardware.tv.tuner.DemuxFilterSettings;
+import android.hardware.tv.tuner.DemuxFilterType;
+import android.hardware.tv.tuner.AvStreamType;
+import android.hardware.tv.tuner.DemuxFilterMonitorEventType;
+import android.hardware.tv.tuner.FilterDelayHint;
 
 /**
  * Tuner Filter interface handles tuner related operations.
@@ -46,12 +49,12 @@
     /**
      * Configure the filter.
      */
-    void configure(in TunerFilterConfiguration config);
+    void configure(in DemuxFilterSettings settings);
 
     /**
      * Configure the monitor event of the Filter.
      */
-    void configureMonitorEvent(in int monitorEventType);
+    void configureMonitorEvent(in int monitorEventTypes);
 
     /**
      * Configure the context id of the IP Filter.
@@ -61,12 +64,12 @@
     /**
      * Configure the stream type of the media Filter.
      */
-    void configureAvStreamType(in int avStreamType);
+    void configureAvStreamType(in AvStreamType avStreamType);
 
     /**
      * Get the a/v shared memory handle
      */
-    TunerFilterSharedHandleInfo getAvSharedHandleInfo();
+    long getAvSharedHandle(out NativeHandle avMemory);
 
     /**
      * Release the handle reported by the HAL for AV memory.
@@ -97,4 +100,28 @@
      * Close the filter.
      */
     void close();
+
+    /**
+     * Acquire a new SharedFilter token.
+     *
+     * @return a token of the newly created SharedFilter instance.
+     */
+    String acquireSharedFilterToken();
+
+    /**
+     * Free a SharedFilter token.
+     *
+     * @param filterToken the SharedFilter token to be released.
+     */
+    void freeSharedFilterToken(in String filterToken);
+
+    /**
+     * Get filter type.
+     *
+     * @return filter type.
+     */
+    DemuxFilterType getFilterType();
+
+    void setDelayHint(in FilterDelayHint hint);
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl
index e7a52a7..6c53042 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFilterCallback.aidl
@@ -16,7 +16,8 @@
 
 package android.media.tv.tuner;
 
-import android.media.tv.tuner.TunerFilterEvent;
+import android.hardware.tv.tuner.DemuxFilterEvent;
+import android.hardware.tv.tuner.DemuxFilterStatus;
 
 /**
  * TunerFilterCallback interface handles tuner filter related callbacks.
@@ -27,10 +28,10 @@
     /**
      * Notify the client a new status of a filter.
      */
-    void onFilterStatus(int status);
+    void onFilterStatus(in DemuxFilterStatus status);
 
     /**
      * Notify the client that a new filter event happened.
      */
-    void onFilterEvent(in TunerFilterEvent[] filterEvent);
+    void onFilterEvent(in DemuxFilterEvent[] events);
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
index ef0255a..0493f05 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontend.aidl
@@ -1,5 +1,5 @@
 /**
- * Copyright 2020, The Android Open Source Project
+ * Copyright 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,10 +16,13 @@
 
 package android.media.tv.tuner;
 
+import android.hardware.tv.tuner.FrontendScanType;
+import android.hardware.tv.tuner.FrontendSettings;
+import android.hardware.tv.tuner.FrontendStatus;
+import android.hardware.tv.tuner.FrontendStatusReadiness;
+import android.hardware.tv.tuner.FrontendStatusType;
 import android.media.tv.tuner.ITunerFrontendCallback;
 import android.media.tv.tuner.ITunerLnb;
-import android.media.tv.tuner.TunerFrontendSettings;
-import android.media.tv.tuner.TunerFrontendStatus;
 
 /**
  * Tuner Frontend interface handles frontend related operations.
@@ -39,7 +42,7 @@
      *
      * @param settings the settings to tune with.
      */
-    void tune(in TunerFrontendSettings settings);
+    void tune(in FrontendSettings settings);
 
     /**
      * Stop the previous tuning.
@@ -52,7 +55,7 @@
      * @param settings the settings to scan with.
      * @param frontendScanType scan with given type.
      */
-    void scan(in TunerFrontendSettings settings, in int frontendScanType);
+    void scan(in FrontendSettings settings, in FrontendScanType frontendScanType);
 
     /**
      * Stop the previous scanning.
@@ -67,13 +70,6 @@
     void setLnb(in ITunerLnb lnb);
 
     /**
-     * Enable or Disable Low Noise Amplifier (LNA).
-     *
-     * @param bEnable enable Lna or not.
-     */
-    void setLna(in boolean bEnable);
-
-    /**
      * Link Frontend to the cicam with given id.
      *
      * @return lts id
@@ -93,15 +89,25 @@
     /**
      * Gets the statuses of the frontend.
      */
-    TunerFrontendStatus[] getStatus(in int[] statusTypes);
-
-    /**
-     * Gets the 1.1 extended statuses of the frontend.
-     */
-    TunerFrontendStatus[] getStatusExtended_1_1(in int[] statusTypes);
+    FrontendStatus[] getStatus(in FrontendStatusType[] statusTypes);
 
     /**
      * Gets the id of the frontend.
      */
     int getFrontendId();
+
+    /**
+     * Request hardware information about the frontend.
+     */
+    String getHardwareInfo();
+
+    /**
+     * Filter out unnecessary PID from frontend output.
+     */
+    void removeOutputPid(int pid);
+
+    /**
+     * Gets the readiness statuses of FrontendStatus for the given status types.
+     */
+    FrontendStatusReadiness[] getFrontendStatusReadiness(in FrontendStatusType[] statusTypes);
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
index c92f5ee..d0ab11d 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerFrontendCallback.aidl
@@ -1,5 +1,5 @@
 /**
- * Copyright 2020, The Android Open Source Project
+ * Copyright 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,7 +16,9 @@
 
 package android.media.tv.tuner;
 
-import android.media.tv.tuner.TunerFrontendScanMessage;
+import android.hardware.tv.tuner.FrontendEventType;
+import android.hardware.tv.tuner.FrontendScanMessage;
+import android.hardware.tv.tuner.FrontendScanMessageType;
 
 /**
  * TunerFrontendCallback interface handles tuner frontend related callbacks.
@@ -24,13 +26,14 @@
  * {@hide}
  */
 interface ITunerFrontendCallback {
-        /**
+    /**
      * Notify the client that a new event happened on the frontend.
      */
-    void onEvent(in int frontendEventType);
+    void onEvent(in FrontendEventType frontendEventType);
 
     /**
      * notify the client of scan messages.
      */
-    void onScanMessage(in int messageType, in TunerFrontendScanMessage message);
+    void onScanMessage(in FrontendScanMessageType messageType,
+        in FrontendScanMessage message);
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl
index d62145e..79f0761 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerLnb.aidl
@@ -16,6 +16,9 @@
 
 package android.media.tv.tuner;
 
+import android.hardware.tv.tuner.LnbPosition;
+import android.hardware.tv.tuner.LnbTone;
+import android.hardware.tv.tuner.LnbVoltage;
 import android.media.tv.tuner.ITunerLnbCallback;
 
 /**
@@ -32,17 +35,17 @@
     /**
      * Set the lnb's power voltage.
      */
-    void setVoltage(in int voltage);
+    void setVoltage(in LnbVoltage voltage);
 
     /**
      * Set the lnb's tone mode.
      */
-    void setTone(in int tone);
+    void setTone(in LnbTone tone);
 
     /**
      * Select the lnb's position.
      */
-    void setSatellitePosition(in int position);
+    void setSatellitePosition(in LnbPosition position);
 
     /**
      * Sends DiSEqC (Digital Satellite Equipment Control) message.
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl
index 117352f..2b6eb5f 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerLnbCallback.aidl
@@ -16,6 +16,8 @@
 
 package android.media.tv.tuner;
 
+import android.hardware.tv.tuner.LnbEventType;
+
 /**
  * TuneLnbCallback interface handles tuner lnb related callbacks.
  *
@@ -25,7 +27,7 @@
     /**
      * Notify the client that a new event happened on the Lnb.
      */
-    void onEvent(in int lnbEventType);
+    void onEvent(in LnbEventType lnbEventType);
 
     /**
      * notify the client of new DiSEqC message.
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index 755b152..b8084ab 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2020, The Android Open Source Project
+ * Copyright (c) 2021, The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,16 +16,15 @@
 
 package android.media.tv.tuner;
 
-import android.hardware.common.fmq.MQDescriptor;
-import android.hardware.common.fmq.SynchronizedReadWrite;
-import android.hardware.common.fmq.UnsynchronizedWrite;
+import android.hardware.tv.tuner.DemuxCapabilities;
+import android.hardware.tv.tuner.FrontendInfo;
+import android.hardware.tv.tuner.FrontendType;
 import android.media.tv.tuner.ITunerDemux;
 import android.media.tv.tuner.ITunerDescrambler;
+import android.media.tv.tuner.ITunerFilter;
+import android.media.tv.tuner.ITunerFilterCallback;
 import android.media.tv.tuner.ITunerFrontend;
 import android.media.tv.tuner.ITunerLnb;
-import android.media.tv.tuner.TunerDemuxCapabilities;
-import android.media.tv.tuner.TunerFrontendDtmbCapabilities;
-import android.media.tv.tuner.TunerFrontendInfo;
 
 /**
  * TunerService interface handles tuner related operations.
@@ -33,8 +32,8 @@
  * {@hide}
  */
 //@VintfStability
+@SuppressWarnings(value={"out-array"})
 interface ITunerService {
-
     /**
      * Gets frontend IDs.
      */
@@ -43,15 +42,10 @@
     /**
      * Retrieve the frontend's information.
      *
-     * @param frontendHandle the handle of the frontend granted by TRM.
+     * @param frontendId the ID of the frontend.
      * @return the information of the frontend.
      */
-    TunerFrontendInfo getFrontendInfo(in int frontendHandle);
-
-    /**
-     * Get Dtmb Frontend Capabilities.
-     */
-    TunerFrontendDtmbCapabilities getFrontendDtmbCapabilities(in int id);
+    FrontendInfo getFrontendInfo(in int frontendId);
 
     /**
      * Open a Tuner Frontend interface.
@@ -87,7 +81,7 @@
      *
      * @return the demux’s capabilities.
      */
-    TunerDemuxCapabilities getDemuxCaps();
+    DemuxCapabilities getDemuxCaps();
 
     /* Open a new interface of ITunerDescrambler given a descramblerHandle.
      *
@@ -102,4 +96,38 @@
      * value is unknown version 0.
      */
     int getTunerHalVersion();
+
+    /**
+     * Open a new SharedFilter instance of ITunerFilter.
+     *
+     * @param filterToken the SharedFilter token created by ITunerFilter.
+     * @param cb the ITunerFilterCallback used to receive callback events.
+     * @return a newly created ITunerFilter interface.
+     */
+    ITunerFilter openSharedFilter(in String filterToken, in ITunerFilterCallback cb);
+
+    /**
+     * Enable or disable the Low Noise Amplifier (LNA).
+     *
+     * @param bEnable true to enable the LNA; false to disable it.
+     */
+    void setLna(in boolean bEnable);
+
+    /**
+     * Set the maximum number of usable frontends for a given frontend type. Clients use this
+     * to enable or disable frontends when the user changes the cable connection status.
+     *
+     * @param frontendType the frontend type for which the maximum usable number will be set.
+     * @param maxNumber the new maximum usable number.
+     */
+    void setMaxNumberOfFrontends(in FrontendType frontendType, in int maxNumber);
+
+    /**
+     * Get the maximum number of usable frontends for a given frontend type.
+     *
+     * @param frontendType the frontend type for which the maximum usable number will be queried.
+     *
+     * @return the maximum usable number of the queried frontend type.
+     */
+    int getMaxNumberOfFrontends(in FrontendType frontendType);
 }
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerAudioExtraMetaData.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerAudioExtraMetaData.aidl
deleted file mode 100644
index df3374a..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerAudioExtraMetaData.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Extra Meta Data from AD (Audio Descriptor) according to ETSI TS 101 154 V2.1.1.
- *
- * {@hide}
- */
-parcelable TunerAudioExtraMetaData {
-    byte adFade;
-
-    byte adPan;
-
-    byte versionTextTag;
-
-    byte adGainCenter;
-
-    byte adGainFront;
-
-    byte adGainSurround;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxCapabilities.aidl
deleted file mode 100644
index 71ab151..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxCapabilities.aidl
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Tuner Demux capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerDemuxCapabilities {
-    int numDemux;
-
-    int numRecord;
-
-    int numPlayback;
-
-    int numTsFilter;
-
-    int numSectionFilter;
-
-    int numAudioFilter;
-
-    int numVideoFilter;
-
-    int numPesFilter;
-
-    int numPcrFilter;
-
-    int numBytesInSectionFilter;
-
-    int filterCaps;
-
-    int[] linkCaps;
-
-    boolean bTimeFilter;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddress.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddress.aidl
deleted file mode 100644
index b65f404..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddress.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Demux ip address configure.
- *
- * {@hide}
- */
-parcelable TunerDemuxIpAddress {
-    boolean isIpV6;
-
-    byte[] addr;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddressSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddressSettings.aidl
deleted file mode 100644
index b244388..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxIpAddressSettings.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerDemuxIpAddress;
-
-/**
- * Filter Settings for an Ip filter.
- *
- * {@hide}
- */
-parcelable TunerDemuxIpAddressSettings {
-    TunerDemuxIpAddress srcIpAddress;
-
-    TunerDemuxIpAddress dstIpAddress;
-
-    char srcPort;
-
-    char dstPort;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
deleted file mode 100644
index 8b238b6..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerDemuxPid.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Demux PID interface.
- *
- * {@hide}
- */
-union TunerDemuxPid {
-    char tPid;
-
-    char mmtpPid;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerDvrSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerDvrSettings.aidl
deleted file mode 100644
index 4ec4d75..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerDvrSettings.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Dvr Settings interface.
- *
- * {@hide}
- */
-parcelable TunerDvrSettings {
-    int statusMask;
-
-    int lowThreshold;
-
-    int highThreshold;
-
-    int dataFormat;
-
-    int packetSize;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterAlpConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterAlpConfiguration.aidl
deleted file mode 100644
index 4c9e3af..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterAlpConfiguration.aidl
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterSettings;
-
-/**
- * Filter Settings for an ALP filter.
- *
- * {@hide}
- */
-parcelable TunerFilterAlpConfiguration {
-    byte packetType;
-
-    byte lengthType;
-
-    TunerFilterSettings filterSettings;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterAvSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterAvSettings.aidl
deleted file mode 100644
index 6bf88f0..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterAvSettings.aidl
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Settings for a Video and Audio.
- *
- * {@hide}
- */
-parcelable TunerFilterAvSettings {
-    /**
-     * true if the filter output goes to decoder directly in pass through mode.
-     */
-    boolean isPassthrough;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterConfiguration.aidl
deleted file mode 100644
index 808cfd1..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterConfiguration.aidl
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterAlpConfiguration;
-import android.media.tv.tuner.TunerFilterIpConfiguration;
-import android.media.tv.tuner.TunerFilterMmtpConfiguration;
-import android.media.tv.tuner.TunerFilterTlvConfiguration;
-import android.media.tv.tuner.TunerFilterTsConfiguration;
-
-/**
- * Filter configuration.
- *
- * {@hide}
- */
-union TunerFilterConfiguration {
-    TunerFilterTsConfiguration ts;
-
-    TunerFilterMmtpConfiguration mmtp;
-
-    TunerFilterIpConfiguration ip;
-
-    TunerFilterTlvConfiguration tlv;
-
-    TunerFilterAlpConfiguration alp;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadEvent.aidl
deleted file mode 100644
index b971dd3..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadEvent.aidl
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Event for Download data.
- *
- * {@hide}
- */
-parcelable TunerFilterDownloadEvent {
-    int itemId;
-
-    /**
-     * MPU sequence number of filtered data (only for MMTP)
-     */
-    int mpuSequenceNumber;
-
-    int itemFragmentIndex;
-
-    int lastItemFragmentIndex;
-
-    /**
-     * Data size in bytes of filtered data
-     */
-    char dataLength;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadSettings.aidl
deleted file mode 100644
index 417a5fe..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterDownloadSettings.aidl
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Settings for downloading.
- *
- * {@hide}
- */
-parcelable TunerFilterDownloadSettings {
-    int downloadId;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterEvent.aidl
deleted file mode 100644
index 1305510..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterEvent.aidl
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterDownloadEvent;
-import android.media.tv.tuner.TunerFilterIpPayloadEvent;
-import android.media.tv.tuner.TunerFilterMediaEvent;
-import android.media.tv.tuner.TunerFilterMmtpRecordEvent;
-import android.media.tv.tuner.TunerFilterMonitorEvent;
-import android.media.tv.tuner.TunerFilterPesEvent;
-import android.media.tv.tuner.TunerFilterSectionEvent;
-import android.media.tv.tuner.TunerFilterTemiEvent;
-import android.media.tv.tuner.TunerFilterTsRecordEvent;
-
-/**
- * Filter events.
- *
- * {@hide}
- */
-union TunerFilterEvent {
-    TunerFilterMediaEvent media;
-
-    TunerFilterSectionEvent section;
-
-    TunerFilterPesEvent pes;
-
-    TunerFilterTsRecordEvent tsRecord;
-
-    TunerFilterMmtpRecordEvent mmtpRecord;
-
-    TunerFilterDownloadEvent download;
-
-    TunerFilterIpPayloadEvent ipPayload;
-
-    TunerFilterTemiEvent temi;
-
-    TunerFilterMonitorEvent monitor;
-
-    int startId;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpConfiguration.aidl
deleted file mode 100644
index 8b4d889..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpConfiguration.aidl
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerDemuxIpAddressSettings;
-import android.media.tv.tuner.TunerFilterSettings;
-
-/**
- * Filter Settings for a ip filter.
- *
- * {@hide}
- */
-parcelable TunerFilterIpConfiguration {
-    TunerDemuxIpAddressSettings ipAddr;
-
-    TunerFilterSettings filterSettings;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpPayloadEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpPayloadEvent.aidl
deleted file mode 100644
index d5bda93..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterIpPayloadEvent.aidl
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Event for IP payload data.
- *
- * {@hide}
- */
-parcelable TunerFilterIpPayloadEvent {
-    /**
-     * Data size in bytes of ip data
-     */
-    char dataLength;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
deleted file mode 100644
index c3dbce9..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMediaEvent.aidl
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.hardware.common.NativeHandle;
-import android.media.tv.tuner.TunerAudioExtraMetaData;
-
-/**
- * Filter Event for Audio or Video Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterMediaEvent {
-    char streamId;
-
-    /**
-     * true if PTS is present in PES header.
-     */
-    boolean isPtsPresent;
-
-    /**
-     * Presentation Time Stamp for audio or video frame. It based on 90KHz has
-     * the same format as PTS (Presentation Time Stamp).
-     */
-    long pts;
-
-    /**
-     * Data size in bytes of audio or video frame
-     */
-    int dataLength;
-
-    /**
-     *  The offset in the memory block which is shared among multiple
-     *  MediaEvents.
-     */
-    int offset;
-
-    /**
-     * A handle associated to the memory where audio or video data stays.
-     */
-    NativeHandle avMemory;
-
-    /**
-     * True if the avMemory is in secure area, and isn't mappable.
-     */
-    boolean isSecureMemory;
-
-    /**
-     * An Id is used by HAL to provide additional information for AV data.
-     * For secure audio, it's the audio handle used by Audio Track.
-     */
-    long avDataId;
-
-    /**
-     * MPU sequence number of filtered data (only for MMTP)
-     */
-    int mpuSequenceNumber;
-
-    boolean isPesPrivateData;
-
-    /**
-     * If TunerAudioExtraMetaData field is valid or not
-     */
-    boolean isAudioExtraMetaData;
-
-    /**
-     * Only valid when isAudioExtraMetaData is true
-     */
-    TunerAudioExtraMetaData audio;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpConfiguration.aidl
deleted file mode 100644
index 162ca8e..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpConfiguration.aidl
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterSettings;
-
-/**
- * Filter Settings for an mmtp filter.
- *
- * {@hide}
- */
-parcelable TunerFilterMmtpConfiguration {
-    char mmtpPid;
-
-    TunerFilterSettings filterSettings;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpRecordEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpRecordEvent.aidl
deleted file mode 100644
index b8871cf..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMmtpRecordEvent.aidl
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Event for an MMTP Record Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterMmtpRecordEvent {
-    int scHevcIndexMask;
-
-    /**
-     * Byte number from beginning of the filter's output
-     */
-    long byteNumber;
-
-    /**
-     * If the current event contains extended information or not
-     */
-    boolean isExtended;
-
-    /**
-     * The Presentation Time Stamp(PTS) for the audio or video frame. It is based on 90KHz
-     * and has the same format as the PTS in ISO/IEC 13818-1.
-     */
-    long pts;
-
-    /**
-     * MPU sequence number of the filtered data. This is only used for MMTP.
-     */
-    int mpuSequenceNumber;
-
-    /**
-     * Specifies the address of the first macroblock in the slice defined in ITU-T Rec. H.264.
-     */
-    int firstMbInSlice;
-
-    /**
-     * TS index mask.
-     */
-    int tsIndexMask;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMonitorEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterMonitorEvent.aidl
deleted file mode 100644
index 31ab5e6..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterMonitorEvent.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter monitor events.
- *
- * {@hide}
- */
-union TunerFilterMonitorEvent {
-    /**
-     * New scrambling status.
-     */
-    int scramblingStatus;
-
-    /**
-     * New cid for the IP filter.
-     */
-    int cid;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesDataSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesDataSettings.aidl
deleted file mode 100644
index 312f314..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesDataSettings.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Settings for Pes Data.
- *
- * {@hide}
- */
-parcelable TunerFilterPesDataSettings {
-    char streamId;
-
-    boolean isRaw;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
deleted file mode 100644
index dc1ecc6..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterPesEvent.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Event for PES Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterPesEvent {
-    char streamId;
-
-    /**
-     * Data size in bytes of PES data
-     */
-    char dataLength;
-
-    /**
-     * MPU sequence number of filtered data
-     */
-    int mpuSequenceNumber;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterRecordSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterRecordSettings.aidl
deleted file mode 100644
index 29be624..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterRecordSettings.aidl
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterScIndexMask;
-
-/**
- * Filter Settings for recording.
- *
- * {@hide}
- */
-parcelable TunerFilterRecordSettings {
-    int tsIndexMask;
-
-    int scIndexType;
-
-    TunerFilterScIndexMask scIndexMask;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterScIndexMask.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterScIndexMask.aidl
deleted file mode 100644
index ed37fce..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterScIndexMask.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter SC Index Mask
- *
- * {@hide}
- */
-union TunerFilterScIndexMask {
-    int sc;
-
-    int scHevc;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionBits.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionBits.aidl
deleted file mode 100644
index dd4f842..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionBits.aidl
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Bits settings of a section Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterSectionBits {
-    byte[] filter;
-
-    byte[] mask;
-
-    byte[] mode;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionCondition.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionCondition.aidl
deleted file mode 100644
index 00aabe4..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionCondition.aidl
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterSectionBits;
-import android.media.tv.tuner.TunerFilterSectionTableInfo;
-
-/**
- * Section filter condition settings.
- *
- * {@hide}
- */
-union TunerFilterSectionCondition {
-    TunerFilterSectionBits sectionBits;
-
-    TunerFilterSectionTableInfo tableInfo;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionEvent.aidl
deleted file mode 100644
index 5f20926..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionEvent.aidl
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Event for Section Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterSectionEvent {
-    /**
-     * Table ID of filtered data
-     */
-    char tableId;
-
-    /**
-     * Version number of filtered data
-     */
-    char version;
-
-    /**
-     * Section number of filtered data
-     */
-    char sectionNum;
-
-    /**
-     * Data size in bytes of filtered data
-     */
-    char dataLength;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionSettings.aidl
deleted file mode 100644
index 22129b6..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionSettings.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterSectionCondition;
-
-/**
- * Filter Settings for a section filter.
- *
- * {@hide}
- */
-parcelable TunerFilterSectionSettings {
-    TunerFilterSectionCondition condition;
-
-    boolean isCheckCrc;
-
-    boolean isRepeat;
-
-    boolean isRaw;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionTableInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionTableInfo.aidl
deleted file mode 100644
index cc78c9d..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSectionTableInfo.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Table info settings of a section Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterSectionTableInfo {
-    char tableId;
-
-    char version;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSettings.aidl
deleted file mode 100644
index eb7eaa5..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSettings.aidl
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterAvSettings;
-import android.media.tv.tuner.TunerFilterDownloadSettings;
-import android.media.tv.tuner.TunerFilterPesDataSettings;
-import android.media.tv.tuner.TunerFilterRecordSettings;
-import android.media.tv.tuner.TunerFilterSectionSettings;
-
-/**
- * Filter Settings.
- *
- * {@hide}
- */
-union TunerFilterSettings {
-    boolean nothing;
-
-    TunerFilterAvSettings av;
-
-    TunerFilterSectionSettings section;
-
-    TunerFilterPesDataSettings pesData;
-
-    TunerFilterRecordSettings record;
-
-    TunerFilterDownloadSettings download;
-
-    boolean isPassthrough;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSharedHandleInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterSharedHandleInfo.aidl
deleted file mode 100644
index 122dfc3..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterSharedHandleInfo.aidl
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.hardware.common.NativeHandle;
-
-/**
- * Filter Shared Handle Information.
- *
- * {@hide}
- */
-parcelable TunerFilterSharedHandleInfo {
-    NativeHandle handle;
-    long size;
-}
\ No newline at end of file
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTemiEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTemiEvent.aidl
deleted file mode 100644
index 4c4e993..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTemiEvent.aidl
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Filter Event for Timed External Media Information (TEMI) data.
- *
- * {@hide}
- */
-parcelable TunerFilterTemiEvent {
-    /**
-     * Presentation Time Stamp for audio or video frame. It based on 90KHz has
-     * the same format as PTS (Presentation Time Stamp) in ISO/IEC 13818-1.
-     */
-    long pts;
-
-    /**
-     * TEMI Descriptor Tag
-     */
-    byte descrTag;
-
-    /**
-     * TEMI Descriptor
-     */
-    byte[] descrData;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTlvConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTlvConfiguration.aidl
deleted file mode 100644
index 0b237b4..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTlvConfiguration.aidl
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterSettings;
-
-/**
- * Filter Settings for a tlv filter.
- *
- * {@hide}
- */
-parcelable TunerFilterTlvConfiguration {
-    byte packetType;
-
-    boolean isCompressedIpPacket;
-
-    TunerFilterSettings filterSettings;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsConfiguration.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsConfiguration.aidl
deleted file mode 100644
index 2e386e6..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsConfiguration.aidl
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterSettings;
-
-/**
- * Filter Settings for a TS filter.
- *
- * {@hide}
- */
-parcelable TunerFilterTsConfiguration {
-    char tpid;
-
-    TunerFilterSettings filterSettings;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsRecordEvent.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsRecordEvent.aidl
deleted file mode 100644
index c52a749..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFilterTsRecordEvent.aidl
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFilterScIndexMask;
-
-/**
- * Filter Event for TS Record Filter.
- *
- * {@hide}
- */
-parcelable TunerFilterTsRecordEvent {
-    char pid;
-
-    int tsIndexMask;
-
-    /**
-     * Indexes of record output
-     */
-    TunerFilterScIndexMask scIndexMask;
-
-    /**
-     * Byte number from beginning of the filter's output
-     */
-    long byteNumber;
-
-    /**
-     * If the current event contains extended information or not
-     */
-    boolean isExtended;
-
-    /**
-     * The Presentation Time Stamp(PTS) for the audio or video frame. It is based on 90KHz
-     * and has the same format as the PTS in ISO/IEC 13818-1.
-     */
-    long pts;
-
-    /**
-     * Specifies the address of the first macroblock in the slice defined in ITU-T Rec. H.264.
-     */
-    int firstMbInSlice;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
deleted file mode 100644
index 74bf04e..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogCapabilities.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Analog Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAnalogCapabilities {
-	/**
-     * Signal Type capability
-     */
-    int typeCap;
-
-    /**
-     * Standard Interchange Format (SIF) capability
-     */
-    int sifStandardCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl
deleted file mode 100644
index 40cd8c9..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAnalogSettings.aidl
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Analog Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAnalogSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    int signalType;
-
-    /**
-     * Standard Interchange Format (SIF) setting
-     */
-    int sifStandard;
-
-    /**
-     * Fields after isExtended are only valid when isExtended is true
-     */
-    boolean isExtended;
-
-    int aftFlag;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
deleted file mode 100644
index 6c9be77..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Capabilities.aidl
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * ATSC3 Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAtsc3Capabilities {
-    /**
-     * Bandwidth capability
-     */
-    int bandwidthCap;
-
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-
-    /**
-     * TimeInterleaveMode capability
-     */
-    int timeInterleaveModeCap;
-
-    /**
-     * CodeRate capability
-     */
-    int codeRateCap;
-
-    /**
-     * FEC capability
-     */
-    int fecCap;
-
-    /**
-     * Demodulator Output Format capability
-     */
-    int demodOutputFormatCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl
deleted file mode 100644
index b29e1f7..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3PlpSettings.aidl
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Atsc3 Frontend Physical Layer Pipe Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAtsc3PlpSettings {
-    int plpId;
-
-    int modulation;
-
-    int interleaveMode;
-
-    int codeRate;
-
-    /**
-     * Forward Error Correction Type.
-     */
-    int fec;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl
deleted file mode 100644
index 32fb8c7..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtsc3Settings.aidl
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendAtsc3PlpSettings;
-
-/**
- * Atsc3 Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAtsc3Settings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    /**
-     * Bandwidth of tuning band.
-     */
-    int bandwidth;
-
-    int demodOutputFormat;
-
-    TunerFrontendAtsc3PlpSettings[] plpSettings;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
deleted file mode 100644
index 2b6c2fc..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscCapabilities.aidl
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * ATSC Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAtscCapabilities {
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-}
\ No newline at end of file
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl
deleted file mode 100644
index c7a8c07..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendAtscSettings.aidl
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Atsc Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendAtscSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    int modulation;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
deleted file mode 100644
index b880c60..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableCapabilities.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Cable(DVBC) Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendCableCapabilities {
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-
-    /**
-     * Code Rate capability
-     */
-    long codeRateCap; // inner FEC will converge to codeRate
-
-    /**
-     * Annex capability
-     */
-    int annexCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl
deleted file mode 100644
index b9bcf29..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCableSettings.aidl
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Cable Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendCableSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    int modulation;
-
-    /**
-     * Inner Forward Error Correction type as specified in ETSI EN 300 468 V1.15.1
-     * and ETSI EN 302 307-2 V1.1.1.
-     */
-    long innerFec;
-
-    /**
-     * Symbols per second
-     */
-    int symbolRate;
-
-    /**
-     * Outer Forward Error Correction (FEC) Type.
-     */
-    int outerFec;
-
-    int annex;
-
-    /**
-     * Spectral Inversion Type.
-     */
-    int spectralInversion;
-
-    /**
-     * Fields after isExtended are only valid when isExtended is true
-     */
-    boolean isExtended;
-
-    int interleaveMode;
-
-    int bandwidth;
-}
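
Several of the removed settings parcelables (cable, DVB-S, DVB-T) share the convention noted above: fields declared after isExtended are only valid when isExtended is true. A minimal, self-contained C++ sketch of a consumer honoring that contract; the struct and function below are illustrative stand-ins, not part of the tuner API:

    #include <cstdint>
    #include <iostream>

    // Local mirror of a few TunerFrontendCableSettings fields (illustrative only, not the AIDL type).
    struct CableSettings {
        int32_t frequency = 0;       // signal frequency in Hz
        int32_t modulation = 0;
        bool isExtended = false;     // gates the fields below
        int32_t interleaveMode = 0;  // only meaningful when isExtended is true
        int32_t bandwidth = 0;       // only meaningful when isExtended is true
    };

    // Honor the isExtended contract: read the extension fields only when they were marked valid.
    void applySettings(const CableSettings& s) {
        std::cout << "tune to " << s.frequency << " Hz, modulation " << s.modulation << "\n";
        if (s.isExtended) {
            std::cout << "interleaveMode=" << s.interleaveMode << " bandwidth=" << s.bandwidth << "\n";
        }
    }
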
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
deleted file mode 100644
index 19f31f1..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendCapabilities.aidl
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendAnalogCapabilities;
-import android.media.tv.tuner.TunerFrontendAtscCapabilities;
-import android.media.tv.tuner.TunerFrontendAtsc3Capabilities;
-import android.media.tv.tuner.TunerFrontendCableCapabilities;
-import android.media.tv.tuner.TunerFrontendDvbsCapabilities;
-import android.media.tv.tuner.TunerFrontendDvbtCapabilities;
-import android.media.tv.tuner.TunerFrontendIsdbsCapabilities;
-import android.media.tv.tuner.TunerFrontendIsdbs3Capabilities;
-import android.media.tv.tuner.TunerFrontendIsdbtCapabilities;
-
-/**
- * Frontend Capabilities interface.
- *
- * Uses a group of vectors as a workaround for the Union structure, which is not fully
- * supported in AIDL currently.
- *
- * Clients may use FrontendInfo.type as the discriminator to check the corresponding vector.
- * If the vector is not null, it contains a valid value.
- *
- * {@hide}
- */
-union TunerFrontendCapabilities {
-    /**
-     * Analog Frontend Capabilities
-     */
-    TunerFrontendAnalogCapabilities analogCaps;
-
-    /**
-     * ATSC Frontend Capabilities
-     */
-    TunerFrontendAtscCapabilities atscCaps;
-
-    /**
-     * ATSC3 Frontend Capabilities
-     */
-    TunerFrontendAtsc3Capabilities atsc3Caps;
-
-    /**
-     * Cable Frontend Capabilities
-     */
-    TunerFrontendCableCapabilities cableCaps;
-
-    /**
-     * DVBS Frontend Capabilities
-     */
-    TunerFrontendDvbsCapabilities dvbsCaps;
-
-    /**
-     * DVBT Frontend Capabilities
-     */
-    TunerFrontendDvbtCapabilities dvbtCaps;
-
-    /**
-     * ISDB-S Frontend Capabilities
-     */
-    TunerFrontendIsdbsCapabilities isdbsCaps;
-
-    /**
-     * ISDB-S3 Frontend Capabilities
-     */
-    TunerFrontendIsdbs3Capabilities isdbs3Caps;
-
-    /**
-     * ISDB-T Frontend Capabilities
-     */
-    TunerFrontendIsdbtCapabilities isdbtCaps;
-}
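
The removed capabilities type is a tagged union: exactly one member is populated, and FrontendInfo.type tells the client which one to read. A minimal, self-contained C++ analogue using std::variant; the local structs are illustrative stand-ins for the generated AIDL classes, not the real API:

    #include <cstdint>
    #include <iostream>
    #include <variant>

    // Illustrative stand-ins for two of the per-standard capability parcelables.
    struct AtscCaps { int32_t modulationCap; };
    struct CableCaps { int32_t modulationCap; int64_t codeRateCap; };

    // Exactly one alternative is populated, analogous to the AIDL union above.
    using FrontendCaps = std::variant<AtscCaps, CableCaps>;

    void printModulationCap(const FrontendCaps& caps) {
        // The frontend type (FrontendInfo.type) tells the client which alternative to expect;
        // std::visit dispatches on whichever one is actually held.
        std::visit([](const auto& c) { std::cout << "modulationCap=" << c.modulationCap << "\n"; },
                   caps);
    }
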
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbCapabilities.aidl
deleted file mode 100644
index e8e4933..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbCapabilities.aidl
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * DTMB Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDtmbCapabilities {
-    int transmissionModeCap;
-
-    int bandwidthCap;
-
-    int modulationCap;
-
-    int codeRateCap;
-
-    int guardIntervalCap;
-
-    int interleaveModeCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbSettings.aidl
deleted file mode 100644
index 45e7ff9..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDtmbSettings.aidl
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * DTMB Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDtmbSettings {
-    int frequency;
-
-    int transmissionMode;
-
-    int bandwidth;
-
-    int modulation;
-
-    int codeRate;
-
-    int guardInterval;
-
-    int interleaveMode;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
deleted file mode 100644
index 5e4322c..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCapabilities.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * DVBS Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDvbsCapabilities {
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-
-    /**
-     * Code Rate capability
-     */
-    long codeRateCap;  // inner FEC will converge to codeRate
-
-    /**
-     * Sub standards capability
-     */
-    int standard;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl
deleted file mode 100644
index 59b7de3..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsCodeRate.aidl
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Dvbs Frontend CodeRate interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDvbsCodeRate {
-    /**
-     * Inner Forward Error Correction type as specified in ETSI EN 300 468 V1.15.1
-     * and ETSI EN 302 307-2 V1.1.1.
-     */
-    long fec;
-
-    boolean isLinear;
-
-    /**
-     * true if short frames are enabled
-     */
-    boolean isShortFrames;
-
-    /**
-     * Number of bits per 1000 symbols. 0 to use the default.
-     */
-    int bitsPer1000Symbol;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl
deleted file mode 100644
index ec3e4b9..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbsSettings.aidl
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendDvbsCodeRate;
-
-/**
- * Dvbs Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDvbsSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    int modulation;
-
-    TunerFrontendDvbsCodeRate codeRate;
-
-    int symbolRate;
-
-    /**
-     * Roll off type.
-     */
-    int rolloff;
-
-    /**
-     * Pilot mode.
-     */
-    int pilot;
-
-    int inputStreamId;
-
-    int standard;
-
-    /**
-     * Vcm mode.
-     */
-    int vcm;
-
-    /**
-     * Fields after isExtended are only valid when isExtended is true
-     */
-    boolean isExtended;
-
-    int scanType;
-
-    boolean isDiseqcRxMessage;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
deleted file mode 100644
index 73f16dd..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtCapabilities.aidl
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * DVBT Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDvbtCapabilities {
-    /**
-     * Transmission Mode capability
-     */
-    int transmissionModeCap;
-
-    /**
-     * Bandwidth capability
-     */
-    int bandwidthCap;
-
-    /**
-     * Constellation capability
-     */
-    int constellationCap;
-
-    /**
-     * Code Rate capability
-     */
-    int codeRateCap;
-
-    /**
-     * Hierarchy Type capability
-     */
-    int hierarchyCap;
-
-    /**
-     * Guard Interval capability
-     */
-    int guardIntervalCap;
-
-    /**
-     * T2 Support capability
-     */
-    boolean isT2Supported;
-
-    /**
-     * Miso Support capability
-     */
-    boolean isMisoSupported;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl
deleted file mode 100644
index 14c942a..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendDvbtSettings.aidl
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Dvbt Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendDvbtSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    int transmissionMode;
-
-    int bandwidth;
-
-    int constellation;
-
-    int hierarchy;
-
-    /**
-     * Code Rate for High Priority level
-     */
-    int hpCodeRate;
-
-    /**
-     * Code Rate for Low Priority level
-     */
-    int lpCodeRate;
-
-    int guardInterval;
-
-    boolean isHighPriority;
-
-    int standard;
-
-    boolean isMiso;
-
-    /**
-     * Physical Layer Pipe (PLP) mode
-     */
-    int plpMode;
-
-    /**
-     * Physical Layer Pipe (PLP) Id
-     */
-    int plpId;
-
-    /**
-     * Physical Layer Pipe (PLP) Group Id
-     */
-    int plpGroupId;
-
-    /**
-     * Fields after isExtended are only valid when isExtended is true
-     */
-    boolean isExtended;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendInfo.aidl
deleted file mode 100644
index 4bccd56..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendInfo.aidl
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendCapabilities;
-
-/**
- * FrontendInfo interface that carries tuner frontend information.
- *
- * <p>This is used to update the TunerResourceManager and pass Frontend
- * information from HAL to the client side.
- *
- * {@hide}
- */
-parcelable TunerFrontendInfo {
-    /**
-     * Frontend Handle
-     */
-    int handle;
-
-    /**
-     * Frontend Type
-     */
-    int type;
-
-    /**
-     * Minimum Frequency in Hertz
-     */
-    int minFrequency;
-
-    /**
-     * Maximum Frequency in Hertz
-     */
-    int maxFrequency;
-
-    /**
-     * Minimum symbols per second
-     */
-    int minSymbolRate;
-
-    /**
-     * Maximum symbols per second
-     */
-    int maxSymbolRate;
-
-    /**
-     * Range in Hertz
-     */
-    int acquireRange;
-
-    /**
-     * Frontends are assigned the same exclusiveGroupId if they cannot
-     * function at the same time, for instance because they share the same hardware module.
-     */
-    int exclusiveGroupId;
-
-    /**
-     * A list of supported status types that the client can query
-     */
-    int[] statusCaps;
-
-    /**
-     * Frontend Capabilities
-     */
-    TunerFrontendCapabilities caps;
-}
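
The exclusiveGroupId comment above describes a simple scheduling rule: frontends that share a group id cannot operate concurrently. A minimal C++ sketch of that check, using an illustrative local struct rather than the generated parcelable:

    #include <cstdint>

    // Illustrative mirror of the two TunerFrontendInfo fields that matter for exclusivity.
    struct FrontendInfo {
        int32_t handle;
        int32_t exclusiveGroupId;
    };

    // Frontends in the same exclusive group cannot be used at the same time
    // (e.g. they share a hardware module), so a scheduler must treat them as one resource.
    bool conflicts(const FrontendInfo& a, const FrontendInfo& b) {
        return a.handle != b.handle && a.exclusiveGroupId == b.exclusiveGroupId;
    }
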
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
deleted file mode 100644
index 84dd67a..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Capabilities.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * ISDB-S3 Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendIsdbs3Capabilities {
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-
-    /**
-     * Code Rate capability
-     */
-    int codeRateCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
deleted file mode 100644
index 9a11fd5..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbs3Settings.aidl
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Isdbs3 Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendIsdbs3Settings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    char streamId;
-
-    int streamIdType;
-
-    int modulation;
-
-    int codeRate;
-
-    /**
-     * Symbols per second
-     */
-    int symbolRate;
-
-    int rolloff;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
deleted file mode 100644
index 15dfdf7..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsCapabilities.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * ISDB-S Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendIsdbsCapabilities {
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-
-    /**
-     * Code Rate capability
-     */
-    int codeRateCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
deleted file mode 100644
index dff9f4a..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbsSettings.aidl
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Isdbs Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendIsdbsSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    char streamId;
-
-    int streamIdType;
-
-    int modulation;
-
-    int codeRate;
-
-    /**
-     * Symbols per second
-     */
-    int symbolRate;
-
-    int rolloff;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
deleted file mode 100644
index c9295d8..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtCapabilities.aidl
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * ISDB-T Frontend Capabilities interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendIsdbtCapabilities {
-    /**
-     * ISDB-T Mode capability
-     */
-    int modeCap;
-
-    /**
-     * Bandwidth capability
-     */
-    int bandwidthCap;
-
-    /**
-     * Modulation capability
-     */
-    int modulationCap;
-
-    /**
-     * Code Rate capability
-     */
-    int codeRateCap;
-
-    /**
-     * Guard Interval capability
-     */
-    int guardIntervalCap;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl
deleted file mode 100644
index 191f3a6..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendIsdbtSettings.aidl
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Isdbt Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendIsdbtSettings {
-    /**
-     * Signal frequency in Hertz
-     */
-    int frequency;
-
-    int modulation;
-
-    int bandwidth;
-
-    int mode;
-
-    int codeRate;
-
-    int guardInterval;
-
-    int serviceAreaId;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanAtsc3PlpInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanAtsc3PlpInfo.aidl
deleted file mode 100644
index 1b8fcbb..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanAtsc3PlpInfo.aidl
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Atsc3 Frontend Physical Layer Pipe Info.
- *
- * {@hide}
- */
-parcelable TunerFrontendScanAtsc3PlpInfo {
-    byte plpId;
-
-    boolean llsFlag;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanMessage.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanMessage.aidl
deleted file mode 100644
index 9921ca1..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendScanMessage.aidl
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendScanAtsc3PlpInfo;
-
-/**
- * Tuner Frontend Scan Message interface.
- *
- * {@hide}
- */
-union TunerFrontendScanMessage {
-    boolean isLocked;
-
-    boolean isEnd;
-
-    byte progressPercent;
-
-    int[] frequencies;
-
-    int[] symbolRates;
-
-    int hierarchy;
-
-    int analogType;
-
-    byte[] plpIds;
-
-    byte[] groupIds;
-
-    char[] inputStreamIds;
-
-    int std;
-
-    TunerFrontendScanAtsc3PlpInfo[] atsc3PlpInfos;
-
-    int modulation;
-
-    int annex;
-
-    boolean isHighPriority;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl
deleted file mode 100644
index 70a5f3e..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendSettings.aidl
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendUnionSettings;
-
-/**
- * Frontend Settings interface.
- *
- * {@hide}
- */
-parcelable TunerFrontendSettings {
-    TunerFrontendUnionSettings settings;
-
-    boolean isExtended;
-
-    int endFrequency;
-
-    int inversion;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl
deleted file mode 100644
index 2b3c01b..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatus.aidl
+++ /dev/null
@@ -1,187 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendStatusAtsc3PlpInfo;
-
-/**
- * Tuner Frontend Status interface.
- *
- * {@hide}
- */
-union TunerFrontendStatus {
-    /**
-     * Lock status for Demod in True/False.
-     */
-    boolean isDemodLocked;
-
-    /**
-     * SNR value measured by 0.001 dB.
-     */
-    int snr;
-
-    /**
-     * The number of error bits per 1 billion bits.
-     */
-    int ber;
-
-    /**
-     * The number of error packets per 1 billion packets.
-     */
-    int per;
-
-    /**
-     * The number of error bits per 1 billion bits before FEC.
-     */
-    int preBer;
-
-    /**
-     * Signal Quality in percent.
-     */
-    int signalQuality;
-
-    /**
-     * Signal Strength measured by 0.001 dBm.
-     */
-    int signalStrength;
-
-    /**
-     * Symbols per second
-     */
-    int symbolRate;
-
-    long innerFec;
-
-    /**
-     * Check frontend type to decide the hidl type value
-     */
-    int modulation;
-
-    int inversion;
-
-    int lnbVoltage;
-
-    byte plpId;
-
-    boolean isEWBS;
-
-    /**
-     * AGC value is normalized from 0 to 255.
-     */
-    byte agc;
-
-    boolean isLnaOn;
-
-    boolean[] isLayerError;
-
-    /**
-     * MER value measured by 0.001 dB
-     */
-    int mer;
-
-    /**
-     * Frequency difference in Hertz.
-     */
-    int freqOffset;
-
-    int hierarchy;
-
-    boolean isRfLocked;
-
-    /**
-     * A list of PLP status for tuned PLPs for ATSC3 frontend.
-     */
-    TunerFrontendStatusAtsc3PlpInfo[] plpInfo;
-
-    // 1.1 Extension Starting
-
-    /**
-     * Extended modulation status. Check frontend type to decide the hidl type value.
-     */
-    int[] modulations;
-
-    /**
-     * Extended bit error ratio status.
-     */
-    int[] bers;
-
-    /**
-     * Extended code rate status.
-     */
-    long[] codeRates;
-
-    /**
-     * Extended bandwidth status. Check frontend type to decide the hidl type value.
-     */
-    int bandwidth;
-
-    /**
-     * Extended guard interval status. Check frontend type to decide the hidl type value.
-     */
-    int interval;
-
-    /**
-     * Extended transmission mode status. Check frontend type to decide the hidl type value.
-     */
-    int transmissionMode;
-
-    /**
-     * Uncorrectable Error Counts of the frontend's Physical Layer Pipe (PLP)
-     * since the last tune operation.
-     */
-    int uec;
-
-    /**
-     * The current DVB-T2 system id status.
-     */
-    char systemId;
-
-    /**
-     * Frontend Interleaving Modes. Check frontend type to decide the hidl type value.
-     */
-    int[] interleaving;
-
-    /**
-     * Segments of all the channels, as defined in the ISDB-T specification.
-     */
-    byte[] isdbtSegment;
-
-    /**
-     * Transport Stream Data Rate in BPS of the current channel.
-     */
-    int[] tsDataRate;
-
-    /**
-     * Roll Off Type status of the frontend. Check frontend type to decide the hidl type value.
-     */
-    int rollOff;
-
-    /**
-     * If the frontend currently supports MISO or not.
-     */
-    boolean isMiso;
-
-    /**
-     * If the frontend code rate is linear or not.
-     */
-    boolean isLinear;
-
-    /**
-     * If short frames are enabled or not.
-     */
-    boolean isShortFrames;
-}
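
Several status fields above carry scaled units (SNR and MER in 0.001 dB, signal strength in 0.001 dBm, AGC normalized to 0..255). A short, self-contained C++ sketch of converting the raw values for display; the function and parameter names are illustrative:

    #include <cstdint>
    #include <cstdio>

    // Convert raw status fields into display units, following the removed comments:
    // snr and mer are in 0.001 dB, signalStrength in 0.001 dBm, agc is normalized to 0..255.
    void printStatus(int32_t snrMilliDb, int32_t strengthMilliDbm, uint8_t agc) {
        std::printf("SNR: %.3f dB\n", snrMilliDb / 1000.0);
        std::printf("Signal strength: %.3f dBm\n", strengthMilliDbm / 1000.0);
        std::printf("AGC: %.1f%%\n", agc * 100.0 / 255.0);
    }
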
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatusAtsc3PlpInfo.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatusAtsc3PlpInfo.aidl
deleted file mode 100644
index 4116c34..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendStatusAtsc3PlpInfo.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Copyright 2021, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-/**
- * Atsc3 Frontend Physical Layer Pipe Info in Frontend status.
- *
- * {@hide}
- */
-parcelable TunerFrontendStatusAtsc3PlpInfo {
-    /**
-     * PLP Id value.
-     */
-    byte plpId;
-
-    /**
-     * Demod Lock/Unlock status of this particular PLP.
-     */
-    boolean isLocked;
-
-    /**
-     * Uncorrectable Error Counts (UEC) of this particular PLP since last tune operation.
-     */
-    int uec;
-}
diff --git a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendUnionSettings.aidl b/services/tuner/aidl/android/media/tv/tuner/TunerFrontendUnionSettings.aidl
deleted file mode 100644
index c362c2a..0000000
--- a/services/tuner/aidl/android/media/tv/tuner/TunerFrontendUnionSettings.aidl
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Copyright 2020, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.tv.tuner;
-
-import android.media.tv.tuner.TunerFrontendAnalogSettings;
-import android.media.tv.tuner.TunerFrontendAtscSettings;
-import android.media.tv.tuner.TunerFrontendAtsc3Settings;
-import android.media.tv.tuner.TunerFrontendCableSettings;
-import android.media.tv.tuner.TunerFrontendDtmbSettings;
-import android.media.tv.tuner.TunerFrontendDvbsSettings;
-import android.media.tv.tuner.TunerFrontendDvbtSettings;
-import android.media.tv.tuner.TunerFrontendIsdbsSettings;
-import android.media.tv.tuner.TunerFrontendIsdbs3Settings;
-import android.media.tv.tuner.TunerFrontendIsdbtSettings;
-
-/**
- * Frontend Settings Union interface.
- *
- * {@hide}
- */
-union TunerFrontendUnionSettings {
-    TunerFrontendAnalogSettings analog;
-
-    TunerFrontendAtscSettings atsc;
-
-    TunerFrontendAtsc3Settings atsc3;
-
-    TunerFrontendCableSettings cable;
-
-    TunerFrontendDvbsSettings dvbs;
-
-    TunerFrontendDvbtSettings dvbt;
-
-    TunerFrontendIsdbsSettings isdbs;
-
-    TunerFrontendIsdbs3Settings isdbs3;
-
-    TunerFrontendIsdbtSettings isdbt;
-
-    TunerFrontendDtmbSettings dtmb;
-}
diff --git a/services/tuner/hidl/TunerHidlDemux.cpp b/services/tuner/hidl/TunerHidlDemux.cpp
new file mode 100644
index 0000000..a8151d2
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlDemux.cpp
@@ -0,0 +1,278 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerHidlDemux"
+
+#include "TunerHidlDemux.h"
+
+#include "TunerHidlDvr.h"
+#include "TunerHidlFilter.h"
+#include "TunerHidlTimeFilter.h"
+
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSubType;
+
+using HidlDemuxAlpFilterType = ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterType;
+using HidlDemuxFilterMainType = ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using HidlDemuxFilterType = ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using HidlDemuxIpFilterType = ::android::hardware::tv::tuner::V1_0::DemuxIpFilterType;
+using HidlDemuxMmtpFilterType = ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
+using HidlDemuxTlvFilterType = ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterType;
+using HidlDemuxTsFilterType = ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using HidlDvrType = ::android::hardware::tv::tuner::V1_0::DvrType;
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlDemux::TunerHidlDemux(sp<IDemux> demux, int id) {
+    mDemux = demux;
+    mDemuxId = id;
+}
+
+TunerHidlDemux::~TunerHidlDemux() {
+    mDemux = nullptr;
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSource(
+        const shared_ptr<ITunerFrontend>& in_frontend) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    int frontendId;
+    in_frontend->getFrontendId(&frontendId);
+    HidlResult res = mDemux->setFrontendDataSource(frontendId);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSourceById(int frontendId) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlResult res = mDemux->setFrontendDataSource(frontendId);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::openFilter(const DemuxFilterType& in_type,
+                                                int32_t in_bufferSize,
+                                                const shared_ptr<ITunerFilterCallback>& in_cb,
+                                                shared_ptr<ITunerFilter>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlDemuxFilterMainType mainType = static_cast<HidlDemuxFilterMainType>(in_type.mainType);
+    HidlDemuxFilterType filterType{
+            .mainType = mainType,
+    };
+
+    switch (mainType) {
+    case HidlDemuxFilterMainType::TS:
+        filterType.subType.tsFilterType(static_cast<HidlDemuxTsFilterType>(
+                in_type.subType.get<DemuxFilterSubType::Tag::tsFilterType>()));
+        break;
+    case HidlDemuxFilterMainType::MMTP:
+        filterType.subType.mmtpFilterType(static_cast<HidlDemuxMmtpFilterType>(
+                in_type.subType.get<DemuxFilterSubType::Tag::mmtpFilterType>()));
+        break;
+    case HidlDemuxFilterMainType::IP:
+        filterType.subType.ipFilterType(static_cast<HidlDemuxIpFilterType>(
+                in_type.subType.get<DemuxFilterSubType::Tag::ipFilterType>()));
+        break;
+    case HidlDemuxFilterMainType::TLV:
+        filterType.subType.tlvFilterType(static_cast<HidlDemuxTlvFilterType>(
+                in_type.subType.get<DemuxFilterSubType::Tag::tlvFilterType>()));
+        break;
+    case HidlDemuxFilterMainType::ALP:
+        filterType.subType.alpFilterType(static_cast<HidlDemuxAlpFilterType>(
+                in_type.subType.get<DemuxFilterSubType::Tag::alpFilterType>()));
+        break;
+    }
+    HidlResult status;
+    sp<HidlIFilter> filterSp;
+    sp<TunerHidlFilter::FilterCallback> filterCb = new TunerHidlFilter::FilterCallback(in_cb);
+    sp<::android::hardware::tv::tuner::V1_0::IFilterCallback> cbSp = filterCb;
+    mDemux->openFilter(filterType, static_cast<uint32_t>(in_bufferSize), cbSp,
+                       [&](HidlResult r, const sp<HidlIFilter>& filter) {
+                           filterSp = filter;
+                           status = r;
+                       });
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlResult status;
+    sp<HidlITimeFilter> filterSp;
+    mDemux->openTimeFilter([&](HidlResult r, const sp<HidlITimeFilter>& filter) {
+        filterSp = filter;
+        status = r;
+    });
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlTimeFilter>(filterSp);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
+                                                   int32_t* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    uint32_t avSyncHwId;
+    HidlResult res;
+    sp<HidlIFilter> halFilter = static_cast<TunerHidlFilter*>(tunerFilter.get())->getHalFilter();
+    mDemux->getAvSyncHwId(halFilter, [&](HidlResult r, uint32_t id) {
+        res = r;
+        avSyncHwId = id;
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    *_aidl_return = (int)avSyncHwId;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    uint64_t time;
+    HidlResult res;
+    mDemux->getAvSyncTime(static_cast<uint32_t>(avSyncHwId), [&](HidlResult r, uint64_t ts) {
+        res = r;
+        time = ts;
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    *_aidl_return = (int64_t)time;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
+                                             const shared_ptr<ITunerDvrCallback>& in_cb,
+                                             shared_ptr<ITunerDvr>* _aidl_return) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlResult res;
+    sp<HidlIDvrCallback> callback = new TunerHidlDvr::DvrCallback(in_cb);
+    sp<HidlIDvr> hidlDvr;
+    mDemux->openDvr(static_cast<HidlDvrType>(in_dvbType), in_bufferSize, callback,
+                    [&](HidlResult r, const sp<HidlIDvr>& dvr) {
+                        hidlDvr = dvr;
+                        res = r;
+                    });
+    if (res != HidlResult::SUCCESS) {
+        *_aidl_return = nullptr;
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDvr>(hidlDvr, in_dvbType);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::connectCiCam(int32_t ciCamId) {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlResult res = mDemux->connectCiCam(static_cast<uint32_t>(ciCamId));
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::disconnectCiCam() {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlResult res = mDemux->disconnectCiCam();
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDemux::close() {
+    if (mDemux == nullptr) {
+        ALOGE("IDemux is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(HidlResult::UNAVAILABLE));
+    }
+
+    HidlResult res = mDemux->close();
+    mDemux = nullptr;
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
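
Every method in the new TunerHidlDemux.cpp follows the same shape: check mDemux, invoke the HIDL method (passing a lambda when the result arrives via callback), and map the HidlResult into a ScopedAStatus. A minimal, self-contained sketch of the callback-to-synchronous-return part of that pattern; the types and functions below are illustrative stand-ins, not the real HIDL interfaces:

    #include <cstdint>
    #include <functional>

    enum class Result : int32_t { SUCCESS = 0, UNAVAILABLE = 1 };

    // Stand-in for a HIDL method that delivers its result through a callback
    // (illustrative; not the real IDemux interface).
    void openThingHidl(const std::function<void(Result, int32_t)>& cb) {
        cb(Result::SUCCESS, 42);
    }

    // The HIDL call is synchronous, so locals captured by reference in the callback are
    // filled in before the call returns; the result is then translated into the
    // synchronous return path expected by the AIDL side.
    int32_t openThingSync(Result* outStatus) {
        Result status = Result::UNAVAILABLE;
        int32_t handle = -1;
        openThingHidl([&](Result r, int32_t h) {
            status = r;
            handle = h;
        });
        *outStatus = status;
        return handle;
    }
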
diff --git a/services/tuner/hidl/TunerHidlDemux.h b/services/tuner/hidl/TunerHidlDemux.h
new file mode 100644
index 0000000..d535da6
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlDemux.h
@@ -0,0 +1,75 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLDEMUX_H
+#define ANDROID_MEDIA_TUNERHIDLDEMUX_H
+
+#include <aidl/android/media/tv/tuner/BnTunerDemux.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+
+using ::aidl::android::hardware::tv::tuner::DemuxFilterType;
+using ::aidl::android::hardware::tv::tuner::DvrType;
+using ::android::sp;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::tv::tuner::V1_0::IDemux;
+using ::std::shared_ptr;
+using ::std::vector;
+
+using HidlIDemux = ::android::hardware::tv::tuner::V1_0::IDemux;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+class TunerHidlDemux : public BnTunerDemux {
+public:
+    TunerHidlDemux(sp<HidlIDemux> demux, int demuxId);
+    virtual ~TunerHidlDemux();
+
+    ::ndk::ScopedAStatus setFrontendDataSource(
+            const shared_ptr<ITunerFrontend>& in_frontend) override;
+    ::ndk::ScopedAStatus setFrontendDataSourceById(int frontendId) override;
+    ::ndk::ScopedAStatus openFilter(const DemuxFilterType& in_type, int32_t in_bufferSize,
+                                    const shared_ptr<ITunerFilterCallback>& in_cb,
+                                    shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus getAvSyncHwId(const shared_ptr<ITunerFilter>& in_tunerFilter,
+                                       int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getAvSyncTime(int32_t in_avSyncHwId, int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus openDvr(DvrType in_dvbType, int32_t in_bufferSize,
+                                 const shared_ptr<ITunerDvrCallback>& in_cb,
+                                 shared_ptr<ITunerDvr>* _aidl_return) override;
+    ::ndk::ScopedAStatus connectCiCam(int32_t in_ciCamId) override;
+    ::ndk::ScopedAStatus disconnectCiCam() override;
+    ::ndk::ScopedAStatus close() override;
+
+    int getId() { return mDemuxId; }
+
+private:
+    sp<HidlIDemux> mDemux;
+    int mDemuxId;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLDEMUX_H
diff --git a/services/tuner/hidl/TunerHidlDescrambler.cpp b/services/tuner/hidl/TunerHidlDescrambler.cpp
new file mode 100644
index 0000000..dd8cd9c
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlDescrambler.cpp
@@ -0,0 +1,149 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerHidlDescrambler"
+
+#include "TunerHidlDescrambler.h"
+
+#include <aidl/android/hardware/tv/tuner/Result.h>
+
+#include "TunerHidlDemux.h"
+#include "TunerHidlFilter.h"
+
+using ::aidl::android::hardware::tv::tuner::Result;
+
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlDescrambler::TunerHidlDescrambler(sp<HidlIDescrambler> descrambler) {
+    mDescrambler = descrambler;
+}
+
+TunerHidlDescrambler::~TunerHidlDescrambler() {
+    mDescrambler = nullptr;
+}
+
+::ndk::ScopedAStatus TunerHidlDescrambler::setDemuxSource(
+        const shared_ptr<ITunerDemux>& in_tunerDemux) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDescrambler->setDemuxSource(
+            static_cast<TunerHidlDemux*>(in_tunerDemux.get())->getId());
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDescrambler->setKeyToken(in_keyToken);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDescrambler::addPid(
+        const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    sp<HidlIFilter> halFilter =
+            (in_optionalSourceFilter == nullptr)
+                    ? nullptr
+                    : static_cast<TunerHidlFilter*>(in_optionalSourceFilter.get())->getHalFilter();
+    HidlResult res = mDescrambler->addPid(getHidlDemuxPid(in_pid), halFilter);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDescrambler::removePid(
+        const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    sp<HidlIFilter> halFilter =
+            (in_optionalSourceFilter == nullptr)
+                    ? nullptr
+                    : static_cast<TunerHidlFilter*>(in_optionalSourceFilter.get())->getHalFilter();
+    HidlResult res = mDescrambler->removePid(getHidlDemuxPid(in_pid), halFilter);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDescrambler::close() {
+    if (mDescrambler == nullptr) {
+        ALOGE("IDescrambler is not initialized.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDescrambler->close();
+    mDescrambler = nullptr;
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+HidlDemuxPid TunerHidlDescrambler::getHidlDemuxPid(const DemuxPid& pid) {
+    HidlDemuxPid hidlPid;
+    switch (pid.getTag()) {
+    case DemuxPid::tPid: {
+        hidlPid.tPid((uint16_t)pid.get<DemuxPid::Tag::tPid>());
+        break;
+    }
+    case DemuxPid::mmtpPid: {
+        hidlPid.mmtpPid((uint16_t)pid.get<DemuxPid::Tag::mmtpPid>());
+        break;
+    }
+    }
+    return hidlPid;
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
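
Each method in TunerHidlDescrambler above applies the same convention when bridging results: a HIDL Result::SUCCESS is translated to ScopedAStatus::ok(), and any other HIDL Result is returned to the AIDL client as a service-specific error carrying the raw code. A minimal illustrative helper capturing that convention (toAidlStatus is a hypothetical name, not part of this change):

static ::ndk::ScopedAStatus toAidlStatus(HidlResult res) {
    // SUCCESS maps to an ok binder status; every other HIDL Result value is
    // forwarded unchanged as the service-specific error code.
    if (res == HidlResult::SUCCESS) {
        return ::ndk::ScopedAStatus::ok();
    }
    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
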
diff --git a/services/tuner/hidl/TunerHidlDescrambler.h b/services/tuner/hidl/TunerHidlDescrambler.h
new file mode 100644
index 0000000..9494968
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlDescrambler.h
@@ -0,0 +1,66 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLDESCRAMBLER_H
+#define ANDROID_MEDIA_TUNERHIDLDESCRAMBLER_H
+
+#include <aidl/android/hardware/tv/tuner/IDescrambler.h>
+#include <aidl/android/media/tv/tuner/BnTunerDescrambler.h>
+#include <android/hardware/tv/tuner/1.0/IDescrambler.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+
+using ::aidl::android::hardware::tv::tuner::DemuxPid;
+using ::android::sp;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+using HidlDemuxPid = ::android::hardware::tv::tuner::V1_0::DemuxPid;
+using HidlIDescrambler = ::android::hardware::tv::tuner::V1_0::IDescrambler;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+class TunerHidlDescrambler : public BnTunerDescrambler {
+public:
+    TunerHidlDescrambler(sp<HidlIDescrambler> descrambler);
+    virtual ~TunerHidlDescrambler();
+
+    ::ndk::ScopedAStatus setDemuxSource(const std::shared_ptr<ITunerDemux>& in_tunerDemux) override;
+    ::ndk::ScopedAStatus setKeyToken(const std::vector<uint8_t>& in_keyToken) override;
+    ::ndk::ScopedAStatus addPid(
+            const DemuxPid& in_pid,
+            const std::shared_ptr<ITunerFilter>& in_optionalSourceFilter) override;
+    ::ndk::ScopedAStatus removePid(
+            const DemuxPid& in_pid,
+            const std::shared_ptr<ITunerFilter>& in_optionalSourceFilter) override;
+    ::ndk::ScopedAStatus close() override;
+
+private:
+    HidlDemuxPid getHidlDemuxPid(const DemuxPid& pid);
+
+    sp<HidlIDescrambler> mDescrambler;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLDESCRAMBLER_H
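
As a usage sketch only: TunerHidlDescrambler is an NDK-backend binder class (it derives from BnTunerDescrambler, which is reference-counted through ::ndk::SharedRefBase), so the service side would construct it with SharedRefBase::make around the HIDL descrambler handle. The wrapDescrambler helper below is hypothetical and assumes it is written inside the same aidl::android::media::tv::tuner namespace as the class:

std::shared_ptr<ITunerDescrambler> wrapDescrambler(const sp<HidlIDescrambler>& halDescrambler) {
    // NDK Bn* classes are created with SharedRefBase::make rather than
    // std::make_shared so that binder holds the correct reference count.
    return ::ndk::SharedRefBase::make<TunerHidlDescrambler>(halDescrambler);
}
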
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
new file mode 100644
index 0000000..1a619d5
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -0,0 +1,257 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerHidlDvr"
+
+#include "TunerHidlDvr.h"
+
+#include <aidl/android/hardware/tv/tuner/DataFormat.h>
+#include <aidl/android/hardware/tv/tuner/PlaybackStatus.h>
+#include <aidl/android/hardware/tv/tuner/RecordStatus.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <fmq/ConvertMQDescriptors.h>
+
+using ::aidl::android::hardware::tv::tuner::DataFormat;
+using ::aidl::android::hardware::tv::tuner::PlaybackStatus;
+using ::aidl::android::hardware::tv::tuner::RecordStatus;
+using ::aidl::android::hardware::tv::tuner::Result;
+using ::android::unsafeHidlToAidlMQDescriptor;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::MQDescriptorSync;
+
+using HidlDataFormat = ::android::hardware::tv::tuner::V1_0::DataFormat;
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+using MQDesc = MQDescriptorSync<uint8_t>;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlDvr::TunerHidlDvr(sp<HidlIDvr> dvr, DvrType type) {
+    mDvr = dvr;
+    mType = type;
+}
+
+TunerHidlDvr::~TunerHidlDvr() {
+    mDvr = nullptr;
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    MQDesc dvrMQDesc;
+    HidlResult res;
+    mDvr->getQueueDesc([&](HidlResult r, const MQDesc& desc) {
+        dvrMQDesc = desc;
+        res = r;
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    AidlMQDesc aidlMQDesc;
+    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(dvrMQDesc, &aidlMQDesc);
+    *_aidl_return = move(aidlMQDesc);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::configure(const DvrSettings& in_settings) {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDvr->configure(getHidlDvrSettings(in_settings));
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (in_filter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    sp<HidlIFilter> hidlFilter = static_cast<TunerHidlFilter*>(in_filter.get())->getHalFilter();
+    if (hidlFilter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    HidlResult res = mDvr->attachFilter(hidlFilter);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (in_filter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    sp<HidlIFilter> halFilter = (static_cast<TunerHidlFilter*>(in_filter.get()))->getHalFilter();
+    if (halFilter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    HidlResult res = mDvr->detachFilter(halFilter);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::start() {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDvr->start();
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::stop() {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDvr->stop();
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::flush() {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDvr->flush();
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlDvr::close() {
+    if (mDvr == nullptr) {
+        ALOGE("IDvr is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mDvr->close();
+    mDvr = nullptr;
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+HidlDvrSettings TunerHidlDvr::getHidlDvrSettings(const DvrSettings& settings) {
+    HidlDvrSettings s;
+    switch (mType) {
+    case DvrType::PLAYBACK: {
+        s.playback({
+                .statusMask =
+                        static_cast<uint8_t>(settings.get<DvrSettings::playback>().statusMask),
+                .lowThreshold =
+                        static_cast<uint32_t>(settings.get<DvrSettings::playback>().lowThreshold),
+                .highThreshold =
+                        static_cast<uint32_t>(settings.get<DvrSettings::playback>().highThreshold),
+                .dataFormat = static_cast<HidlDataFormat>(
+                        settings.get<DvrSettings::playback>().dataFormat),
+                .packetSize =
+                        static_cast<uint8_t>(settings.get<DvrSettings::playback>().packetSize),
+        });
+        return s;
+    }
+    case DvrType::RECORD: {
+        s.record({
+                .statusMask = static_cast<uint8_t>(settings.get<DvrSettings::record>().statusMask),
+                .lowThreshold =
+                        static_cast<uint32_t>(settings.get<DvrSettings::record>().lowThreshold),
+                .highThreshold =
+                        static_cast<uint32_t>(settings.get<DvrSettings::record>().highThreshold),
+                .dataFormat =
+                        static_cast<HidlDataFormat>(settings.get<DvrSettings::record>().dataFormat),
+                .packetSize = static_cast<uint8_t>(settings.get<DvrSettings::record>().packetSize),
+        });
+        return s;
+    }
+    default:
+        break;
+    }
+    return s;
+}
+
+/////////////// IDvrCallback ///////////////////////
+Return<void> TunerHidlDvr::DvrCallback::onRecordStatus(const HidlRecordStatus status) {
+    if (mTunerDvrCallback != nullptr) {
+        mTunerDvrCallback->onRecordStatus(static_cast<RecordStatus>(status));
+    }
+    return Void();
+}
+
+Return<void> TunerHidlDvr::DvrCallback::onPlaybackStatus(const HidlPlaybackStatus status) {
+    if (mTunerDvrCallback != nullptr) {
+        mTunerDvrCallback->onPlaybackStatus(static_cast<PlaybackStatus>(status));
+    }
+    return Void();
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
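
getQueueDesc() above hands the DVR's fast message queue back to the AIDL client by converting the HIDL MQDescriptorSync<uint8_t> into the AIDL MQDescriptor<int8_t, SynchronizedReadWrite>. A condensed sketch of that step (bridgeQueueDescriptor is an illustrative name only):

static AidlMQDesc bridgeQueueDescriptor(const MQDesc& hidlDesc) {
    AidlMQDesc aidlDesc;
    // The "unsafe" conversion does not validate that the payload types are
    // layout-compatible; here uint8_t and int8_t have the same size, so the
    // queue described by the HIDL descriptor can be reused on the AIDL side.
    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(hidlDesc, &aidlDesc);
    return aidlDesc;
}
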
diff --git a/services/tuner/hidl/TunerHidlDvr.h b/services/tuner/hidl/TunerHidlDvr.h
new file mode 100644
index 0000000..a280ff7
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlDvr.h
@@ -0,0 +1,91 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLDVR_H
+#define ANDROID_MEDIA_TUNERHIDLDVR_H
+
+#include <aidl/android/hardware/tv/tuner/DvrSettings.h>
+#include <aidl/android/hardware/tv/tuner/DvrType.h>
+#include <aidl/android/media/tv/tuner/BnTunerDvr.h>
+#include <aidl/android/media/tv/tuner/ITunerDvrCallback.h>
+#include <android/hardware/tv/tuner/1.0/IDvr.h>
+#include <android/hardware/tv/tuner/1.0/IDvrCallback.h>
+
+#include "TunerHidlFilter.h"
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::hardware::tv::tuner::DvrSettings;
+using ::aidl::android::hardware::tv::tuner::DvrType;
+using ::android::sp;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::std::shared_ptr;
+using ::std::vector;
+
+using HidlDvrSettings = ::android::hardware::tv::tuner::V1_0::DvrSettings;
+using HidlIDvr = ::android::hardware::tv::tuner::V1_0::IDvr;
+using HidlIDvrCallback = ::android::hardware::tv::tuner::V1_0::IDvrCallback;
+using HidlPlaybackStatus = ::android::hardware::tv::tuner::V1_0::PlaybackStatus;
+using HidlRecordStatus = ::android::hardware::tv::tuner::V1_0::RecordStatus;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
+
+class TunerHidlDvr : public BnTunerDvr {
+public:
+    TunerHidlDvr(sp<HidlIDvr> dvr, DvrType type);
+    ~TunerHidlDvr();
+
+    ::ndk::ScopedAStatus getQueueDesc(AidlMQDesc* _aidl_return) override;
+    ::ndk::ScopedAStatus configure(const DvrSettings& in_settings) override;
+    ::ndk::ScopedAStatus attachFilter(const shared_ptr<ITunerFilter>& in_filter) override;
+    ::ndk::ScopedAStatus detachFilter(const shared_ptr<ITunerFilter>& in_filter) override;
+    ::ndk::ScopedAStatus start() override;
+    ::ndk::ScopedAStatus stop() override;
+    ::ndk::ScopedAStatus flush() override;
+    ::ndk::ScopedAStatus close() override;
+
+    struct DvrCallback : public HidlIDvrCallback {
+        DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
+              : mTunerDvrCallback(tunerDvrCallback) {}
+
+        virtual Return<void> onRecordStatus(const HidlRecordStatus status);
+        virtual Return<void> onPlaybackStatus(const HidlPlaybackStatus status);
+
+    private:
+        shared_ptr<ITunerDvrCallback> mTunerDvrCallback;
+    };
+
+private:
+    HidlDvrSettings getHidlDvrSettings(const DvrSettings& settings);
+
+    sp<HidlIDvr> mDvr;
+    DvrType mType;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLDVR_H
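
A usage sketch only, assuming it sits in the same namespace as the class and that an IDvr handle has already been opened through the HIDL demux (the helper names wrapDvr and makeDvrCallback are hypothetical): the DvrCallback adapter is what gets handed to the HIDL HAL and from then on forwards HIDL RecordStatus/PlaybackStatus values to the client's AIDL ITunerDvrCallback, while TunerHidlDvr wraps the IDvr handle itself.

std::shared_ptr<ITunerDvr> wrapDvr(const sp<HidlIDvr>& halDvr, DvrType type) {
    return ::ndk::SharedRefBase::make<TunerHidlDvr>(halDvr, type);
}

sp<HidlIDvrCallback> makeDvrCallback(const shared_ptr<ITunerDvrCallback>& cb) {
    // The adapter holds the AIDL callback and translates the HIDL status enums
    // in onRecordStatus()/onPlaybackStatus() (see TunerHidlDvr.cpp).
    return new TunerHidlDvr::DvrCallback(cb);
}
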
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
new file mode 100644
index 0000000..fe74a5c
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -0,0 +1,1286 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerHidlFilter"
+
+#include "TunerHidlFilter.h"
+
+#include <aidl/android/hardware/tv/tuner/Constant.h>
+#include <aidl/android/hardware/tv/tuner/DemuxScIndex.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <binder/IPCThreadState.h>
+#include <fmq/ConvertMQDescriptors.h>
+
+#include "TunerHelper.h"
+#include "TunerHidlService.h"
+
+using ::aidl::android::hardware::tv::tuner::AudioExtraMetaData;
+using ::aidl::android::hardware::tv::tuner::AudioStreamType;
+using ::aidl::android::hardware::tv::tuner::Constant;
+using ::aidl::android::hardware::tv::tuner::DemuxAlpFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxAlpFilterSettingsFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterDownloadEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterIpPayloadEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterMainType;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterMediaEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterMediaEventExtraMetaData;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterMmtpRecordEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterMonitorEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterPesEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterScIndexMask;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSectionBits;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSectionEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSectionSettingsCondition;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSectionSettingsConditionTableInfo;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSubType;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterTemiEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterTsRecordEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxIpAddress;
+using ::aidl::android::hardware::tv::tuner::DemuxIpAddressIpAddress;
+using ::aidl::android::hardware::tv::tuner::DemuxIpFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxIpFilterSettingsFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxMmtpFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxMmtpFilterSettingsFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxMmtpFilterType;
+using ::aidl::android::hardware::tv::tuner::DemuxPid;
+using ::aidl::android::hardware::tv::tuner::DemuxScIndex;
+using ::aidl::android::hardware::tv::tuner::DemuxTlvFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxTlvFilterSettingsFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxTsFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxTsFilterSettingsFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxTsFilterType;
+using ::aidl::android::hardware::tv::tuner::Result;
+using ::aidl::android::hardware::tv::tuner::ScramblingStatus;
+using ::android::dupToAidl;
+using ::android::IPCThreadState;
+using ::android::makeFromAidl;
+using ::android::unsafeHidlToAidlMQDescriptor;
+using ::android::hardware::hidl_handle;
+
+using HidlDemuxAlpLengthType = ::android::hardware::tv::tuner::V1_0::DemuxAlpLengthType;
+using HidlDemuxFilterMainType = ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using HidlDemuxIpAddress = ::android::hardware::tv::tuner::V1_0::DemuxIpAddress;
+using HidlDemuxMmtpFilterType = ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
+using HidlDemuxMmtpPid = ::android::hardware::tv::tuner::V1_0::DemuxMmtpPid;
+using HidlDemuxRecordScIndexType = ::android::hardware::tv::tuner::V1_0::DemuxRecordScIndexType;
+using HidlDemuxStreamId = ::android::hardware::tv::tuner::V1_0::DemuxStreamId;
+using HidlDemuxTsFilterType = ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+using HidlAudioStreamType = ::android::hardware::tv::tuner::V1_1::AudioStreamType;
+using HidlConstant = ::android::hardware::tv::tuner::V1_1::Constant;
+using HidlVideoStreamType = ::android::hardware::tv::tuner::V1_1::VideoStreamType;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlFilter::TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb,
+                                 DemuxFilterType type)
+      : mFilter(filter),
+        mType(type),
+        mStarted(false),
+        mShared(false),
+        mClientPid(-1),
+        mFilterCallback(cb) {
+    mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
+}
+
+TunerHidlFilter::~TunerHidlFilter() {
+    Mutex::Autolock _l(mLock);
+    mFilter = nullptr;
+    mFilter_1_1 = nullptr;
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    MQDesc filterMQDesc;
+    HidlResult res;
+    mFilter->getQueueDesc([&](HidlResult r, const MQDesc& desc) {
+        filterMQDesc = desc;
+        res = r;
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    AidlMQDesc aidlMQDesc;
+    unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(filterMQDesc, &aidlMQDesc);
+    *_aidl_return = move(aidlMQDesc);
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::getId(int32_t* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlResult res;
+    mFilter->getId([&](HidlResult r, uint32_t filterId) {
+        res = r;
+        mId = filterId;
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    *_aidl_return = mId;
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::getId64Bit(int64_t* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlResult res;
+    mFilter_1_1->getId64Bit([&](HidlResult r, uint64_t filterId) {
+        res = r;
+        mId64Bit = filterId;
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    *_aidl_return = mId64Bit;
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::configure(const DemuxFilterSettings& in_settings) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlDemuxFilterSettings settings;
+    switch (in_settings.getTag()) {
+    case DemuxFilterSettings::ts: {
+        getHidlTsSettings(in_settings, settings);
+        break;
+    }
+    case DemuxFilterSettings::mmtp: {
+        getHidlMmtpSettings(in_settings, settings);
+        break;
+    }
+    case DemuxFilterSettings::ip: {
+        getHidlIpSettings(in_settings, settings);
+        break;
+    }
+    case DemuxFilterSettings::tlv: {
+        getHidlTlvSettings(in_settings, settings);
+        break;
+    }
+    case DemuxFilterSettings::alp: {
+        getHidlAlpSettings(in_settings, settings);
+        break;
+    }
+    }
+
+    HidlResult res = mFilter->configure(settings);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::configureMonitorEvent(int32_t monitorEventType) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlResult res = mFilter_1_1->configureMonitorEvent(monitorEventType);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::configureIpFilterContextId(int32_t cid) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlResult res = mFilter_1_1->configureIpCid(cid);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::configureAvStreamType(const AvStreamType& in_avStreamType) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlAvStreamType type;
+    if (!getHidlAvStreamType(in_avStreamType, type)) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlResult res = mFilter_1_1->configureAvStreamType(type);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (filter == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    sp<HidlIFilter> hidlFilter = static_cast<TunerHidlFilter*>(filter.get())->getHalFilter();
+    HidlResult res = mFilter->setDataSource(hidlFilter);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::getAvSharedHandle(NativeHandle* out_avMemory,
+                                                        int64_t* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter_1_1 == nullptr) {
+        ALOGE("IFilter_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    HidlResult res;
+    mFilter_1_1->getAvSharedHandle([&](HidlResult r, hidl_handle avMemory, uint64_t avMemSize) {
+        res = r;
+        if (res == HidlResult::SUCCESS) {
+            *out_avMemory = dupToAidl(avMemory);
+            *_aidl_return = static_cast<int64_t>(avMemSize);
+        }
+    });
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::releaseAvHandle(const NativeHandle& in_handle,
+                                                      int64_t in_avDataId) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        ALOGD("%s is called on a shared filter", __FUNCTION__);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    hidl_handle handle;
+    handle.setTo(makeFromAidl(in_handle), true);
+    HidlResult res = mFilter->releaseAvHandle(handle, in_avDataId);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    // Call to HAL to make sure the transport FD was able to be closed by binder.
+    // This is a tricky workaround for a problem in Binder.
+    // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+    mFilter->getId([&](HidlResult /* r */, uint32_t /* filterId*/){});
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::start() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    HidlResult res = mFilter->start();
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    mStarted = true;
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::stop() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    HidlResult res = mFilter->stop();
+    mStarted = false;
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::flush() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            ALOGD("%s is called in wrong process", __FUNCTION__);
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::INVALID_STATE));
+        }
+    }
+
+    HidlResult res = mFilter->flush();
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::close() {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared) {
+        IPCThreadState* ipc = IPCThreadState::self();
+        int32_t callingPid = ipc->getCallingPid();
+        if (callingPid == mClientPid) {
+            if (mFilterCallback != nullptr) {
+                mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
+                mFilterCallback->detachSharedFilterCallback();
+            }
+            TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+        } else {
+            // Calling from shared process, do not really close this filter.
+            if (mFilterCallback != nullptr) {
+                mFilterCallback->detachSharedFilterCallback();
+            }
+            mStarted = false;
+            return ::ndk::ScopedAStatus::ok();
+        }
+    }
+
+    if (mFilterCallback != nullptr) {
+        mFilterCallback->detachCallbacks();
+    }
+    HidlResult res = mFilter->close();
+    mFilter = nullptr;
+    mFilter_1_1 = nullptr;
+    mStarted = false;
+    mShared = false;
+    mClientPid = -1;
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::acquireSharedFilterToken(string* _aidl_return) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (mShared || mStarted) {
+        ALOGD("create SharedFilter in wrong state");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    IPCThreadState* ipc = IPCThreadState::self();
+    mClientPid = ipc->getCallingPid();
+    string token =
+            TunerHidlService::getTunerService()->addFilterToShared(this->ref<TunerHidlFilter>());
+    _aidl_return->assign(token);
+    mShared = true;
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
+    Mutex::Autolock _l(mLock);
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (!mShared) {
+        // The filter is not shared or the shared filter has been closed.
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    if (mFilterCallback != nullptr) {
+        mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
+        mFilterCallback->detachSharedFilterCallback();
+    }
+
+    TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+    mShared = false;
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::getFilterType(DemuxFilterType* _aidl_return) {
+    if (mFilter == nullptr) {
+        ALOGE("IFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    *_aidl_return = mType;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFilter::setDelayHint(const FilterDelayHint&) {
+    // setDelayHint is not supported in HIDL HAL
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+bool TunerHidlFilter::isSharedFilterAllowed(int callingPid) {
+    return mShared && mClientPid != callingPid;
+}
+
+void TunerHidlFilter::attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb) {
+    if (mFilterCallback != nullptr) {
+        mFilterCallback->attachSharedFilterCallback(in_cb);
+    }
+}
+
+sp<HidlIFilter> TunerHidlFilter::getHalFilter() {
+    return mFilter;
+}
+
+bool TunerHidlFilter::getHidlAvStreamType(const AvStreamType avStreamType, HidlAvStreamType& type) {
+    if (isAudioFilter()) {
+        AudioStreamType audio = avStreamType.get<AvStreamType::audio>();
+        if (static_cast<int32_t>(audio) > static_cast<int32_t>(HidlAudioStreamType::DRA)) {
+            return false;
+        }
+        type.audio(static_cast<HidlAudioStreamType>(audio));
+        return true;
+    }
+
+    if (isVideoFilter()) {
+        type.video(static_cast<HidlVideoStreamType>(avStreamType.get<AvStreamType::video>()));
+        return true;
+    }
+
+    return false;
+}
+
+bool TunerHidlFilter::isAudioFilter() {
+    return (mType.mainType == DemuxFilterMainType::TS &&
+            mType.subType.get<DemuxFilterSubType::tsFilterType>() == DemuxTsFilterType::AUDIO) ||
+           (mType.mainType == DemuxFilterMainType::MMTP &&
+            mType.subType.get<DemuxFilterSubType::mmtpFilterType>() == DemuxMmtpFilterType::AUDIO);
+}
+
+bool TunerHidlFilter::isVideoFilter() {
+    return (mType.mainType == DemuxFilterMainType::TS &&
+            mType.subType.get<DemuxFilterSubType::tsFilterType>() == DemuxTsFilterType::VIDEO) ||
+           (mType.mainType == DemuxFilterMainType::MMTP &&
+            mType.subType.get<DemuxFilterSubType::mmtpFilterType>() == DemuxMmtpFilterType::VIDEO);
+}
+
+void TunerHidlFilter::getHidlTsSettings(const DemuxFilterSettings& settings,
+                                        HidlDemuxFilterSettings& hidlSettings) {
+    const DemuxTsFilterSettings& tsConf = settings.get<DemuxFilterSettings::ts>();
+    HidlDemuxTsFilterSettings ts{
+            .tpid = static_cast<uint16_t>(tsConf.tpid),
+    };
+
+    switch (tsConf.filterSettings.getTag()) {
+    case DemuxTsFilterSettingsFilterSettings::av: {
+        ts.filterSettings.av(getHidlAvSettings(
+                tsConf.filterSettings.get<DemuxTsFilterSettingsFilterSettings::av>()));
+        break;
+    }
+    case DemuxTsFilterSettingsFilterSettings::section: {
+        ts.filterSettings.section(getHidlSectionSettings(
+                tsConf.filterSettings.get<DemuxTsFilterSettingsFilterSettings::section>()));
+        break;
+    }
+    case DemuxTsFilterSettingsFilterSettings::pesData: {
+        ts.filterSettings.pesData(getHidlPesDataSettings(
+                tsConf.filterSettings.get<DemuxTsFilterSettingsFilterSettings::pesData>()));
+        break;
+    }
+    case DemuxTsFilterSettingsFilterSettings::record: {
+        ts.filterSettings.record(getHidlRecordSettings(
+                tsConf.filterSettings.get<DemuxTsFilterSettingsFilterSettings::record>()));
+        break;
+    }
+    default: {
+        ts.filterSettings.noinit();
+        break;
+    }
+    }
+    hidlSettings.ts(ts);
+}
+
+void TunerHidlFilter::getHidlMmtpSettings(const DemuxFilterSettings& settings,
+                                          HidlDemuxFilterSettings& hidlSettings) {
+    const DemuxMmtpFilterSettings& mmtpConf = settings.get<DemuxFilterSettings::mmtp>();
+    HidlDemuxMmtpFilterSettings mmtp{
+            .mmtpPid = static_cast<HidlDemuxMmtpPid>(mmtpConf.mmtpPid),
+    };
+
+    switch (mmtpConf.filterSettings.getTag()) {
+    case DemuxMmtpFilterSettingsFilterSettings::av: {
+        mmtp.filterSettings.av(getHidlAvSettings(
+                mmtpConf.filterSettings.get<DemuxMmtpFilterSettingsFilterSettings::av>()));
+        break;
+    }
+    case DemuxMmtpFilterSettingsFilterSettings::section: {
+        mmtp.filterSettings.section(getHidlSectionSettings(
+                mmtpConf.filterSettings.get<DemuxMmtpFilterSettingsFilterSettings::section>()));
+        break;
+    }
+    case DemuxMmtpFilterSettingsFilterSettings::pesData: {
+        mmtp.filterSettings.pesData(getHidlPesDataSettings(
+                mmtpConf.filterSettings.get<DemuxMmtpFilterSettingsFilterSettings::pesData>()));
+        break;
+    }
+    case DemuxMmtpFilterSettingsFilterSettings::record: {
+        mmtp.filterSettings.record(getHidlRecordSettings(
+                mmtpConf.filterSettings.get<DemuxMmtpFilterSettingsFilterSettings::record>()));
+        break;
+    }
+    case DemuxMmtpFilterSettingsFilterSettings::download: {
+        mmtp.filterSettings.download(getHidlDownloadSettings(
+                mmtpConf.filterSettings.get<DemuxMmtpFilterSettingsFilterSettings::download>()));
+        break;
+    }
+    default: {
+        mmtp.filterSettings.noinit();
+        break;
+    }
+    }
+    hidlSettings.mmtp(mmtp);
+}
+
+void TunerHidlFilter::getHidlIpSettings(const DemuxFilterSettings& settings,
+                                        HidlDemuxFilterSettings& hidlSettings) {
+    const DemuxIpFilterSettings& ipConf = settings.get<DemuxFilterSettings::ip>();
+    HidlDemuxIpAddress ipAddr{
+            .srcPort = static_cast<uint16_t>(ipConf.ipAddr.srcPort),
+            .dstPort = static_cast<uint16_t>(ipConf.ipAddr.dstPort),
+    };
+
+    ipConf.ipAddr.srcIpAddress.getTag() == DemuxIpAddressIpAddress::v6
+            ? ipAddr.srcIpAddress.v6(getIpV6Address(ipConf.ipAddr.srcIpAddress))
+            : ipAddr.srcIpAddress.v4(getIpV4Address(ipConf.ipAddr.srcIpAddress));
+    ipConf.ipAddr.dstIpAddress.getTag() == DemuxIpAddressIpAddress::v6
+            ? ipAddr.dstIpAddress.v6(getIpV6Address(ipConf.ipAddr.dstIpAddress))
+            : ipAddr.dstIpAddress.v4(getIpV4Address(ipConf.ipAddr.dstIpAddress));
+
+    HidlDemuxIpFilterSettings ip;
+    ip.ipAddr = ipAddr;
+
+    switch (ipConf.filterSettings.getTag()) {
+    case DemuxIpFilterSettingsFilterSettings::section: {
+        ip.filterSettings.section(getHidlSectionSettings(
+                ipConf.filterSettings.get<DemuxIpFilterSettingsFilterSettings::section>()));
+        break;
+    }
+    case DemuxIpFilterSettingsFilterSettings::bPassthrough: {
+        ip.filterSettings.bPassthrough(
+                ipConf.filterSettings.get<DemuxIpFilterSettingsFilterSettings::bPassthrough>());
+        break;
+    }
+    default: {
+        ip.filterSettings.noinit();
+        break;
+    }
+    }
+    hidlSettings.ip(ip);
+}
+
+hidl_array<uint8_t, IP_V6_LENGTH> TunerHidlFilter::getIpV6Address(
+        const DemuxIpAddressIpAddress& addr) {
+    hidl_array<uint8_t, IP_V6_LENGTH> ip;
+    if (addr.get<DemuxIpAddressIpAddress::v6>().size() != IP_V6_LENGTH) {
+        return ip;
+    }
+    copy(addr.get<DemuxIpAddressIpAddress::v6>().begin(),
+         addr.get<DemuxIpAddressIpAddress::v6>().end(), ip.data());
+    return ip;
+}
+
+hidl_array<uint8_t, IP_V4_LENGTH> TunerHidlFilter::getIpV4Address(
+        const DemuxIpAddressIpAddress& addr) {
+    hidl_array<uint8_t, IP_V4_LENGTH> ip;
+    if (addr.get<DemuxIpAddressIpAddress::v4>().size() != IP_V4_LENGTH) {
+        return ip;
+    }
+    copy(addr.get<DemuxIpAddressIpAddress::v4>().begin(),
+         addr.get<DemuxIpAddressIpAddress::v4>().end(), ip.data());
+    return ip;
+}
+
+void TunerHidlFilter::getHidlTlvSettings(const DemuxFilterSettings& settings,
+                                         HidlDemuxFilterSettings& hidlSettings) {
+    const DemuxTlvFilterSettings& tlvConf = settings.get<DemuxFilterSettings::tlv>();
+    HidlDemuxTlvFilterSettings tlv{
+            .packetType = static_cast<uint8_t>(tlvConf.packetType),
+            .isCompressedIpPacket = tlvConf.isCompressedIpPacket,
+    };
+
+    switch (tlvConf.filterSettings.getTag()) {
+    case DemuxTlvFilterSettingsFilterSettings::section: {
+        tlv.filterSettings.section(getHidlSectionSettings(
+                tlvConf.filterSettings.get<DemuxTlvFilterSettingsFilterSettings::section>()));
+        break;
+    }
+    case DemuxTlvFilterSettingsFilterSettings::bPassthrough: {
+        tlv.filterSettings.bPassthrough(
+                tlvConf.filterSettings.get<DemuxTlvFilterSettingsFilterSettings::bPassthrough>());
+        break;
+    }
+    default: {
+        tlv.filterSettings.noinit();
+        break;
+    }
+    }
+    hidlSettings.tlv(tlv);
+}
+
+void TunerHidlFilter::getHidlAlpSettings(const DemuxFilterSettings& settings,
+                                         HidlDemuxFilterSettings& hidlSettings) {
+    const DemuxAlpFilterSettings& alpConf = settings.get<DemuxFilterSettings::alp>();
+    HidlDemuxAlpFilterSettings alp{
+            .packetType = static_cast<uint8_t>(alpConf.packetType),
+            .lengthType = static_cast<HidlDemuxAlpLengthType>(alpConf.lengthType),
+    };
+
+    switch (alpConf.filterSettings.getTag()) {
+    case DemuxAlpFilterSettingsFilterSettings::section: {
+        alp.filterSettings.section(getHidlSectionSettings(
+                alpConf.filterSettings.get<DemuxAlpFilterSettingsFilterSettings::section>()));
+        break;
+    }
+    default: {
+        alp.filterSettings.noinit();
+        break;
+    }
+    }
+    hidlSettings.alp(alp);
+}
+
+HidlDemuxFilterAvSettings TunerHidlFilter::getHidlAvSettings(
+        const DemuxFilterAvSettings& settings) {
+    HidlDemuxFilterAvSettings av{
+            .isPassthrough = settings.isPassthrough,
+    };
+    return av;
+}
+
+HidlDemuxFilterSectionSettings TunerHidlFilter::getHidlSectionSettings(
+        const DemuxFilterSectionSettings& settings) {
+    HidlDemuxFilterSectionSettings section{
+            .isCheckCrc = settings.isCheckCrc,
+            .isRepeat = settings.isRepeat,
+            .isRaw = settings.isRaw,
+    };
+
+    switch (settings.condition.getTag()) {
+    case DemuxFilterSectionSettingsCondition::sectionBits: {
+        const DemuxFilterSectionBits& sectionBits =
+                settings.condition.get<DemuxFilterSectionSettingsCondition::sectionBits>();
+        vector<uint8_t> filter(sectionBits.filter.begin(), sectionBits.filter.end());
+        vector<uint8_t> mask(sectionBits.mask.begin(), sectionBits.mask.end());
+        vector<uint8_t> mode(sectionBits.mode.begin(), sectionBits.mode.end());
+        section.condition.sectionBits({
+                .filter = filter,
+                .mask = mask,
+                .mode = mode,
+        });
+        break;
+    }
+    case DemuxFilterSectionSettingsCondition::tableInfo: {
+        const DemuxFilterSectionSettingsConditionTableInfo& tableInfo =
+                settings.condition.get<DemuxFilterSectionSettingsCondition::tableInfo>();
+        section.condition.tableInfo({
+                .tableId = static_cast<uint16_t>(tableInfo.tableId),
+                .version = static_cast<uint16_t>(tableInfo.version),
+        });
+        break;
+    }
+    default: {
+        break;
+    }
+    }
+    return section;
+}
+
+HidlDemuxFilterPesDataSettings TunerHidlFilter::getHidlPesDataSettings(
+        const DemuxFilterPesDataSettings& settings) {
+    HidlDemuxFilterPesDataSettings pes{
+            .streamId = static_cast<HidlDemuxStreamId>(settings.streamId),
+            .isRaw = settings.isRaw,
+    };
+    return pes;
+}
+
+HidlDemuxFilterRecordSettings TunerHidlFilter::getHidlRecordSettings(
+        const DemuxFilterRecordSettings& settings) {
+    HidlDemuxFilterRecordSettings record{
+            .tsIndexMask = static_cast<uint32_t>(settings.tsIndexMask),
+    };
+
+    switch (settings.scIndexMask.getTag()) {
+    case DemuxFilterScIndexMask::scIndex: {
+        record.scIndexType = static_cast<HidlDemuxRecordScIndexType>(settings.scIndexType);
+        record.scIndexMask.sc(
+                static_cast<uint32_t>(settings.scIndexMask.get<DemuxFilterScIndexMask::scIndex>()));
+        break;
+    }
+    case DemuxFilterScIndexMask::scAvc: {
+        record.scIndexType = HidlDemuxRecordScIndexType::SC;
+        uint32_t index =
+                static_cast<uint32_t>(settings.scIndexMask.get<DemuxFilterScIndexMask::scAvc>());
+        // HIDL HAL AVC index values start from 1 << 4; AIDL values start from 1 << 0.
+        index = index << 4;
+        record.scIndexMask.sc(index);
+        break;
+    }
+    case DemuxFilterScIndexMask::scHevc: {
+        record.scIndexType = static_cast<HidlDemuxRecordScIndexType>(settings.scIndexType);
+        record.scIndexMask.scHevc(
+                static_cast<uint32_t>(settings.scIndexMask.get<DemuxFilterScIndexMask::scHevc>()));
+        break;
+    }
+    }
+    return record;
+}
+
+HidlDemuxFilterDownloadSettings TunerHidlFilter::getHidlDownloadSettings(
+        const DemuxFilterDownloadSettings& settings) {
+    HidlDemuxFilterDownloadSettings download{
+            .downloadId = static_cast<uint32_t>(settings.downloadId),
+    };
+    return download;
+}
+
+/////////////// FilterCallback ///////////////////////
+Return<void> TunerHidlFilter::FilterCallback::onFilterStatus(HidlDemuxFilterStatus status) {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr) {
+        mTunerFilterCallback->onFilterStatus(static_cast<DemuxFilterStatus>(status));
+    }
+    return Void();
+}
+
+Return<void> TunerHidlFilter::FilterCallback::onFilterEvent(
+        const HidlDemuxFilterEvent& filterEvent) {
+    vector<HidlDemuxFilterEventExt::Event> emptyEventsExt;
+    HidlDemuxFilterEventExt emptyFilterEventExt{
+            .events = emptyEventsExt,
+    };
+    onFilterEvent_1_1(filterEvent, emptyFilterEventExt);
+    return Void();
+}
+
+Return<void> TunerHidlFilter::FilterCallback::onFilterEvent_1_1(
+        const HidlDemuxFilterEvent& filterEvent, const HidlDemuxFilterEventExt& filterEventExt) {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr) {
+        vector<HidlDemuxFilterEvent::Event> events = filterEvent.events;
+        vector<HidlDemuxFilterEventExt::Event> eventsExt = filterEventExt.events;
+        vector<DemuxFilterEvent> tunerEvents;
+
+        getAidlFilterEvent(events, eventsExt, tunerEvents);
+        mTunerFilterCallback->onFilterEvent(tunerEvents);
+    }
+    return Void();
+}
+
+void TunerHidlFilter::FilterCallback::sendSharedFilterStatus(int32_t status) {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr && mOriginalCallback != nullptr) {
+        mTunerFilterCallback->onFilterStatus(static_cast<DemuxFilterStatus>(status));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::attachSharedFilterCallback(
+        const shared_ptr<ITunerFilterCallback>& in_cb) {
+    Mutex::Autolock _l(mCallbackLock);
+    mOriginalCallback = mTunerFilterCallback;
+    mTunerFilterCallback = in_cb;
+}
+
+void TunerHidlFilter::FilterCallback::detachSharedFilterCallback() {
+    Mutex::Autolock _l(mCallbackLock);
+    if (mTunerFilterCallback != nullptr && mOriginalCallback != nullptr) {
+        mTunerFilterCallback = mOriginalCallback;
+        mOriginalCallback = nullptr;
+    }
+}
+
+void TunerHidlFilter::FilterCallback::detachCallbacks() {
+    Mutex::Autolock _l(mCallbackLock);
+    mOriginalCallback = nullptr;
+    mTunerFilterCallback = nullptr;
+}
+
+/////////////// FilterCallback Helper Methods ///////////////////////
+void TunerHidlFilter::FilterCallback::getAidlFilterEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events,
+        const vector<HidlDemuxFilterEventExt::Event>& eventsExt,
+        vector<DemuxFilterEvent>& aidlEvents) {
+    if (events.empty() && !eventsExt.empty()) {
+        switch (eventsExt[0].getDiscriminator()) {
+        case HidlDemuxFilterEventExt::Event::hidl_discriminator::monitorEvent: {
+            getMonitorEvent(eventsExt, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEventExt::Event::hidl_discriminator::startId: {
+            getRestartEvent(eventsExt, aidlEvents);
+            break;
+        }
+        default: {
+            break;
+        }
+        }
+    }
+
+    if (!events.empty()) {
+        switch (events[0].getDiscriminator()) {
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::media: {
+            getMediaEvent(events, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::section: {
+            getSectionEvent(events, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::pes: {
+            getPesEvent(events, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::tsRecord: {
+            getTsRecordEvent(events, eventsExt, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::mmtpRecord: {
+            getMmtpRecordEvent(events, eventsExt, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::download: {
+            getDownloadEvent(events, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::ipPayload: {
+            getIpPayloadEvent(events, aidlEvents);
+            break;
+        }
+        case HidlDemuxFilterEvent::Event::hidl_discriminator::temi: {
+            getTemiEvent(events, aidlEvents);
+            break;
+        }
+        default: {
+            break;
+        }
+        }
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getMediaEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        const HidlDemuxFilterMediaEvent& mediaEvent = events[i].media();
+        DemuxFilterMediaEvent media;
+
+        media.streamId = static_cast<int32_t>(mediaEvent.streamId);
+        media.isPtsPresent = mediaEvent.isPtsPresent;
+        media.pts = static_cast<int64_t>(mediaEvent.pts);
+        media.isDtsPresent = false;
+        media.dts = static_cast<int64_t>(-1);
+        media.dataLength = static_cast<int64_t>(mediaEvent.dataLength);
+        media.offset = static_cast<int64_t>(mediaEvent.offset);
+        media.isSecureMemory = mediaEvent.isSecureMemory;
+        media.avDataId = static_cast<int64_t>(mediaEvent.avDataId);
+        media.mpuSequenceNumber = static_cast<int32_t>(mediaEvent.mpuSequenceNumber);
+        media.isPesPrivateData = mediaEvent.isPesPrivateData;
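+        // The HIDL media event carries no SC index information; default the mask to UNDEFINED.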
+        media.scIndexMask.set<DemuxFilterScIndexMask::scIndex>(
+                static_cast<int32_t>(DemuxScIndex::UNDEFINED));
+
+        if (mediaEvent.extraMetaData.getDiscriminator() ==
+            HidlDemuxFilterMediaEvent::ExtraMetaData::hidl_discriminator::audio) {
+            AudioExtraMetaData audio;
+            audio.adFade = static_cast<int8_t>(mediaEvent.extraMetaData.audio().adFade);
+            audio.adPan = static_cast<int8_t>(mediaEvent.extraMetaData.audio().adPan);
+            audio.versionTextTag =
+                    static_cast<int16_t>(mediaEvent.extraMetaData.audio().versionTextTag);
+            audio.adGainCenter = static_cast<int8_t>(mediaEvent.extraMetaData.audio().adGainCenter);
+            audio.adGainFront = static_cast<int8_t>(mediaEvent.extraMetaData.audio().adGainFront);
+            audio.adGainSurround =
+                    static_cast<int8_t>(mediaEvent.extraMetaData.audio().adGainSurround);
+            media.extraMetaData.set<DemuxFilterMediaEventExtraMetaData::audio>(audio);
+        } else {
+            media.extraMetaData.set<DemuxFilterMediaEventExtraMetaData::noinit>(true);
+        }
+
+        if (mediaEvent.avMemory.getNativeHandle() != nullptr) {
+            media.avMemory = dupToAidl(mediaEvent.avMemory.getNativeHandle());
+        }
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::media>(move(media));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getSectionEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        const HidlDemuxFilterSectionEvent& sectionEvent = events[i].section();
+        DemuxFilterSectionEvent section;
+
+        section.tableId = static_cast<int32_t>(sectionEvent.tableId);
+        section.version = static_cast<int32_t>(sectionEvent.version);
+        section.sectionNum = static_cast<int32_t>(sectionEvent.sectionNum);
+        section.dataLength = static_cast<int64_t>(sectionEvent.dataLength);
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::section>(move(section));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getPesEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                                                  vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        const HidlDemuxFilterPesEvent& pesEvent = events[i].pes();
+        DemuxFilterPesEvent pes;
+
+        pes.streamId = static_cast<int32_t>(pesEvent.streamId);
+        pes.dataLength = static_cast<int32_t>(pesEvent.dataLength);
+        pes.mpuSequenceNumber = static_cast<int32_t>(pesEvent.mpuSequenceNumber);
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::pes>(move(pes));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getTsRecordEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events,
+        const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        DemuxFilterTsRecordEvent tsRecord;
+        const HidlDemuxFilterTsRecordEvent& tsRecordEvent = events[i].tsRecord();
+
+        DemuxFilterScIndexMask scIndexMask;
+        if (tsRecordEvent.scIndexMask.getDiscriminator() ==
+            HidlDemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::sc) {
+            int32_t hidlScIndex = static_cast<int32_t>(tsRecordEvent.scIndexMask.sc());
+            if (hidlScIndex <= static_cast<int32_t>(DemuxScIndex::SEQUENCE)) {
+                scIndexMask.set<DemuxFilterScIndexMask::scIndex>(hidlScIndex);
+            } else {
+                // AVC SC indices start at 1 << 4 in HIDL but at 1 << 0 in AIDL; shift down.
+                scIndexMask.set<DemuxFilterScIndexMask::scAvc>(hidlScIndex >> 4);
+            }
+        } else if (tsRecordEvent.scIndexMask.getDiscriminator() ==
+                   HidlDemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::scHevc) {
+            scIndexMask.set<DemuxFilterScIndexMask::scHevc>(
+                    static_cast<int32_t>(tsRecordEvent.scIndexMask.scHevc()));
+        }
+
+        if (tsRecordEvent.pid.getDiscriminator() == HidlDemuxPid::hidl_discriminator::tPid) {
+            DemuxPid pid;
+            pid.set<DemuxPid::tPid>(static_cast<int32_t>(tsRecordEvent.pid.tPid()));
+            tsRecord.pid = pid;
+        } else {
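+            // Only TS pids are converted here; anything else falls back to the invalid TS pid.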
+            DemuxPid pid;
+            pid.set<DemuxPid::tPid>(static_cast<int32_t>(Constant::INVALID_TS_PID));
+            tsRecord.pid = pid;
+        }
+
+        tsRecord.scIndexMask = scIndexMask;
+        tsRecord.tsIndexMask = static_cast<int32_t>(tsRecordEvent.tsIndexMask);
+        tsRecord.byteNumber = static_cast<int64_t>(tsRecordEvent.byteNumber);
+
+        if (eventsExt.size() > i &&
+            eventsExt[i].getDiscriminator() ==
+                    HidlDemuxFilterEventExt::Event::hidl_discriminator::tsRecord) {
+            tsRecord.pts = static_cast<int64_t>(eventsExt[i].tsRecord().pts);
+            tsRecord.firstMbInSlice = static_cast<int32_t>(eventsExt[i].tsRecord().firstMbInSlice);
+        }
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::tsRecord>(move(tsRecord));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getMmtpRecordEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events,
+        const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        DemuxFilterMmtpRecordEvent mmtpRecord;
+        const HidlDemuxFilterMmtpRecordEvent& mmtpRecordEvent = events[i].mmtpRecord();
+
+        mmtpRecord.scHevcIndexMask = static_cast<int32_t>(mmtpRecordEvent.scHevcIndexMask);
+        mmtpRecord.byteNumber = static_cast<int64_t>(mmtpRecordEvent.byteNumber);
+
+        if (eventsExt.size() > i &&
+            eventsExt[i].getDiscriminator() ==
+                    HidlDemuxFilterEventExt::Event::hidl_discriminator::mmtpRecord) {
+            mmtpRecord.pts = static_cast<int64_t>(eventsExt[i].mmtpRecord().pts);
+            mmtpRecord.mpuSequenceNumber =
+                    static_cast<int32_t>(eventsExt[i].mmtpRecord().mpuSequenceNumber);
+            mmtpRecord.firstMbInSlice =
+                    static_cast<int32_t>(eventsExt[i].mmtpRecord().firstMbInSlice);
+            mmtpRecord.tsIndexMask = static_cast<int32_t>(eventsExt[i].mmtpRecord().tsIndexMask);
+        }
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::mmtpRecord>(move(mmtpRecord));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getDownloadEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        const HidlDemuxFilterDownloadEvent& downloadEvent = events[i].download();
+        DemuxFilterDownloadEvent download;
+
+        download.itemId = static_cast<int32_t>(downloadEvent.itemId);
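+        // The HIDL download event provides no download id; use -1 as a placeholder.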
+        download.downloadId = -1;
+        download.itemFragmentIndex = static_cast<int32_t>(downloadEvent.itemFragmentIndex);
+        download.mpuSequenceNumber = static_cast<int32_t>(downloadEvent.mpuSequenceNumber);
+        download.lastItemFragmentIndex = static_cast<int32_t>(downloadEvent.lastItemFragmentIndex);
+        download.dataLength = static_cast<int32_t>(downloadEvent.dataLength);
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::download>(move(download));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getIpPayloadEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        const HidlDemuxFilterIpPayloadEvent& ipPayloadEvent = events[i].ipPayload();
+        DemuxFilterIpPayloadEvent ipPayload;
+
+        ipPayload.dataLength = static_cast<int32_t>(ipPayloadEvent.dataLength);
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::ipPayload>(move(ipPayload));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getTemiEvent(
+        const vector<HidlDemuxFilterEvent::Event>& events, vector<DemuxFilterEvent>& res) {
+    for (int i = 0; i < events.size(); i++) {
+        const HidlDemuxFilterTemiEvent& temiEvent = events[i].temi();
+        DemuxFilterTemiEvent temi;
+
+        temi.pts = static_cast<int64_t>(temiEvent.pts);
+        temi.descrTag = static_cast<int8_t>(temiEvent.descrTag);
+        vector<uint8_t> descrData = temiEvent.descrData;
+        temi.descrData.resize(descrData.size());
+        copy(descrData.begin(), descrData.end(), temi.descrData.begin());
+
+        DemuxFilterEvent filterEvent;
+        filterEvent.set<DemuxFilterEvent::temi>(move(temi));
+        res.push_back(move(filterEvent));
+    }
+}
+
+void TunerHidlFilter::FilterCallback::getMonitorEvent(
+        const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
+    HidlDemuxFilterMonitorEvent monitorEvent = eventsExt[0].monitorEvent();
+    DemuxFilterMonitorEvent monitor;
+
+    switch (monitorEvent.getDiscriminator()) {
+    case HidlDemuxFilterMonitorEvent::hidl_discriminator::scramblingStatus: {
+        monitor.set<DemuxFilterMonitorEvent::scramblingStatus>(
+                static_cast<ScramblingStatus>(monitorEvent.scramblingStatus()));
+        break;
+    }
+    case HidlDemuxFilterMonitorEvent::hidl_discriminator::cid: {
+        monitor.set<DemuxFilterMonitorEvent::cid>(static_cast<int32_t>(monitorEvent.cid()));
+        break;
+    }
+    }
+
+    DemuxFilterEvent filterEvent;
+    filterEvent.set<DemuxFilterEvent::monitorEvent>(move(monitor));
+    res.push_back(move(filterEvent));
+}
+
+void TunerHidlFilter::FilterCallback::getRestartEvent(
+        const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
+    DemuxFilterEvent filterEvent;
+    filterEvent.set<DemuxFilterEvent::startId>(static_cast<int32_t>(eventsExt[0].startId()));
+    res.push_back(move(filterEvent));
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/hidl/TunerHidlFilter.h b/services/tuner/hidl/TunerHidlFilter.h
new file mode 100644
index 0000000..63c7a1b
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlFilter.h
@@ -0,0 +1,241 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLFILTER_H
+#define ANDROID_MEDIA_TUNERHIDLFILTER_H
+
+#include <aidl/android/hardware/tv/tuner/AvStreamType.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterAvSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterDownloadSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterEvent.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterPesDataSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterRecordSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterSectionSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterSettings.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterStatus.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterType.h>
+#include <aidl/android/media/tv/tuner/BnTunerFilter.h>
+#include <aidl/android/media/tv/tuner/ITunerFilterCallback.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <android/hardware/tv/tuner/1.1/IFilter.h>
+#include <android/hardware/tv/tuner/1.1/IFilterCallback.h>
+#include <android/hardware/tv/tuner/1.1/types.h>
+#include <fmq/MessageQueue.h>
+#include <utils/Mutex.h>
+
+#include <map>
+
+using ::aidl::android::hardware::common::NativeHandle;
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::aidl::android::hardware::tv::tuner::AvStreamType;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterAvSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterDownloadSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterPesDataSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterRecordSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSectionSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterSettings;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterType;
+using ::aidl::android::hardware::tv::tuner::DemuxIpAddressIpAddress;
+using ::aidl::android::hardware::tv::tuner::FilterDelayHint;
+using ::aidl::android::media::tv::tuner::BnTunerFilter;
+using ::aidl::android::media::tv::tuner::ITunerFilterCallback;
+using ::android::Mutex;
+using ::android::sp;
+using ::android::hardware::hidl_array;
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::std::shared_ptr;
+using ::std::string;
+using ::std::vector;
+
+using HidlAvStreamType = ::android::hardware::tv::tuner::V1_1::AvStreamType;
+using HidlDemuxAlpFilterSettings = ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterSettings;
+using HidlDemuxFilterAvSettings = ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using HidlDemuxFilterDownloadEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadEvent;
+using HidlDemuxFilterDownloadSettings =
+        ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadSettings;
+using HidlDemuxFilterIpPayloadEvent =
+        ::android::hardware::tv::tuner::V1_0::DemuxFilterIpPayloadEvent;
+using HidlDemuxFilterEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
+using HidlDemuxFilterMediaEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterMediaEvent;
+using HidlDemuxFilterMmtpRecordEvent =
+        ::android::hardware::tv::tuner::V1_0::DemuxFilterMmtpRecordEvent;
+using HidlDemuxFilterPesDataSettings =
+        ::android::hardware::tv::tuner::V1_0::DemuxFilterPesDataSettings;
+using HidlDemuxFilterPesEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterPesEvent;
+using HidlDemuxFilterRecordSettings =
+        ::android::hardware::tv::tuner::V1_0::DemuxFilterRecordSettings;
+using HidlDemuxFilterSectionEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionEvent;
+using HidlDemuxFilterSectionSettings =
+        ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionSettings;
+using HidlDemuxFilterSettings = ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using HidlDemuxFilterStatus = ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
+using HidlDemuxFilterTemiEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterTemiEvent;
+using HidlDemuxFilterTsRecordEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterTsRecordEvent;
+using HidlDemuxIpFilterSettings = ::android::hardware::tv::tuner::V1_0::DemuxIpFilterSettings;
+using HidlDemuxMmtpFilterSettings = ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterSettings;
+using HidlDemuxTlvFilterSettings = ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterSettings;
+using HidlDemuxTsFilterSettings = ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
+using HidlDemuxPid = ::android::hardware::tv::tuner::V1_0::DemuxPid;
+using HidlIFilter = ::android::hardware::tv::tuner::V1_0::IFilter;
+using HidlDvStreamType = ::android::hardware::tv::tuner::V1_1::AvStreamType;
+using HidlDemuxFilterEventExt = ::android::hardware::tv::tuner::V1_1::DemuxFilterEventExt;
+using HidlDemuxFilterMonitorEvent = ::android::hardware::tv::tuner::V1_1::DemuxFilterMonitorEvent;
+using HidlDemuxFilterTsRecordEventExt =
+        ::android::hardware::tv::tuner::V1_1::DemuxFilterTsRecordEventExt;
+using HidlIFilterCallback = ::android::hardware::tv::tuner::V1_1::IFilterCallback;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+using MQDesc = MQDescriptorSync<uint8_t>;
+using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
+
+const static int IP_V4_LENGTH = 4;
+const static int IP_V6_LENGTH = 16;
+
+class TunerHidlFilter : public BnTunerFilter {
+public:
+    class FilterCallback : public HidlIFilterCallback {
+    public:
+        FilterCallback(const shared_ptr<ITunerFilterCallback> tunerFilterCallback)
+              : mTunerFilterCallback(tunerFilterCallback){};
+
+        virtual Return<void> onFilterEvent(const HidlDemuxFilterEvent& filterEvent);
+        virtual Return<void> onFilterEvent_1_1(const HidlDemuxFilterEvent& filterEvent,
+                                               const HidlDemuxFilterEventExt& filterEventExt);
+        virtual Return<void> onFilterStatus(HidlDemuxFilterStatus status);
+
+        void sendSharedFilterStatus(int32_t status);
+        void attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb);
+        void detachSharedFilterCallback();
+        void detachCallbacks();
+
+    private:
+        void getAidlFilterEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                                const vector<HidlDemuxFilterEventExt::Event>& eventsExt,
+                                vector<DemuxFilterEvent>& aidlEvents);
+
+        void getMediaEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                           vector<DemuxFilterEvent>& res);
+        void getSectionEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                             vector<DemuxFilterEvent>& res);
+        void getPesEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                         vector<DemuxFilterEvent>& res);
+        void getTsRecordEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                              const vector<HidlDemuxFilterEventExt::Event>& eventsExt,
+                              vector<DemuxFilterEvent>& res);
+        void getMmtpRecordEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                                const vector<HidlDemuxFilterEventExt::Event>& eventsExt,
+                                vector<DemuxFilterEvent>& res);
+        void getDownloadEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                              vector<DemuxFilterEvent>& res);
+        void getIpPayloadEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                               vector<DemuxFilterEvent>& res);
+        void getTemiEvent(const vector<HidlDemuxFilterEvent::Event>& events,
+                          vector<DemuxFilterEvent>& res);
+        void getMonitorEvent(const vector<HidlDemuxFilterEventExt::Event>& eventsExt,
+                             vector<DemuxFilterEvent>& res);
+        void getRestartEvent(const vector<HidlDemuxFilterEventExt::Event>& eventsExt,
+                             vector<DemuxFilterEvent>& res);
+
+    private:
+        shared_ptr<ITunerFilterCallback> mTunerFilterCallback;
+        shared_ptr<ITunerFilterCallback> mOriginalCallback;
+        Mutex mCallbackLock;
+    };
+
+    TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb, DemuxFilterType type);
+    virtual ~TunerHidlFilter();
+
+    ::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getId64Bit(int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getQueueDesc(AidlMQDesc* _aidl_return) override;
+    ::ndk::ScopedAStatus configure(const DemuxFilterSettings& in_settings) override;
+    ::ndk::ScopedAStatus configureMonitorEvent(int32_t in_monitorEventTypes) override;
+    ::ndk::ScopedAStatus configureIpFilterContextId(int32_t in_cid) override;
+    ::ndk::ScopedAStatus configureAvStreamType(const AvStreamType& in_avStreamType) override;
+    ::ndk::ScopedAStatus getAvSharedHandle(NativeHandle* out_avMemory,
+                                           int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus releaseAvHandle(const NativeHandle& in_handle,
+                                         int64_t in_avDataId) override;
+    ::ndk::ScopedAStatus setDataSource(const shared_ptr<ITunerFilter>& in_filter) override;
+    ::ndk::ScopedAStatus start() override;
+    ::ndk::ScopedAStatus stop() override;
+    ::ndk::ScopedAStatus flush() override;
+    ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus acquireSharedFilterToken(string* _aidl_return) override;
+    ::ndk::ScopedAStatus freeSharedFilterToken(const string& in_filterToken) override;
+    ::ndk::ScopedAStatus getFilterType(DemuxFilterType* _aidl_return) override;
+    ::ndk::ScopedAStatus setDelayHint(const FilterDelayHint& in_hint) override;
+
+    bool isSharedFilterAllowed(int32_t pid);
+    void attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb);
+    sp<HidlIFilter> getHalFilter();
+
+private:
+    bool isAudioFilter();
+    bool isVideoFilter();
+
+    HidlDemuxFilterAvSettings getHidlAvSettings(const DemuxFilterAvSettings& settings);
+    HidlDemuxFilterSectionSettings getHidlSectionSettings(
+            const DemuxFilterSectionSettings& settings);
+    HidlDemuxFilterPesDataSettings getHidlPesDataSettings(
+            const DemuxFilterPesDataSettings& settings);
+    HidlDemuxFilterRecordSettings getHidlRecordSettings(const DemuxFilterRecordSettings& settings);
+    HidlDemuxFilterDownloadSettings getHidlDownloadSettings(
+            const DemuxFilterDownloadSettings& settings);
+    bool getHidlAvStreamType(const AvStreamType avStreamType, HidlAvStreamType& type);
+    void getHidlTsSettings(const DemuxFilterSettings& settings,
+                           HidlDemuxFilterSettings& hidlSettings);
+    void getHidlMmtpSettings(const DemuxFilterSettings& settings,
+                             HidlDemuxFilterSettings& hidlSettings);
+    void getHidlIpSettings(const DemuxFilterSettings& settings,
+                           HidlDemuxFilterSettings& hidlSettings);
+    void getHidlTlvSettings(const DemuxFilterSettings& settings,
+                            HidlDemuxFilterSettings& hidlSettings);
+    void getHidlAlpSettings(const DemuxFilterSettings& settings,
+                            HidlDemuxFilterSettings& hidlSettings);
+
+    hidl_array<uint8_t, IP_V4_LENGTH> getIpV4Address(const DemuxIpAddressIpAddress& addr);
+    hidl_array<uint8_t, IP_V6_LENGTH> getIpV6Address(const DemuxIpAddressIpAddress& addr);
+
+    sp<HidlIFilter> mFilter;
+    sp<::android::hardware::tv::tuner::V1_1::IFilter> mFilter_1_1;
+    int32_t mId;
+    int64_t mId64Bit;
+    DemuxFilterType mType;
+    bool mStarted;
+    bool mShared;
+    int32_t mClientPid;
+    sp<FilterCallback> mFilterCallback;
+    Mutex mLock;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLFILTER_H
diff --git a/services/tuner/hidl/TunerHidlFrontend.cpp b/services/tuner/hidl/TunerHidlFrontend.cpp
new file mode 100644
index 0000000..03957f3
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlFrontend.cpp
@@ -0,0 +1,1224 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TunerHidlFrontend"
+
+#include "TunerHidlFrontend.h"
+
+#include <aidl/android/hardware/tv/tuner/Result.h>
+
+#include "TunerHidlLnb.h"
+#include "TunerHidlService.h"
+
+using ::aidl::android::hardware::tv::tuner::FrontendAnalogSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendAnalogSifStandard;
+using ::aidl::android::hardware::tv::tuner::FrontendAnalogType;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3Bandwidth;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3CodeRate;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3Fec;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3Modulation;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3PlpSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3Settings;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3TimeInterleaveMode;
+using ::aidl::android::hardware::tv::tuner::FrontendAtscModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendAtscSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendBandwidth;
+using ::aidl::android::hardware::tv::tuner::FrontendCableTimeInterleaveMode;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbBandwidth;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbGuardInterval;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbTimeInterleaveMode;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbTransmissionMode;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbcAnnex;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbcBandwidth;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbcModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbcSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbsModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbsRolloff;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbsSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbsStandard;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtBandwidth;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtConstellation;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtGuardInterval;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtHierarchy;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtStandard;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtTransmissionMode;
+using ::aidl::android::hardware::tv::tuner::FrontendGuardInterval;
+using ::aidl::android::hardware::tv::tuner::FrontendInnerFec;
+using ::aidl::android::hardware::tv::tuner::FrontendInterleaveMode;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbs3Modulation;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbs3Rolloff;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbs3Settings;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbsModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbsRolloff;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbsSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtBandwidth;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtCoderate;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtGuardInterval;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtMode;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendModulation;
+using ::aidl::android::hardware::tv::tuner::FrontendModulationStatus;
+using ::aidl::android::hardware::tv::tuner::FrontendRollOff;
+using ::aidl::android::hardware::tv::tuner::FrontendScanAtsc3PlpInfo;
+using ::aidl::android::hardware::tv::tuner::FrontendScanMessageStandard;
+using ::aidl::android::hardware::tv::tuner::FrontendSpectralInversion;
+using ::aidl::android::hardware::tv::tuner::FrontendStatusAtsc3PlpInfo;
+using ::aidl::android::hardware::tv::tuner::FrontendTransmissionMode;
+using ::aidl::android::hardware::tv::tuner::Result;
+
+using HidlFrontendStatusAtsc3PlpInfo =
+        ::aidl::android::hardware::tv::tuner::FrontendStatusAtsc3PlpInfo;
+using HidlFrontendAnalogSifStandard =
+        ::android::hardware::tv::tuner::V1_0::FrontendAnalogSifStandard;
+using HidlFrontendAnalogType = ::android::hardware::tv::tuner::V1_0::FrontendAnalogType;
+using HidlFrontendAtscModulation = ::android::hardware::tv::tuner::V1_0::FrontendAtscModulation;
+using HidlFrontendAtsc3Bandwidth = ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Bandwidth;
+using HidlFrontendAtsc3CodeRate = ::android::hardware::tv::tuner::V1_0::FrontendAtsc3CodeRate;
+using HidlFrontendAtsc3DemodOutputFormat =
+        ::android::hardware::tv::tuner::V1_0::FrontendAtsc3DemodOutputFormat;
+using HidlFrontendAtsc3Fec = ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Fec;
+using HidlFrontendAtsc3Modulation = ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Modulation;
+using HidlFrontendAtsc3TimeInterleaveMode =
+        ::android::hardware::tv::tuner::V1_0::FrontendAtsc3TimeInterleaveMode;
+using HidlFrontendDvbcAnnex = ::android::hardware::tv::tuner::V1_0::FrontendDvbcAnnex;
+using HidlFrontendDvbcModulation = ::android::hardware::tv::tuner::V1_0::FrontendDvbcModulation;
+using HidlFrontendDvbcOuterFec = ::android::hardware::tv::tuner::V1_0::FrontendDvbcOuterFec;
+using HidlFrontendDvbcSpectralInversion =
+        ::android::hardware::tv::tuner::V1_0::FrontendDvbcSpectralInversion;
+using HidlFrontendDvbsModulation = ::android::hardware::tv::tuner::V1_0::FrontendDvbsModulation;
+using HidlFrontendDvbsPilot = ::android::hardware::tv::tuner::V1_0::FrontendDvbsPilot;
+using HidlFrontendDvbsRolloff = ::android::hardware::tv::tuner::V1_0::FrontendDvbsRolloff;
+using HidlFrontendDvbsSettings = ::android::hardware::tv::tuner::V1_0::FrontendDvbsSettings;
+using HidlFrontendDvbsStandard = ::android::hardware::tv::tuner::V1_0::FrontendDvbsStandard;
+using HidlFrontendDvbsVcmMode = ::android::hardware::tv::tuner::V1_0::FrontendDvbsVcmMode;
+using HidlFrontendDvbtBandwidth = ::android::hardware::tv::tuner::V1_0::FrontendDvbtBandwidth;
+using HidlFrontendDvbtCoderate = ::android::hardware::tv::tuner::V1_0::FrontendDvbtCoderate;
+using HidlFrontendDvbtConstellation =
+        ::android::hardware::tv::tuner::V1_0::FrontendDvbtConstellation;
+using HidlFrontendDvbtGuardInterval =
+        ::android::hardware::tv::tuner::V1_0::FrontendDvbtGuardInterval;
+using HidlFrontendDvbtHierarchy = ::android::hardware::tv::tuner::V1_0::FrontendDvbtHierarchy;
+using HidlFrontendDvbtPlpMode = ::android::hardware::tv::tuner::V1_0::FrontendDvbtPlpMode;
+using HidlFrontendDvbtSettings = ::android::hardware::tv::tuner::V1_0::FrontendDvbtSettings;
+using HidlFrontendDvbtStandard = ::android::hardware::tv::tuner::V1_0::FrontendDvbtStandard;
+using HidlFrontendDvbtTransmissionMode =
+        ::android::hardware::tv::tuner::V1_0::FrontendDvbtTransmissionMode;
+using HidlFrontendInnerFec = ::android::hardware::tv::tuner::V1_0::FrontendInnerFec;
+using HidlFrontendIsdbs3Coderate = ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Coderate;
+using HidlFrontendIsdbs3Modulation = ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Modulation;
+using HidlFrontendIsdbs3Rolloff = ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Rolloff;
+using HidlFrontendIsdbs3Settings = ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Settings;
+using HidlFrontendIsdbsCoderate = ::android::hardware::tv::tuner::V1_0::FrontendIsdbsCoderate;
+using HidlFrontendIsdbsModulation = ::android::hardware::tv::tuner::V1_0::FrontendIsdbsModulation;
+using HidlFrontendIsdbsRolloff = ::android::hardware::tv::tuner::V1_0::FrontendIsdbsRolloff;
+using HidlFrontendIsdbsSettings = ::android::hardware::tv::tuner::V1_0::FrontendIsdbsSettings;
+using HidlFrontendIsdbsStreamIdType =
+        ::android::hardware::tv::tuner::V1_0::FrontendIsdbsStreamIdType;
+using HidlFrontendIsdbtBandwidth = ::android::hardware::tv::tuner::V1_0::FrontendIsdbtBandwidth;
+using HidlFrontendIsdbtCoderate = ::android::hardware::tv::tuner::V1_0::FrontendIsdbtCoderate;
+using HidlFrontendIsdbtGuardInterval =
+        ::android::hardware::tv::tuner::V1_0::FrontendIsdbtGuardInterval;
+using HidlFrontendIsdbtMode = ::android::hardware::tv::tuner::V1_0::FrontendIsdbtMode;
+using HidlFrontendIsdbtModulation = ::android::hardware::tv::tuner::V1_0::FrontendIsdbtModulation;
+using HidlFrontendIsdbtSettings = ::android::hardware::tv::tuner::V1_0::FrontendIsdbtSettings;
+using HidlFrontendModulationStatus = ::android::hardware::tv::tuner::V1_0::FrontendModulationStatus;
+using HidlFrontendScanAtsc3PlpInfo = ::android::hardware::tv::tuner::V1_0::FrontendScanAtsc3PlpInfo;
+using HidlFrontendScanType = ::android::hardware::tv::tuner::V1_0::FrontendScanType;
+using HidlFrontendStatusType = ::android::hardware::tv::tuner::V1_0::FrontendStatusType;
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+using HidlFrontendAnalogAftFlag = ::android::hardware::tv::tuner::V1_1::FrontendAnalogAftFlag;
+using HidlFrontendBandwidth = ::android::hardware::tv::tuner::V1_1::FrontendBandwidth;
+using HidlFrontendCableTimeInterleaveMode =
+        ::android::hardware::tv::tuner::V1_1::FrontendCableTimeInterleaveMode;
+using HidlFrontendDvbcBandwidth = ::android::hardware::tv::tuner::V1_1::FrontendDvbcBandwidth;
+using HidlFrontendDtmbBandwidth = ::android::hardware::tv::tuner::V1_1::FrontendDtmbBandwidth;
+using HidlFrontendDtmbCodeRate = ::android::hardware::tv::tuner::V1_1::FrontendDtmbCodeRate;
+using HidlFrontendDtmbGuardInterval =
+        ::android::hardware::tv::tuner::V1_1::FrontendDtmbGuardInterval;
+using HidlFrontendDtmbModulation = ::android::hardware::tv::tuner::V1_1::FrontendDtmbModulation;
+using HidlFrontendDtmbTimeInterleaveMode =
+        ::android::hardware::tv::tuner::V1_1::FrontendDtmbTimeInterleaveMode;
+using HidlFrontendDtmbTransmissionMode =
+        ::android::hardware::tv::tuner::V1_1::FrontendDtmbTransmissionMode;
+using HidlFrontendDvbsScanType = ::android::hardware::tv::tuner::V1_1::FrontendDvbsScanType;
+using HidlFrontendGuardInterval = ::android::hardware::tv::tuner::V1_1::FrontendGuardInterval;
+using HidlFrontendInterleaveMode = ::android::hardware::tv::tuner::V1_1::FrontendInterleaveMode;
+using HidlFrontendModulation = ::android::hardware::tv::tuner::V1_1::FrontendModulation;
+using HidlFrontendRollOff = ::android::hardware::tv::tuner::V1_1::FrontendRollOff;
+using HidlFrontendTransmissionMode = ::android::hardware::tv::tuner::V1_1::FrontendTransmissionMode;
+using HidlFrontendSpectralInversion =
+        ::android::hardware::tv::tuner::V1_1::FrontendSpectralInversion;
+using HidlFrontendStatusTypeExt1_1 = ::android::hardware::tv::tuner::V1_1::FrontendStatusTypeExt1_1;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlFrontend::TunerHidlFrontend(sp<HidlIFrontend> frontend, int id) {
+    mFrontend = frontend;
+    mFrontend_1_1 = ::android::hardware::tv::tuner::V1_1::IFrontend::castFrom(mFrontend);
+    mId = id;
+}
+
+TunerHidlFrontend::~TunerHidlFrontend() {
+    mFrontend = nullptr;
+    mFrontend_1_1 = nullptr;
+    mId = -1;
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::setCallback(
+        const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
+    if (mFrontend == nullptr) {
+        ALOGE("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (tunerFrontendCallback == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    sp<HidlIFrontendCallback> frontendCallback = new FrontendCallback(tunerFrontendCallback);
+    HidlResult status = mFrontend->setCallback(frontendCallback);
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::tune(const FrontendSettings& settings) {
+    if (mFrontend == nullptr) {
+        ALOGE("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status;
+    HidlFrontendSettings frontendSettings;
+    HidlFrontendSettingsExt1_1 frontendSettingsExt;
+    getHidlFrontendSettings(settings, frontendSettings, frontendSettingsExt);
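+    // Prefer the 1.1 tune path so the extended settings are not dropped on newer HALs.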
+    if (mFrontend_1_1 != nullptr) {
+        status = mFrontend_1_1->tune_1_1(frontendSettings, frontendSettingsExt);
+    } else {
+        status = mFrontend->tune(frontendSettings);
+    }
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::stopTune() {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mFrontend->stopTune();
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::scan(const FrontendSettings& settings,
+                                             FrontendScanType frontendScanType) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status;
+    HidlFrontendSettings frontendSettings;
+    HidlFrontendSettingsExt1_1 frontendSettingsExt;
+    getHidlFrontendSettings(settings, frontendSettings, frontendSettingsExt);
+    if (mFrontend_1_1 != nullptr) {
+        status = mFrontend_1_1->scan_1_1(frontendSettings,
+                                         static_cast<HidlFrontendScanType>(frontendScanType),
+                                         frontendSettingsExt);
+    } else {
+        status = mFrontend->scan(frontendSettings,
+                                 static_cast<HidlFrontendScanType>(frontendScanType));
+    }
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::stopScan() {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mFrontend->stopScan();
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (lnb == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    HidlResult status = mFrontend->setLnb(static_cast<TunerHidlLnb*>(lnb.get())->getId());
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::linkCiCamToFrontend(int32_t ciCamId,
+                                                            int32_t* _aidl_return) {
+    if (mFrontend_1_1 == nullptr) {
+        ALOGD("IFrontend_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    int ltsId = -1;
+    HidlResult status = HidlResult::UNKNOWN_ERROR;
+    mFrontend_1_1->linkCiCam(static_cast<uint32_t>(ciCamId), [&](HidlResult r, uint32_t id) {
+        status = r;
+        ltsId = id;
+    });
+
+    if (status == HidlResult::SUCCESS) {
+        *_aidl_return = ltsId;
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::unlinkCiCamToFrontend(int32_t ciCamId) {
+    if (mFrontend_1_1 == nullptr) {
+        ALOGD("IFrontend_1_1 is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mFrontend_1_1->unlinkCiCam(ciCamId);
+    if (status == HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::close() {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    TunerHidlService::getTunerService()->removeFrontend(this->ref<TunerHidlFrontend>());
+
+    HidlResult status = mFrontend->close();
+    mFrontend = nullptr;
+    mFrontend_1_1 = nullptr;
+
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
+                                                  vector<FrontendStatus>* _aidl_return) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = HidlResult::UNKNOWN_ERROR;
+    vector<HidlFrontendStatus> status;
+    vector<HidlFrontendStatusExt1_1> statusExt;
+    vector<HidlFrontendStatusType> types;
+    vector<HidlFrontendStatusTypeExt1_1> typesExt;
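+    // Split the requested types between the 1.0 interface and the 1.1 extension.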
+    for (auto s : in_statusTypes) {
+        if (static_cast<int32_t>(s) <=
+            static_cast<int32_t>(HidlFrontendStatusType::ATSC3_PLP_INFO)) {
+            types.push_back(static_cast<HidlFrontendStatusType>(s));
+        } else {
+            typesExt.push_back(static_cast<HidlFrontendStatusTypeExt1_1>(s));
+        }
+    }
+
+    mFrontend->getStatus(types, [&](HidlResult r, const hidl_vec<HidlFrontendStatus>& ss) {
+        res = r;
+        for (auto s : ss) {
+            status.push_back(s);
+        }
+    });
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    if (mFrontend_1_1 != nullptr) {
+        mFrontend_1_1->getStatusExt1_1(
+                typesExt, [&](HidlResult r, const hidl_vec<HidlFrontendStatusExt1_1>& ss) {
+                    res = r;
+                    for (auto s : ss) {
+                        statusExt.push_back(s);
+                    }
+                });
+        if (res != HidlResult::SUCCESS) {
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+        }
+    }
+
+    getAidlFrontendStatus(status, statusExt, *_aidl_return);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::getFrontendId(int32_t* _aidl_return) {
+    *_aidl_return = mId;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::getHardwareInfo(std::string* _aidl_return) {
+    _aidl_return->clear();
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::removeOutputPid(int32_t /* in_pid */) {
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+::ndk::ScopedAStatus TunerHidlFrontend::getFrontendStatusReadiness(
+        const std::vector<FrontendStatusType>& /* in_statusTypes */,
+        std::vector<FrontendStatusReadiness>* _aidl_return) {
+    _aidl_return->clear();
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+void TunerHidlFrontend::setLna(bool bEnable) {
+    if (mFrontend == nullptr) {
+        ALOGD("IFrontend is not initialized");
+        return;
+    }
+
+    mFrontend->setLna(bEnable);
+}
+
+/////////////// FrontendCallback ///////////////////////
+Return<void> TunerHidlFrontend::FrontendCallback::onEvent(HidlFrontendEventType frontendEventType) {
+    ALOGV("FrontendCallback::onEvent, type=%d", frontendEventType);
+    mTunerFrontendCallback->onEvent(static_cast<FrontendEventType>(frontendEventType));
+    return Void();
+}
+
+Return<void> TunerHidlFrontend::FrontendCallback::onScanMessage(
+        HidlFrontendScanMessageType type, const HidlFrontendScanMessage& message) {
+    ALOGV("FrontendCallback::onScanMessage, type=%d", type);
+    FrontendScanMessage scanMessage;
+    switch (type) {
+    case HidlFrontendScanMessageType::LOCKED: {
+        scanMessage.set<FrontendScanMessage::isLocked>(message.isLocked());
+        break;
+    }
+    case HidlFrontendScanMessageType::END: {
+        scanMessage.set<FrontendScanMessage::isEnd>(message.isEnd());
+        break;
+    }
+    case HidlFrontendScanMessageType::PROGRESS_PERCENT: {
+        scanMessage.set<FrontendScanMessage::progressPercent>(message.progressPercent());
+        break;
+    }
+    case HidlFrontendScanMessageType::FREQUENCY: {
+        const vector<uint32_t>& f = message.frequencies();
+        vector<int64_t> lf(begin(f), end(f));
+        scanMessage.set<FrontendScanMessage::frequencies>(lf);
+        break;
+    }
+    case HidlFrontendScanMessageType::SYMBOL_RATE: {
+        const vector<uint32_t>& s = message.symbolRates();
+        vector<int32_t> symbolRates(begin(s), end(s));
+        scanMessage.set<FrontendScanMessage::symbolRates>(symbolRates);
+        break;
+    }
+    case HidlFrontendScanMessageType::HIERARCHY: {
+        scanMessage.set<FrontendScanMessage::hierarchy>(
+                static_cast<FrontendDvbtHierarchy>(message.hierarchy()));
+        break;
+    }
+    case HidlFrontendScanMessageType::ANALOG_TYPE: {
+        scanMessage.set<FrontendScanMessage::analogType>(
+                static_cast<FrontendAnalogType>(message.analogType()));
+        break;
+    }
+    case HidlFrontendScanMessageType::PLP_IDS: {
+        const vector<uint8_t>& p = message.plpIds();
+        vector<int32_t> plpIds(begin(p), end(p));
+        scanMessage.set<FrontendScanMessage::plpIds>(plpIds);
+        break;
+    }
+    case HidlFrontendScanMessageType::GROUP_IDS: {
+        const vector<uint8_t>& g = message.groupIds();
+        vector<int32_t> groupIds(begin(g), end(g));
+        scanMessage.set<FrontendScanMessage::groupIds>(groupIds);
+        break;
+    }
+    case HidlFrontendScanMessageType::INPUT_STREAM_IDS: {
+        const vector<uint16_t>& i = message.inputStreamIds();
+        vector<int32_t> streamIds(begin(i), end(i));
+        scanMessage.set<FrontendScanMessage::inputStreamIds>(streamIds);
+        break;
+    }
+    case HidlFrontendScanMessageType::STANDARD: {
+        const HidlFrontendScanMessage::Standard& std = message.std();
+        FrontendScanMessageStandard standard;
+        if (std.getDiscriminator() == HidlFrontendScanMessage::Standard::hidl_discriminator::sStd) {
+            standard.set<FrontendScanMessageStandard::sStd>(
+                    static_cast<FrontendDvbsStandard>(std.sStd()));
+        } else if (std.getDiscriminator() ==
+                   HidlFrontendScanMessage::Standard::hidl_discriminator::tStd) {
+            standard.set<FrontendScanMessageStandard::tStd>(
+                    static_cast<FrontendDvbtStandard>(std.tStd()));
+        } else if (std.getDiscriminator() ==
+                   HidlFrontendScanMessage::Standard::hidl_discriminator::sifStd) {
+            standard.set<FrontendScanMessageStandard::sifStd>(
+                    static_cast<FrontendAnalogSifStandard>(std.sifStd()));
+        }
+        scanMessage.set<FrontendScanMessage::std>(standard);
+        break;
+    }
+    case HidlFrontendScanMessageType::ATSC3_PLP_INFO: {
+        const vector<HidlFrontendScanAtsc3PlpInfo>& plpInfos = message.atsc3PlpInfos();
+        vector<FrontendScanAtsc3PlpInfo> tunerPlpInfos;
+        for (int i = 0; i < plpInfos.size(); i++) {
+            FrontendScanAtsc3PlpInfo plpInfo{
+                    .plpId = static_cast<int32_t>(plpInfos[i].plpId),
+                    .bLlsFlag = plpInfos[i].bLlsFlag,
+            };
+            tunerPlpInfos.push_back(plpInfo);
+        }
+        scanMessage.set<FrontendScanMessage::atsc3PlpInfos>(tunerPlpInfos);
+        break;
+    }
+    default:
+        break;
+    }
+    mTunerFrontendCallback->onScanMessage(static_cast<FrontendScanMessageType>(type), scanMessage);
+    return Void();
+}
+
+Return<void> TunerHidlFrontend::FrontendCallback::onScanMessageExt1_1(
+        HidlFrontendScanMessageTypeExt1_1 type, const HidlFrontendScanMessageExt1_1& message) {
+    ALOGV("onScanMessageExt1_1::onScanMessage, type=%d", type);
+    FrontendScanMessage scanMessage;
+    switch (type) {
+    case HidlFrontendScanMessageTypeExt1_1::MODULATION: {
+        HidlFrontendModulation m = message.modulation();
+        FrontendModulation modulation;
+        switch (m.getDiscriminator()) {
+        case HidlFrontendModulation::hidl_discriminator::dvbc: {
+            modulation.set<FrontendModulation::dvbc>(static_cast<FrontendDvbcModulation>(m.dvbc()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::dvbt: {
+            modulation.set<FrontendModulation::dvbt>(
+                    static_cast<FrontendDvbtConstellation>(m.dvbt()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::dvbs: {
+            modulation.set<FrontendModulation::dvbs>(static_cast<FrontendDvbsModulation>(m.dvbs()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::isdbs: {
+            modulation.set<FrontendModulation::isdbs>(
+                    static_cast<FrontendIsdbsModulation>(m.isdbs()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::isdbs3: {
+            modulation.set<FrontendModulation::isdbs3>(
+                    static_cast<FrontendIsdbs3Modulation>(m.isdbs3()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::isdbt: {
+            modulation.set<FrontendModulation::isdbt>(
+                    static_cast<FrontendIsdbtModulation>(m.isdbt()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::atsc: {
+            modulation.set<FrontendModulation::atsc>(static_cast<FrontendAtscModulation>(m.atsc()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::atsc3: {
+            modulation.set<FrontendModulation::atsc3>(
+                    static_cast<FrontendAtsc3Modulation>(m.atsc3()));
+            break;
+        }
+        case HidlFrontendModulation::hidl_discriminator::dtmb: {
+            modulation.set<FrontendModulation::dtmb>(static_cast<FrontendDtmbModulation>(m.dtmb()));
+            break;
+        }
+        }
+        scanMessage.set<FrontendScanMessage::modulation>(modulation);
+        break;
+    }
+    case HidlFrontendScanMessageTypeExt1_1::DVBC_ANNEX: {
+        scanMessage.set<FrontendScanMessage::annex>(
+                static_cast<FrontendDvbcAnnex>(message.annex()));
+        break;
+    }
+    case HidlFrontendScanMessageTypeExt1_1::HIGH_PRIORITY: {
+        scanMessage.set<FrontendScanMessage::isHighPriority>(message.isHighPriority());
+        break;
+    }
+    default: {
+        break;
+    }
+    }
+    mTunerFrontendCallback->onScanMessage(static_cast<FrontendScanMessageType>(type), scanMessage);
+    return Void();
+}
+
+/////////////// TunerHidlFrontend Helper Methods ///////////////////////
+void TunerHidlFrontend::getAidlFrontendStatus(const vector<HidlFrontendStatus>& hidlStatus,
+                                              const vector<HidlFrontendStatusExt1_1>& hidlStatusExt,
+                                              vector<FrontendStatus>& aidlStatus) {
+    for (HidlFrontendStatus s : hidlStatus) {
+        FrontendStatus status;
+        switch (s.getDiscriminator()) {
+        case HidlFrontendStatus::hidl_discriminator::isDemodLocked: {
+            status.set<FrontendStatus::isDemodLocked>(s.isDemodLocked());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::snr: {
+            status.set<FrontendStatus::snr>((int)s.snr());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::ber: {
+            status.set<FrontendStatus::ber>((int)s.ber());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::per: {
+            status.set<FrontendStatus::per>((int)s.per());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::preBer: {
+            status.set<FrontendStatus::preBer>((int)s.preBer());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::signalQuality: {
+            status.set<FrontendStatus::signalQuality>((int)s.signalQuality());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::signalStrength: {
+            status.set<FrontendStatus::signalStrength>((int)s.signalStrength());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::symbolRate: {
+            status.set<FrontendStatus::symbolRate>((int)s.symbolRate());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::innerFec: {
+            status.set<FrontendStatus::innerFec>(static_cast<FrontendInnerFec>(s.innerFec()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::modulation: {
+            FrontendModulationStatus modulationStatus;
+            switch (s.modulation().getDiscriminator()) {
+            case HidlFrontendModulationStatus::hidl_discriminator::dvbc:
+                modulationStatus.set<FrontendModulationStatus::dvbc>(
+                        static_cast<FrontendDvbcModulation>(s.modulation().dvbc()));
+                break;
+            case HidlFrontendModulationStatus::hidl_discriminator::dvbs:
+                modulationStatus.set<FrontendModulationStatus::dvbs>(
+                        static_cast<FrontendDvbsModulation>(s.modulation().dvbs()));
+                break;
+            case HidlFrontendModulationStatus::hidl_discriminator::isdbs:
+                modulationStatus.set<FrontendModulationStatus::isdbs>(
+                        static_cast<FrontendIsdbsModulation>(s.modulation().isdbs()));
+                break;
+            case HidlFrontendModulationStatus::hidl_discriminator::isdbs3:
+                modulationStatus.set<FrontendModulationStatus::isdbs3>(
+                        static_cast<FrontendIsdbs3Modulation>(s.modulation().isdbs3()));
+                break;
+            case HidlFrontendModulationStatus::hidl_discriminator::isdbt:
+                modulationStatus.set<FrontendModulationStatus::isdbt>(
+                        static_cast<FrontendIsdbtModulation>(s.modulation().isdbt()));
+                break;
+            }
+            status.set<FrontendStatus::modulationStatus>(modulationStatus);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::inversion: {
+            status.set<FrontendStatus::inversion>(
+                    static_cast<FrontendSpectralInversion>(s.inversion()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::lnbVoltage: {
+            status.set<FrontendStatus::lnbVoltage>(static_cast<LnbVoltage>(s.lnbVoltage()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::plpId: {
+            status.set<FrontendStatus::plpId>((int32_t)s.plpId());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::isEWBS: {
+            status.set<FrontendStatus::isEWBS>(s.isEWBS());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::agc: {
+            status.set<FrontendStatus::agc>((int32_t)s.agc());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::isLnaOn: {
+            status.set<FrontendStatus::isLnaOn>(s.isLnaOn());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::isLayerError: {
+            vector<bool> e(s.isLayerError().begin(), s.isLayerError().end());
+            status.set<FrontendStatus::isLayerError>(e);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::mer: {
+            status.set<FrontendStatus::mer>(static_cast<int32_t>(s.mer()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::freqOffset: {
+            status.set<FrontendStatus::freqOffset>(static_cast<int64_t>(s.freqOffset()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::hierarchy: {
+            status.set<FrontendStatus::hierarchy>(
+                    static_cast<FrontendDvbtHierarchy>(s.hierarchy()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::isRfLocked: {
+            status.set<FrontendStatus::isRfLocked>(s.isRfLocked());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatus::hidl_discriminator::plpInfo: {
+            vector<FrontendStatusAtsc3PlpInfo> info;
+            for (auto i : s.plpInfo()) {
+                info.push_back({
+                        .plpId = static_cast<int32_t>(i.plpId),
+                        .isLocked = i.isLocked,
+                        .uec = static_cast<int32_t>(i.uec),
+                });
+            }
+            status.set<FrontendStatus::plpInfo>(info);
+            aidlStatus.push_back(status);
+            break;
+        }
+        }
+    }
+
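+    // Append the status entries carried by the HIDL 1.1 extension union.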
+    for (const HidlFrontendStatusExt1_1& s : hidlStatusExt) {
+        FrontendStatus status;
+        switch (s.getDiscriminator()) {
+        case HidlFrontendStatusExt1_1::hidl_discriminator::modulations: {
+            vector<FrontendModulation> aidlMod;
+            for (auto m : s.modulations()) {
+                switch (m.getDiscriminator()) {
+                case HidlFrontendModulation::hidl_discriminator::dvbc:
+                    aidlMod.push_back(static_cast<FrontendDvbcModulation>(m.dvbc()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::dvbs:
+                    aidlMod.push_back(static_cast<FrontendDvbsModulation>(m.dvbs()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::dvbt:
+                    aidlMod.push_back(static_cast<FrontendDvbtConstellation>(m.dvbt()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::isdbs:
+                    aidlMod.push_back(static_cast<FrontendIsdbsModulation>(m.isdbs()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::isdbs3:
+                    aidlMod.push_back(static_cast<FrontendIsdbs3Modulation>(m.isdbs3()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::isdbt:
+                    aidlMod.push_back(static_cast<FrontendIsdbtModulation>(m.isdbt()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::atsc:
+                    aidlMod.push_back(static_cast<FrontendAtscModulation>(m.atsc()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::atsc3:
+                    aidlMod.push_back(static_cast<FrontendAtsc3Modulation>(m.atsc3()));
+                    break;
+                case HidlFrontendModulation::hidl_discriminator::dtmb:
+                    aidlMod.push_back(static_cast<FrontendDtmbModulation>(m.dtmb()));
+                    break;
+                }
+            }
+            status.set<FrontendStatus::modulations>(aidlMod);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::bers: {
+            vector<int> b(s.bers().begin(), s.bers().end());
+            status.set<FrontendStatus::bers>(b);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::codeRates: {
+            vector<FrontendInnerFec> codeRates;
+            for (auto c : s.codeRates()) {
+                codeRates.push_back(static_cast<FrontendInnerFec>(c));
+            }
+            status.set<FrontendStatus::codeRates>(codeRates);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::bandwidth: {
+            FrontendBandwidth bandwidth;
+            switch (s.bandwidth().getDiscriminator()) {
+            case HidlFrontendBandwidth::hidl_discriminator::atsc3:
+                bandwidth.set<FrontendBandwidth::atsc3>(
+                        static_cast<FrontendAtsc3Bandwidth>(s.bandwidth().atsc3()));
+                break;
+            case HidlFrontendBandwidth::hidl_discriminator::dvbc:
+                bandwidth.set<FrontendBandwidth::dvbc>(
+                        static_cast<FrontendDvbcBandwidth>(s.bandwidth().dvbc()));
+                break;
+            case HidlFrontendBandwidth::hidl_discriminator::dvbt:
+                bandwidth.set<FrontendBandwidth::dvbt>(
+                        static_cast<FrontendDvbtBandwidth>(s.bandwidth().dvbt()));
+                break;
+            case HidlFrontendBandwidth::hidl_discriminator::isdbt:
+                bandwidth.set<FrontendBandwidth::isdbt>(
+                        static_cast<FrontendIsdbtBandwidth>(s.bandwidth().isdbt()));
+                break;
+            case HidlFrontendBandwidth::hidl_discriminator::dtmb:
+                bandwidth.set<FrontendBandwidth::dtmb>(
+                        static_cast<FrontendDtmbBandwidth>(s.bandwidth().dtmb()));
+                break;
+            }
+            status.set<FrontendStatus::bandwidth>(bandwidth);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::interval: {
+            FrontendGuardInterval interval;
+            switch (s.interval().getDiscriminator()) {
+            case HidlFrontendGuardInterval::hidl_discriminator::dvbt:
+                interval.set<FrontendGuardInterval::dvbt>(
+                        static_cast<FrontendDvbtGuardInterval>(s.interval().dvbt()));
+                break;
+            case HidlFrontendGuardInterval::hidl_discriminator::isdbt:
+                interval.set<FrontendGuardInterval::isdbt>(
+                        static_cast<FrontendIsdbtGuardInterval>(s.interval().isdbt()));
+                break;
+            case HidlFrontendGuardInterval::hidl_discriminator::dtmb:
+                interval.set<FrontendGuardInterval::dtmb>(
+                        static_cast<FrontendDtmbGuardInterval>(s.interval().dtmb()));
+                break;
+            }
+            status.set<FrontendStatus::interval>(interval);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::transmissionMode: {
+            FrontendTransmissionMode transmissionMode;
+            switch (s.transmissionMode().getDiscriminator()) {
+            case HidlFrontendTransmissionMode::hidl_discriminator::dvbt:
+                transmissionMode.set<FrontendTransmissionMode::dvbt>(
+                        static_cast<FrontendDvbtTransmissionMode>(s.transmissionMode().dvbt()));
+                break;
+            case HidlFrontendTransmissionMode::hidl_discriminator::isdbt:
+                transmissionMode.set<FrontendTransmissionMode::isdbt>(
+                        static_cast<FrontendIsdbtMode>(s.transmissionMode().isdbt()));
+                break;
+            case HidlFrontendTransmissionMode::hidl_discriminator::dtmb:
+                transmissionMode.set<FrontendTransmissionMode::dtmb>(
+                        static_cast<FrontendDtmbTransmissionMode>(s.transmissionMode().dtmb()));
+                break;
+            }
+            status.set<FrontendStatus::transmissionMode>(transmissionMode);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::uec: {
+            status.set<FrontendStatus::uec>(static_cast<int32_t>(s.uec()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::systemId: {
+            status.set<FrontendStatus::systemId>(static_cast<int32_t>(s.systemId()));
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::interleaving: {
+            vector<FrontendInterleaveMode> aidlInter;
+            for (auto i : s.interleaving()) {
+                FrontendInterleaveMode leaveMode;
+                switch (i.getDiscriminator()) {
+                case HidlFrontendInterleaveMode::hidl_discriminator::atsc3:
+                    leaveMode.set<FrontendInterleaveMode::atsc3>(
+                            static_cast<FrontendAtsc3TimeInterleaveMode>(i.atsc3()));
+                    break;
+                case HidlFrontendInterleaveMode::hidl_discriminator::dvbc:
+                    leaveMode.set<FrontendInterleaveMode::dvbc>(
+                            static_cast<FrontendCableTimeInterleaveMode>(i.dvbc()));
+                    break;
+                case HidlFrontendInterleaveMode::hidl_discriminator::dtmb:
+                    leaveMode.set<FrontendInterleaveMode::dtmb>(
+                            static_cast<FrontendDtmbTimeInterleaveMode>(i.dtmb()));
+                    break;
+                }
+                aidlInter.push_back(leaveMode);
+            }
+            status.set<FrontendStatus::interleaving>(aidlInter);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::isdbtSegment: {
+            const vector<uint8_t>& seg = s.isdbtSegment();
+            vector<int32_t> i(seg.begin(), seg.end());
+            status.set<FrontendStatus::isdbtSegment>(i);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::tsDataRate: {
+            vector<int32_t> ts(s.tsDataRate().begin(), s.tsDataRate().end());
+            status.set<FrontendStatus::tsDataRate>(ts);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::rollOff: {
+            FrontendRollOff rollOff;
+            switch (s.rollOff().getDiscriminator()) {
+            case HidlFrontendRollOff::hidl_discriminator::dvbs:
+                rollOff.set<FrontendRollOff::dvbs>(
+                        static_cast<FrontendDvbsRolloff>(s.rollOff().dvbs()));
+                break;
+            case HidlFrontendRollOff::hidl_discriminator::isdbs:
+                rollOff.set<FrontendRollOff::isdbs>(
+                        static_cast<FrontendIsdbsRolloff>(s.rollOff().isdbs()));
+                break;
+            case HidlFrontendRollOff::hidl_discriminator::isdbs3:
+                rollOff.set<FrontendRollOff::isdbs3>(
+                        static_cast<FrontendIsdbs3Rolloff>(s.rollOff().isdbs3()));
+                break;
+            }
+            status.set<FrontendStatus::rollOff>(rollOff);
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::isMiso: {
+            status.set<FrontendStatus::isMiso>(s.isMiso());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::isLinear: {
+            status.set<FrontendStatus::isLinear>(s.isLinear());
+            aidlStatus.push_back(status);
+            break;
+        }
+        case HidlFrontendStatusExt1_1::hidl_discriminator::isShortFrames: {
+            status.set<FrontendStatus::isShortFrames>(s.isShortFrames());
+            aidlStatus.push_back(status);
+            break;
+        }
+        }
+    }
+}
+
+hidl_vec<HidlFrontendAtsc3PlpSettings> TunerHidlFrontend::getAtsc3PlpSettings(
+        const FrontendAtsc3Settings& settings) {
+    int len = settings.plpSettings.size();
+    hidl_vec<HidlFrontendAtsc3PlpSettings> plps = hidl_vec<HidlFrontendAtsc3PlpSettings>(len);
+    // parse PLP settings
+    for (int i = 0; i < len; i++) {
+        uint8_t plpId = static_cast<uint8_t>(settings.plpSettings[i].plpId);
+        HidlFrontendAtsc3Modulation modulation =
+                static_cast<HidlFrontendAtsc3Modulation>(settings.plpSettings[i].modulation);
+        HidlFrontendAtsc3TimeInterleaveMode interleaveMode =
+                static_cast<HidlFrontendAtsc3TimeInterleaveMode>(
+                        settings.plpSettings[i].interleaveMode);
+        HidlFrontendAtsc3CodeRate codeRate =
+                static_cast<HidlFrontendAtsc3CodeRate>(settings.plpSettings[i].codeRate);
+        HidlFrontendAtsc3Fec fec = static_cast<HidlFrontendAtsc3Fec>(settings.plpSettings[i].fec);
+        HidlFrontendAtsc3PlpSettings frontendAtsc3PlpSettings{
+                .plpId = plpId,
+                .modulation = modulation,
+                .interleaveMode = interleaveMode,
+                .codeRate = codeRate,
+                .fec = fec,
+        };
+        plps[i] = frontendAtsc3PlpSettings;
+    }
+    return plps;
+}
+
+HidlFrontendDvbsCodeRate TunerHidlFrontend::getDvbsCodeRate(const FrontendDvbsCodeRate& codeRate) {
+    HidlFrontendInnerFec innerFec = static_cast<HidlFrontendInnerFec>(codeRate.fec);
+    bool isLinear = codeRate.isLinear;
+    bool isShortFrames = codeRate.isShortFrames;
+    uint32_t bitsPer1000Symbol = static_cast<uint32_t>(codeRate.bitsPer1000Symbol);
+    HidlFrontendDvbsCodeRate coderate{
+            .fec = innerFec,
+            .isLinear = isLinear,
+            .isShortFrames = isShortFrames,
+            .bitsPer1000Symbol = bitsPer1000Symbol,
+    };
+    return coderate;
+}
+
+void TunerHidlFrontend::getHidlFrontendSettings(const FrontendSettings& aidlSettings,
+                                                HidlFrontendSettings& settings,
+                                                HidlFrontendSettingsExt1_1& settingsExt) {
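+    // Split the AIDL settings into the HIDL 1.0 settings union and the 1.1 extension fields.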
+    switch (aidlSettings.getTag()) {
+    case FrontendSettings::analog: {
+        const FrontendAnalogSettings& analog = aidlSettings.get<FrontendSettings::analog>();
+        settings.analog({
+                .frequency = static_cast<uint32_t>(analog.frequency),
+                .type = static_cast<HidlFrontendAnalogType>(analog.type),
+                .sifStandard = static_cast<HidlFrontendAnalogSifStandard>(analog.sifStandard),
+        });
+        settingsExt.settingExt.analog({
+                .aftFlag = static_cast<HidlFrontendAnalogAftFlag>(analog.aftFlag),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(analog.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(analog.inversion);
+        break;
+    }
+    case FrontendSettings::atsc: {
+        const FrontendAtscSettings& atsc = aidlSettings.get<FrontendSettings::atsc>();
+        settings.atsc({
+                .frequency = static_cast<uint32_t>(atsc.frequency),
+                .modulation = static_cast<HidlFrontendAtscModulation>(atsc.modulation),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(atsc.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(atsc.inversion);
+        settingsExt.settingExt.noinit();
+        break;
+    }
+    case FrontendSettings::atsc3: {
+        const FrontendAtsc3Settings& atsc3 = aidlSettings.get<FrontendSettings::atsc3>();
+        settings.atsc3({
+                .frequency = static_cast<uint32_t>(atsc3.frequency),
+                .bandwidth = static_cast<HidlFrontendAtsc3Bandwidth>(atsc3.bandwidth),
+                .demodOutputFormat =
+                        static_cast<HidlFrontendAtsc3DemodOutputFormat>(atsc3.demodOutputFormat),
+                .plpSettings = getAtsc3PlpSettings(atsc3),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(atsc3.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(atsc3.inversion);
+        settingsExt.settingExt.noinit();
+        break;
+    }
+    case FrontendSettings::dvbc: {
+        const FrontendDvbcSettings& dvbc = aidlSettings.get<FrontendSettings::dvbc>();
+        settings.dvbc({
+                .frequency = static_cast<uint32_t>(dvbc.frequency),
+                .modulation = static_cast<HidlFrontendDvbcModulation>(dvbc.modulation),
+                .fec = static_cast<HidlFrontendInnerFec>(dvbc.fec),
+                .symbolRate = static_cast<uint32_t>(dvbc.symbolRate),
+                .outerFec = static_cast<HidlFrontendDvbcOuterFec>(dvbc.outerFec),
+                .annex = static_cast<HidlFrontendDvbcAnnex>(dvbc.annex),
+                .spectralInversion = static_cast<HidlFrontendDvbcSpectralInversion>(dvbc.inversion),
+        });
+        settingsExt.settingExt.dvbc({
+                .interleaveMode =
+                        static_cast<HidlFrontendCableTimeInterleaveMode>(dvbc.interleaveMode),
+                .bandwidth = static_cast<HidlFrontendDvbcBandwidth>(dvbc.bandwidth),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(dvbc.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(dvbc.inversion);
+        break;
+    }
+    case FrontendSettings::dvbs: {
+        const FrontendDvbsSettings& dvbs = aidlSettings.get<FrontendSettings::dvbs>();
+        settings.dvbs({
+                .frequency = static_cast<uint32_t>(dvbs.frequency),
+                .modulation = static_cast<HidlFrontendDvbsModulation>(dvbs.modulation),
+                .coderate = getDvbsCodeRate(dvbs.coderate),
+                .symbolRate = static_cast<uint32_t>(dvbs.symbolRate),
+                .rolloff = static_cast<HidlFrontendDvbsRolloff>(dvbs.rolloff),
+                .pilot = static_cast<HidlFrontendDvbsPilot>(dvbs.pilot),
+                .inputStreamId = static_cast<uint32_t>(dvbs.inputStreamId),
+                .standard = static_cast<HidlFrontendDvbsStandard>(dvbs.standard),
+                .vcmMode = static_cast<HidlFrontendDvbsVcmMode>(dvbs.vcmMode),
+        });
+        settingsExt.settingExt.dvbs({
+                .scanType = static_cast<HidlFrontendDvbsScanType>(dvbs.scanType),
+                .isDiseqcRxMessage = dvbs.isDiseqcRxMessage,
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(dvbs.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(dvbs.inversion);
+        break;
+    }
+    case FrontendSettings::dvbt: {
+        const FrontendDvbtSettings& dvbt = aidlSettings.get<FrontendSettings::dvbt>();
+        settings.dvbt({
+                .frequency = static_cast<uint32_t>(dvbt.frequency),
+                .transmissionMode =
+                        static_cast<HidlFrontendDvbtTransmissionMode>(dvbt.transmissionMode),
+                .bandwidth = static_cast<HidlFrontendDvbtBandwidth>(dvbt.bandwidth),
+                .constellation = static_cast<HidlFrontendDvbtConstellation>(dvbt.constellation),
+                .hierarchy = static_cast<HidlFrontendDvbtHierarchy>(dvbt.hierarchy),
+                .hpCoderate = static_cast<HidlFrontendDvbtCoderate>(dvbt.hpCoderate),
+                .lpCoderate = static_cast<HidlFrontendDvbtCoderate>(dvbt.lpCoderate),
+                .guardInterval = static_cast<HidlFrontendDvbtGuardInterval>(dvbt.guardInterval),
+                .isHighPriority = dvbt.isHighPriority,
+                .standard = static_cast<HidlFrontendDvbtStandard>(dvbt.standard),
+                .isMiso = dvbt.isMiso,
+                .plpMode = static_cast<HidlFrontendDvbtPlpMode>(dvbt.plpMode),
+                .plpId = static_cast<uint8_t>(dvbt.plpId),
+                .plpGroupId = static_cast<uint8_t>(dvbt.plpGroupId),
+        });
+        settingsExt.settingExt.dvbt({
+                .constellation = static_cast<
+                        ::android::hardware::tv::tuner::V1_1::FrontendDvbtConstellation>(
+                        dvbt.constellation),
+                .transmissionMode = static_cast<
+                        ::android::hardware::tv::tuner::V1_1::FrontendDvbtTransmissionMode>(
+                        dvbt.transmissionMode),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(dvbt.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(dvbt.inversion);
+        break;
+    }
+    case FrontendSettings::isdbs: {
+        const FrontendIsdbsSettings& isdbs = aidlSettings.get<FrontendSettings::isdbs>();
+        settings.isdbs({
+                .frequency = static_cast<uint32_t>(isdbs.frequency),
+                .streamId = static_cast<uint16_t>(isdbs.streamId),
+                .streamIdType = static_cast<HidlFrontendIsdbsStreamIdType>(isdbs.streamIdType),
+                .modulation = static_cast<HidlFrontendIsdbsModulation>(isdbs.modulation),
+                .coderate = static_cast<HidlFrontendIsdbsCoderate>(isdbs.coderate),
+                .symbolRate = static_cast<uint32_t>(isdbs.symbolRate),
+                .rolloff = static_cast<HidlFrontendIsdbsRolloff>(isdbs.rolloff),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(isdbs.endFrequency);
+        settingsExt.settingExt.noinit();
+        break;
+    }
+    case FrontendSettings::isdbs3: {
+        const FrontendIsdbs3Settings& isdbs3 = aidlSettings.get<FrontendSettings::isdbs3>();
+        settings.isdbs3({
+                .frequency = static_cast<uint32_t>(isdbs3.frequency),
+                .streamId = static_cast<uint16_t>(isdbs3.streamId),
+                .streamIdType = static_cast<HidlFrontendIsdbsStreamIdType>(isdbs3.streamIdType),
+                .modulation = static_cast<HidlFrontendIsdbs3Modulation>(isdbs3.modulation),
+                .coderate = static_cast<HidlFrontendIsdbs3Coderate>(isdbs3.coderate),
+                .symbolRate = static_cast<uint32_t>(isdbs3.symbolRate),
+                .rolloff = static_cast<HidlFrontendIsdbs3Rolloff>(isdbs3.rolloff),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(isdbs3.endFrequency);
+        settingsExt.settingExt.noinit();
+        break;
+    }
+    case FrontendSettings::isdbt: {
+        const FrontendIsdbtSettings& isdbt = aidlSettings.get<FrontendSettings::isdbt>();
+        HidlFrontendIsdbtModulation modulation = HidlFrontendIsdbtModulation::UNDEFINED;
+        HidlFrontendIsdbtCoderate coderate = HidlFrontendIsdbtCoderate::UNDEFINED;
+        if (isdbt.layerSettings.size() > 0) {
+            modulation =
+                    static_cast<HidlFrontendIsdbtModulation>(isdbt.layerSettings[0].modulation);
+            coderate = static_cast<HidlFrontendIsdbtCoderate>(isdbt.layerSettings[0].coderate);
+        }
+        settings.isdbt({
+                .frequency = static_cast<uint32_t>(isdbt.frequency),
+                .modulation = modulation,
+                .bandwidth = static_cast<HidlFrontendIsdbtBandwidth>(isdbt.bandwidth),
+                .mode = static_cast<HidlFrontendIsdbtMode>(isdbt.mode),
+                .coderate = coderate,
+                .guardInterval = static_cast<HidlFrontendIsdbtGuardInterval>(isdbt.guardInterval),
+                .serviceAreaId = static_cast<uint32_t>(isdbt.serviceAreaId),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(isdbt.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(isdbt.inversion);
+        settingsExt.settingExt.noinit();
+        break;
+    }
+    case FrontendSettings::dtmb: {
+        const FrontendDtmbSettings& dtmb = aidlSettings.get<FrontendSettings::dtmb>();
+        settingsExt.settingExt.dtmb({
+                .frequency = static_cast<uint32_t>(dtmb.frequency),
+                .transmissionMode =
+                        static_cast<HidlFrontendDtmbTransmissionMode>(dtmb.transmissionMode),
+                .bandwidth = static_cast<HidlFrontendDtmbBandwidth>(dtmb.bandwidth),
+                .modulation = static_cast<HidlFrontendDtmbModulation>(dtmb.modulation),
+                .codeRate = static_cast<HidlFrontendDtmbCodeRate>(dtmb.codeRate),
+                .guardInterval = static_cast<HidlFrontendDtmbGuardInterval>(dtmb.guardInterval),
+                .interleaveMode =
+                        static_cast<HidlFrontendDtmbTimeInterleaveMode>(dtmb.interleaveMode),
+        });
+        settingsExt.endFrequency = static_cast<uint32_t>(dtmb.endFrequency);
+        settingsExt.inversion = static_cast<HidlFrontendSpectralInversion>(dtmb.inversion);
+        break;
+    }
+    default:
+        break;
+    }
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/hidl/TunerHidlFrontend.h b/services/tuner/hidl/TunerHidlFrontend.h
new file mode 100644
index 0000000..f698655
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlFrontend.h
@@ -0,0 +1,129 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLFRONTEND_H
+#define ANDROID_MEDIA_TUNERHIDLFRONTEND_H
+
+#include <aidl/android/hardware/tv/tuner/IFrontendCallback.h>
+#include <aidl/android/media/tv/tuner/BnTunerFrontend.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <android/hardware/tv/tuner/1.1/IFrontend.h>
+#include <android/hardware/tv/tuner/1.1/IFrontendCallback.h>
+#include <utils/Log.h>
+
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3Settings;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbsCodeRate;
+using ::aidl::android::hardware::tv::tuner::FrontendEventType;
+using ::aidl::android::hardware::tv::tuner::FrontendScanMessage;
+using ::aidl::android::hardware::tv::tuner::FrontendScanMessageType;
+using ::aidl::android::hardware::tv::tuner::FrontendScanType;
+using ::aidl::android::hardware::tv::tuner::FrontendSettings;
+using ::aidl::android::hardware::tv::tuner::FrontendStatus;
+using ::aidl::android::hardware::tv::tuner::FrontendStatusReadiness;
+using ::aidl::android::hardware::tv::tuner::FrontendStatusType;
+using ::android::sp;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::std::shared_ptr;
+using ::std::vector;
+
+using HidlFrontendAtsc3PlpSettings = ::android::hardware::tv::tuner::V1_0::FrontendAtsc3PlpSettings;
+using HidlFrontendDvbsCodeRate = ::android::hardware::tv::tuner::V1_0::FrontendDvbsCodeRate;
+using HidlFrontendEventType = ::android::hardware::tv::tuner::V1_0::FrontendEventType;
+using HidlFrontendId = ::android::hardware::tv::tuner::V1_0::FrontendId;
+using HidlFrontendScanMessage = ::android::hardware::tv::tuner::V1_0::FrontendScanMessage;
+using HidlFrontendScanMessageType = ::android::hardware::tv::tuner::V1_0::FrontendScanMessageType;
+using HidlFrontendSettings = ::android::hardware::tv::tuner::V1_0::FrontendSettings;
+using HidlFrontendStatus = ::android::hardware::tv::tuner::V1_0::FrontendStatus;
+using HidlIFrontend = ::android::hardware::tv::tuner::V1_0::IFrontend;
+using HidlIFrontendCallback = ::android::hardware::tv::tuner::V1_1::IFrontendCallback;
+using HidlFrontendScanMessageExt1_1 =
+        ::android::hardware::tv::tuner::V1_1::FrontendScanMessageExt1_1;
+using HidlFrontendScanMessageTypeExt1_1 =
+        ::android::hardware::tv::tuner::V1_1::FrontendScanMessageTypeExt1_1;
+using HidlFrontendSettingsExt1_1 = ::android::hardware::tv::tuner::V1_1::FrontendSettingsExt1_1;
+using HidlFrontendStatusExt1_1 = ::android::hardware::tv::tuner::V1_1::FrontendStatusExt1_1;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+class TunerHidlFrontend : public BnTunerFrontend {
+public:
+    TunerHidlFrontend(sp<HidlIFrontend> frontend, int id);
+    virtual ~TunerHidlFrontend();
+
+    ::ndk::ScopedAStatus setCallback(
+            const shared_ptr<ITunerFrontendCallback>& in_tunerFrontendCallback) override;
+    ::ndk::ScopedAStatus tune(const FrontendSettings& in_settings) override;
+    ::ndk::ScopedAStatus stopTune() override;
+    ::ndk::ScopedAStatus scan(const FrontendSettings& in_settings,
+                              FrontendScanType in_frontendScanType) override;
+    ::ndk::ScopedAStatus stopScan() override;
+    ::ndk::ScopedAStatus setLnb(const shared_ptr<ITunerLnb>& in_lnb) override;
+    ::ndk::ScopedAStatus linkCiCamToFrontend(int32_t in_ciCamId, int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus unlinkCiCamToFrontend(int32_t in_ciCamId) override;
+    ::ndk::ScopedAStatus close() override;
+    ::ndk::ScopedAStatus getStatus(const vector<FrontendStatusType>& in_statusTypes,
+                                   vector<FrontendStatus>* _aidl_return) override;
+    ::ndk::ScopedAStatus getFrontendId(int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getHardwareInfo(std::string* _aidl_return) override;
+    ::ndk::ScopedAStatus removeOutputPid(int32_t in_pid) override;
+    ::ndk::ScopedAStatus getFrontendStatusReadiness(
+            const std::vector<FrontendStatusType>& in_statusTypes,
+            std::vector<FrontendStatusReadiness>* _aidl_return) override;
+
+    void setLna(bool in_bEnable);
+
+    struct FrontendCallback : public HidlIFrontendCallback {
+        FrontendCallback(const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback)
+              : mTunerFrontendCallback(tunerFrontendCallback) {}
+
+        virtual Return<void> onEvent(HidlFrontendEventType frontendEventType);
+        virtual Return<void> onScanMessage(HidlFrontendScanMessageType type,
+                                           const HidlFrontendScanMessage& message);
+        virtual Return<void> onScanMessageExt1_1(HidlFrontendScanMessageTypeExt1_1 type,
+                                                 const HidlFrontendScanMessageExt1_1& message);
+
+        shared_ptr<ITunerFrontendCallback> mTunerFrontendCallback;
+    };
+
+private:
+    hidl_vec<HidlFrontendAtsc3PlpSettings> getAtsc3PlpSettings(
+            const FrontendAtsc3Settings& settings);
+    HidlFrontendDvbsCodeRate getDvbsCodeRate(const FrontendDvbsCodeRate& codeRate);
+    void getHidlFrontendSettings(const FrontendSettings& aidlSettings,
+                                 HidlFrontendSettings& settings,
+                                 HidlFrontendSettingsExt1_1& settingsExt);
+    void getAidlFrontendStatus(const vector<HidlFrontendStatus>& hidlStatus,
+                               const vector<HidlFrontendStatusExt1_1>& hidlStatusExt,
+                               vector<FrontendStatus>& aidlStatus);
+
+    int mId;
+    sp<HidlIFrontend> mFrontend;
+    sp<::android::hardware::tv::tuner::V1_1::IFrontend> mFrontend_1_1;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLFRONTEND_H
diff --git a/services/tuner/hidl/TunerHidlLnb.cpp b/services/tuner/hidl/TunerHidlLnb.cpp
new file mode 100644
index 0000000..a7e20bb
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlLnb.cpp
@@ -0,0 +1,160 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerHidlLnb"
+
+#include "TunerHidlLnb.h"
+
+#include <aidl/android/hardware/tv/tuner/Result.h>
+
+using ::aidl::android::hardware::tv::tuner::Result;
+using HidlLnbPosition = ::android::hardware::tv::tuner::V1_0::LnbPosition;
+using HidlLnbTone = ::android::hardware::tv::tuner::V1_0::LnbTone;
+using HidlLnbVoltage = ::android::hardware::tv::tuner::V1_0::LnbVoltage;
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlLnb::TunerHidlLnb(sp<HidlILnb> lnb, int id) {
+    mLnb = lnb;
+    mId = id;
+}
+
+TunerHidlLnb::~TunerHidlLnb() {
+    mLnb = nullptr;
+    mId = -1;
+}
+
+::ndk::ScopedAStatus TunerHidlLnb::setCallback(
+        const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) {
+    if (mLnb == nullptr) {
+        ALOGE("ILnb is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (in_tunerLnbCallback == nullptr) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_ARGUMENT));
+    }
+
+    sp<HidlILnbCallback> lnbCallback = new LnbCallback(in_tunerLnbCallback);
+    HidlResult status = mLnb->setCallback(lnbCallback);
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlLnb::setVoltage(LnbVoltage in_voltage) {
+    if (mLnb == nullptr) {
+        ALOGE("ILnb is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mLnb->setVoltage(static_cast<HidlLnbVoltage>(in_voltage));
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlLnb::setTone(LnbTone in_tone) {
+    if (mLnb == nullptr) {
+        ALOGE("ILnb is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mLnb->setTone(static_cast<HidlLnbTone>(in_tone));
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlLnb::setSatellitePosition(LnbPosition in_position) {
+    if (mLnb == nullptr) {
+        ALOGE("ILnb is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mLnb->setSatellitePosition(static_cast<HidlLnbPosition>(in_position));
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlLnb::sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) {
+    if (mLnb == nullptr) {
+        ALOGE("ILnb is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mLnb->sendDiseqcMessage(in_diseqcMessage);
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlLnb::close() {
+    if (mLnb == nullptr) {
+        ALOGE("ILnb is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mLnb->close();
+    mLnb = nullptr;
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+/////////////// ILnbCallback ///////////////////////
+Return<void> TunerHidlLnb::LnbCallback::onEvent(const HidlLnbEventType lnbEventType) {
+    if (mTunerLnbCallback != nullptr) {
+        mTunerLnbCallback->onEvent(static_cast<LnbEventType>(lnbEventType));
+    }
+    return Void();
+}
+
+Return<void> TunerHidlLnb::LnbCallback::onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage) {
+    if (mTunerLnbCallback != nullptr) {
+        vector<uint8_t> msg(begin(diseqcMessage), end(diseqcMessage));
+        mTunerLnbCallback->onDiseqcMessage(msg);
+    }
+    return Void();
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/hidl/TunerHidlLnb.h b/services/tuner/hidl/TunerHidlLnb.h
new file mode 100644
index 0000000..becf848
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlLnb.h
@@ -0,0 +1,83 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLLNB_H
+#define ANDROID_MEDIA_TUNERHIDLLNB_H
+
+#include <aidl/android/hardware/tv/tuner/ILnb.h>
+#include <aidl/android/media/tv/tuner/BnTunerLnb.h>
+#include <android/hardware/tv/tuner/1.0/ILnb.h>
+#include <android/hardware/tv/tuner/1.0/ILnbCallback.h>
+#include <utils/Log.h>
+
+using ::aidl::android::hardware::tv::tuner::LnbEventType;
+using ::aidl::android::hardware::tv::tuner::LnbPosition;
+using ::aidl::android::hardware::tv::tuner::LnbTone;
+using ::aidl::android::hardware::tv::tuner::LnbVoltage;
+using ::android::sp;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::std::shared_ptr;
+using ::std::vector;
+
+using HidlILnb = ::android::hardware::tv::tuner::V1_0::ILnb;
+using HidlILnbCallback = ::android::hardware::tv::tuner::V1_0::ILnbCallback;
+using HidlLnbEventType = ::android::hardware::tv::tuner::V1_0::LnbEventType;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+class TunerHidlLnb : public BnTunerLnb {
+public:
+    TunerHidlLnb(sp<HidlILnb> lnb, int id);
+    virtual ~TunerHidlLnb();
+
+    ::ndk::ScopedAStatus setCallback(
+            const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) override;
+    ::ndk::ScopedAStatus setVoltage(LnbVoltage in_voltage) override;
+    ::ndk::ScopedAStatus setTone(LnbTone in_tone) override;
+    ::ndk::ScopedAStatus setSatellitePosition(LnbPosition in_position) override;
+    ::ndk::ScopedAStatus sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) override;
+    ::ndk::ScopedAStatus close() override;
+
+    int getId() { return mId; }
+
+    struct LnbCallback : public HidlILnbCallback {
+        LnbCallback(const shared_ptr<ITunerLnbCallback>& tunerLnbCallback)
+              : mTunerLnbCallback(tunerLnbCallback) {}
+
+        virtual Return<void> onEvent(const HidlLnbEventType lnbEventType);
+        virtual Return<void> onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage);
+
+        shared_ptr<ITunerLnbCallback> mTunerLnbCallback;
+    };
+
+private:
+    int mId;
+    sp<HidlILnb> mLnb;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLLNB_H
diff --git a/services/tuner/hidl/TunerHidlService.cpp b/services/tuner/hidl/TunerHidlService.cpp
new file mode 100644
index 0000000..6f55f1e
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlService.cpp
@@ -0,0 +1,711 @@
+/**
+ * Copyright (c) 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TunerHidlService"
+
+#include "TunerHidlService.h"
+
+#include <aidl/android/hardware/tv/tuner/FrontendIsdbtTimeInterleaveMode.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+#include <android/binder_manager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/PermissionCache.h>
+#include <utils/Log.h>
+
+#include "TunerHelper.h"
+#include "TunerHidlDemux.h"
+#include "TunerHidlDescrambler.h"
+#include "TunerHidlFrontend.h"
+#include "TunerHidlLnb.h"
+
+using ::aidl::android::hardware::tv::tuner::FrontendAnalogCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendAtsc3Capabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendAtscCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendDtmbCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbcCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbsCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendDvbtCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbs3Capabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbsCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtCapabilities;
+using ::aidl::android::hardware::tv::tuner::FrontendIsdbtTimeInterleaveMode;
+using ::aidl::android::hardware::tv::tuner::FrontendType;
+using ::aidl::android::hardware::tv::tuner::Result;
+using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
+using ::android::IPCThreadState;
+using ::android::PermissionCache;
+using ::android::hardware::hidl_vec;
+
+using HidlFrontendId = ::android::hardware::tv::tuner::V1_0::FrontendId;
+using HidlLnbId = ::android::hardware::tv::tuner::V1_0::LnbId;
+using HidlFrontendType = ::android::hardware::tv::tuner::V1_1::FrontendType;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+shared_ptr<TunerHidlService> TunerHidlService::sTunerService = nullptr;
+
+TunerHidlService::TunerHidlService() {
+    if (!TunerHelper::checkTunerFeature()) {
+        ALOGD("Device doesn't have tuner hardware.");
+        return;
+    }
+
+    updateTunerResources();
+}
+
+TunerHidlService::~TunerHidlService() {
+    mOpenedFrontends.clear();
+    mLnaStatus = -1;
+}
+
+binder_status_t TunerHidlService::instantiate() {
+    if (HidlITuner::getService() == nullptr) {
+        ALOGD("Failed to get ITuner HIDL HAL");
+        return STATUS_NAME_NOT_FOUND;
+    }
+
+    sTunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
+    return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
+}
+
+shared_ptr<TunerHidlService> TunerHidlService::getTunerService() {
+    return sTunerService;
+}
+
+bool TunerHidlService::hasITuner() {
+    ALOGV("hasITuner");
+    if (mTuner != nullptr) {
+        return true;
+    }
+
+    mTuner = HidlITuner::getService();
+    if (mTuner == nullptr) {
+        ALOGE("Failed to get ITuner service");
+        return false;
+    }
+    mTunerVersion = TUNER_HAL_VERSION_1_0;
+
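+    // Probe for the optional 1.1 interface; stay on 1.0 if the cast fails.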
+    mTuner_1_1 = ::android::hardware::tv::tuner::V1_1::ITuner::castFrom(mTuner);
+    if (mTuner_1_1 != nullptr) {
+        mTunerVersion = TUNER_HAL_VERSION_1_1;
+    } else {
+        ALOGD("Failed to get ITuner_1_1 service");
+    }
+
+    return true;
+}
+
+bool TunerHidlService::hasITuner_1_1() {
+    ALOGV("hasITuner_1_1");
+    hasITuner();
+    return (mTunerVersion == TUNER_HAL_VERSION_1_1);
+}
+
+::ndk::ScopedAStatus TunerHidlService::openDemux(int32_t /* in_demuxHandle */,
+                                                 shared_ptr<ITunerDemux>* _aidl_return) {
+    ALOGV("openDemux");
+    if (!hasITuner()) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res;
+    uint32_t id;
+    sp<IDemux> demuxSp = nullptr;
+    mTuner->openDemux([&](HidlResult r, uint32_t demuxId, const sp<IDemux>& demux) {
+        demuxSp = demux;
+        id = demuxId;
+        res = r;
+        ALOGD("open demux, id = %d", demuxId);
+    });
+    if (res == HidlResult::SUCCESS) {
+        *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id);
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    ALOGW("open demux failed, res = %d", res);
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
+::ndk::ScopedAStatus TunerHidlService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
+    ALOGV("getDemuxCaps");
+    if (!hasITuner()) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res;
+    HidlDemuxCapabilities caps;
+    mTuner->getDemuxCaps([&](HidlResult r, const HidlDemuxCapabilities& demuxCaps) {
+        caps = demuxCaps;
+        res = r;
+    });
+    if (res == HidlResult::SUCCESS) {
+        *_aidl_return = getAidlDemuxCaps(caps);
+        return ::ndk::ScopedAStatus::ok();
+    }
+
+    ALOGW("Get demux caps failed, res = %d", res);
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
+::ndk::ScopedAStatus TunerHidlService::getFrontendIds(vector<int32_t>* ids) {
+    if (!hasITuner()) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    hidl_vec<HidlFrontendId> feIds;
+    HidlResult res = getHidlFrontendIds(feIds);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    ids->resize(feIds.size());
+    copy(feIds.begin(), feIds.end(), ids->begin());
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("ITuner service is not init.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlFrontendInfo info;
+    HidlResult res = getHidlFrontendInfo(id, info);
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+
+    HidlFrontendDtmbCapabilities dtmbCaps;
+    if (static_cast<HidlFrontendType>(info.type) == HidlFrontendType::DTMB) {
+        if (!hasITuner_1_1()) {
+            ALOGE("ITuner_1_1 service is not init.");
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                    static_cast<int32_t>(Result::UNAVAILABLE));
+        }
+
+        mTuner_1_1->getFrontendDtmbCapabilities(
+                id, [&](HidlResult r, const HidlFrontendDtmbCapabilities& caps) {
+                    dtmbCaps = caps;
+                    res = r;
+                });
+        if (res != HidlResult::SUCCESS) {
+            return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+        }
+    }
+
+    *_aidl_return = getAidlFrontendInfo(info, dtmbCaps);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::openFrontend(int32_t frontendHandle,
+                                                    shared_ptr<ITunerFrontend>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("ITuner service is not init.");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status;
+    sp<HidlIFrontend> frontend;
+    int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
+    mTuner->openFrontendById(id, [&](HidlResult result, const sp<HidlIFrontend>& fe) {
+        frontend = fe;
+        status = result;
+    });
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    shared_ptr<TunerHidlFrontend> tunerFrontend =
+            ::ndk::SharedRefBase::make<TunerHidlFrontend>(frontend, id);
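+    // Apply any cached LNA state to the new frontend and remember it for future setLna calls.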
+    if (mLnaStatus != -1) {
+        tunerFrontend->setLna(mLnaStatus == 1);
+    }
+    {
+        Mutex::Autolock _l(mOpenedFrontendsLock);
+        mOpenedFrontends.insert(tunerFrontend);
+    }
+    *_aidl_return = tunerFrontend;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status;
+    sp<HidlILnb> lnb;
+    int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
+    mTuner->openLnbById(id, [&](HidlResult result, const sp<HidlILnb>& lnbSp) {
+        lnb = lnbSp;
+        status = result;
+    });
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlLnb>(lnb, id);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::openLnbByName(const string& lnbName,
+                                                     shared_ptr<ITunerLnb>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    int lnbId;
+    HidlResult status;
+    sp<HidlILnb> lnb;
+    mTuner->openLnbByName(lnbName, [&](HidlResult r, HidlLnbId id, const sp<HidlILnb>& lnbSp) {
+        status = r;
+        lnb = lnbSp;
+        lnbId = static_cast<int32_t>(id);
+    });
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlLnb>(lnb, lnbId);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::openDescrambler(
+        int32_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGD("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status;
+    sp<HidlIDescrambler> descrambler;
+    //int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
+    mTuner->openDescrambler([&](HidlResult r, const sp<HidlIDescrambler>& descramblerSp) {
+        status = r;
+        descrambler = descramblerSp;
+    });
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+
+    *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDescrambler>(descrambler);
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::getTunerHalVersion(int* _aidl_return) {
+    hasITuner();
+    *_aidl_return = mTunerVersion;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::openSharedFilter(
+        const string& in_filterToken, const shared_ptr<ITunerFilterCallback>& in_cb,
+        shared_ptr<ITunerFilter>* _aidl_return) {
+    if (!hasITuner()) {
+        ALOGE("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    if (!PermissionCache::checkCallingPermission(sSharedFilterPermission)) {
+        ALOGE("Request requires android.permission.ACCESS_TV_SHARED_FILTER");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    Mutex::Autolock _l(mSharedFiltersLock);
+    if (mSharedFilters.find(in_filterToken) == mSharedFilters.end()) {
+        *_aidl_return = nullptr;
+        ALOGD("fail to find %s", in_filterToken.c_str());
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    shared_ptr<TunerHidlFilter> filter = mSharedFilters.at(in_filterToken);
+    IPCThreadState* ipc = IPCThreadState::self();
+    const int pid = ipc->getCallingPid();
+    if (!filter->isSharedFilterAllowed(pid)) {
+        *_aidl_return = nullptr;
+        ALOGD("shared filter %s is opened in the same process", in_filterToken.c_str());
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::INVALID_STATE));
+    }
+
+    filter->attachSharedFilterCallback(in_cb);
+
+    *_aidl_return = filter;
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::setLna(bool bEnable) {
+    if (!hasITuner()) {
+        ALOGE("get ITuner failed");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    mLnaStatus = bEnable ? 1 : 0;
+
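+    // Propagate the new LNA state to every frontend that is currently open.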
+    {
+        Mutex::Autolock _l(mOpenedFrontendsLock);
+        for (auto it = mOpenedFrontends.begin(); it != mOpenedFrontends.end(); ++it) {
+            (*it)->setLna(mLnaStatus == 1);
+        }
+    }
+
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlService::setMaxNumberOfFrontends(FrontendType /* in_frontendType */,
+                                                               int32_t /* in_maxNumber */) {
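+    // The HIDL Tuner HAL has no API for limiting the number of frontends, so report UNAVAILABLE.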
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+::ndk::ScopedAStatus TunerHidlService::getMaxNumberOfFrontends(FrontendType /* in_frontendType */,
+                                                               int32_t* _aidl_return) {
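+    // Not supported through the HIDL Tuner HAL either; return -1 along with UNAVAILABLE.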
+    *_aidl_return = -1;
+    return ::ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
+string TunerHidlService::addFilterToShared(const shared_ptr<TunerHidlFilter>& sharedFilter) {
+    Mutex::Autolock _l(mSharedFiltersLock);
+
+    // Use sharedFilter address as token.
+    string token = to_string(reinterpret_cast<std::uintptr_t>(sharedFilter.get()));
+    mSharedFilters[token] = sharedFilter;
+
+    return token;
+}
+
+void TunerHidlService::removeSharedFilter(const shared_ptr<TunerHidlFilter>& sharedFilter) {
+    Mutex::Autolock _l(mSharedFiltersLock);
+
+    // Use sharedFilter address as token.
+    mSharedFilters.erase(to_string(reinterpret_cast<std::uintptr_t>(sharedFilter.get())));
+}
+
+void TunerHidlService::removeFrontend(const shared_ptr<TunerHidlFrontend>& frontend) {
+    Mutex::Autolock _l(mOpenedFrontendsLock);
+    for (auto it = mOpenedFrontends.begin(); it != mOpenedFrontends.end(); ++it) {
+        if (it->get() == frontend.get()) {
+            mOpenedFrontends.erase(it);
+            break;
+        }
+    }
+}
+
+void TunerHidlService::updateTunerResources() {
+    if (!hasITuner()) {
+        ALOGE("Failed to updateTunerResources");
+        return;
+    }
+
+    TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
+}
+
+vector<TunerFrontendInfo> TunerHidlService::getTRMFrontendInfos() {
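+    // Collect frontend information in the format expected by the Tuner Resource Manager (TRM).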
+    vector<TunerFrontendInfo> infos;
+    hidl_vec<HidlFrontendId> ids;
+    HidlResult res = getHidlFrontendIds(ids);
+    if (res != HidlResult::SUCCESS) {
+        return infos;
+    }
+
+    for (size_t i = 0; i < ids.size(); i++) {
+        HidlFrontendInfo frontendInfo;
+        HidlResult res = getHidlFrontendInfo(static_cast<int32_t>(ids[i]), frontendInfo);
+        if (res != HidlResult::SUCCESS) {
+            continue;
+        }
+        TunerFrontendInfo tunerFrontendInfo{
+                .handle = TunerHelper::getResourceHandleFromId(static_cast<int32_t>(ids[i]),
+                                                               FRONTEND),
+                .type = static_cast<int32_t>(frontendInfo.type),
+                .exclusiveGroupId = static_cast<int32_t>(frontendInfo.exclusiveGroupId),
+        };
+        infos.push_back(tunerFrontendInfo);
+    }
+
+    return infos;
+}
+
+vector<int32_t> TunerHidlService::getTRMLnbHandles() {
+    vector<int32_t> lnbHandles;
+    if (mTuner != nullptr) {
+        HidlResult res = HidlResult::UNKNOWN_ERROR;
+        vector<HidlLnbId> lnbIds;
+        mTuner->getLnbIds([&](HidlResult r, const hidl_vec<HidlLnbId>& ids) {
+            lnbIds = ids;
+            res = r;
+        });
+        if (res == HidlResult::SUCCESS && lnbIds.size() > 0) {
+            for (size_t i = 0; i < lnbIds.size(); i++) {
+                lnbHandles.push_back(
+                        TunerHelper::getResourceHandleFromId(static_cast<int32_t>(lnbIds[i]), LNB));
+            }
+        }
+    }
+
+    return lnbHandles;
+}
+
+HidlResult TunerHidlService::getHidlFrontendIds(hidl_vec<HidlFrontendId>& ids) {
+    if (mTuner == nullptr) {
+        return HidlResult::NOT_INITIALIZED;
+    }
+    HidlResult res = HidlResult::UNKNOWN_ERROR;
+    mTuner->getFrontendIds([&](HidlResult r, const hidl_vec<HidlFrontendId>& frontendIds) {
+        ids = frontendIds;
+        res = r;
+    });
+    return res;
+}
+
+HidlResult TunerHidlService::getHidlFrontendInfo(const int id, HidlFrontendInfo& info) {
+    if (mTuner == nullptr) {
+        return HidlResult::NOT_INITIALIZED;
+    }
+    HidlResult res = HidlResult::UNKNOWN_ERROR;
+    mTuner->getFrontendInfo(id, [&](HidlResult r, const HidlFrontendInfo& feInfo) {
+        info = feInfo;
+        res = r;
+    });
+    return res;
+}
+
+DemuxCapabilities TunerHidlService::getAidlDemuxCaps(const HidlDemuxCapabilities& caps) {
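+    // Widen the HIDL (V1_0) demux capability fields into their AIDL counterparts.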
+    DemuxCapabilities aidlCaps{
+            .numDemux = static_cast<int32_t>(caps.numDemux),
+            .numRecord = static_cast<int32_t>(caps.numRecord),
+            .numPlayback = static_cast<int32_t>(caps.numPlayback),
+            .numTsFilter = static_cast<int32_t>(caps.numTsFilter),
+            .numSectionFilter = static_cast<int32_t>(caps.numSectionFilter),
+            .numAudioFilter = static_cast<int32_t>(caps.numAudioFilter),
+            .numVideoFilter = static_cast<int32_t>(caps.numVideoFilter),
+            .numPesFilter = static_cast<int32_t>(caps.numPesFilter),
+            .numPcrFilter = static_cast<int32_t>(caps.numPcrFilter),
+            .numBytesInSectionFilter = static_cast<int64_t>(caps.numBytesInSectionFilter),
+            .filterCaps = static_cast<int32_t>(caps.filterCaps),
+            .bTimeFilter = caps.bTimeFilter,
+    };
+    aidlCaps.linkCaps.resize(caps.linkCaps.size());
+    copy(caps.linkCaps.begin(), caps.linkCaps.end(), aidlCaps.linkCaps.begin());
+    return aidlCaps;
+}
+
+FrontendInfo TunerHidlService::getAidlFrontendInfo(
+        const HidlFrontendInfo& halInfo, const HidlFrontendDtmbCapabilities& halDtmbCaps) {
+    FrontendInfo info{
+            .type = static_cast<FrontendType>(halInfo.type),
+            .minFrequency = static_cast<int64_t>(halInfo.minFrequency),
+            .maxFrequency = static_cast<int64_t>(halInfo.maxFrequency),
+            .minSymbolRate = static_cast<int32_t>(halInfo.minSymbolRate),
+            .maxSymbolRate = static_cast<int32_t>(halInfo.maxSymbolRate),
+            .acquireRange = static_cast<int64_t>(halInfo.acquireRange),
+            .exclusiveGroupId = static_cast<int32_t>(halInfo.exclusiveGroupId),
+    };
+    for (size_t i = 0; i < halInfo.statusCaps.size(); i++) {
+        info.statusCaps.push_back(static_cast<FrontendStatusType>(halInfo.statusCaps[i]));
+    }
+
+    FrontendCapabilities caps;
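+    // halInfo.frontendCaps is a HIDL safe_union; read only the arm matching the frontend type.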
+    switch (halInfo.type) {
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::ANALOG: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::analogCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendAnalogCapabilities analogCaps{
+                    .typeCap = static_cast<int32_t>(halInfo.frontendCaps.analogCaps().typeCap),
+                    .sifStandardCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.analogCaps().sifStandardCap),
+            };
+            caps.set<FrontendCapabilities::analogCaps>(analogCaps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::ATSC: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::atscCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendAtscCapabilities atscCaps{
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.atscCaps().modulationCap),
+            };
+            caps.set<FrontendCapabilities::atscCaps>(atscCaps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::ATSC3: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::atsc3Caps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendAtsc3Capabilities atsc3Caps{
+                    .bandwidthCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.atsc3Caps().bandwidthCap),
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.atsc3Caps().modulationCap),
+                    .timeInterleaveModeCap = static_cast<int32_t>(
+                            halInfo.frontendCaps.atsc3Caps().timeInterleaveModeCap),
+                    .codeRateCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.atsc3Caps().codeRateCap),
+                    .demodOutputFormatCap = static_cast<int8_t>(
+                            halInfo.frontendCaps.atsc3Caps().demodOutputFormatCap),
+                    .fecCap = static_cast<int32_t>(halInfo.frontendCaps.atsc3Caps().fecCap),
+            };
+            caps.set<FrontendCapabilities::atsc3Caps>(atsc3Caps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::DVBC: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::dvbcCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendDvbcCapabilities dvbcCaps{
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbcCaps().modulationCap),
+                    .fecCap = static_cast<int64_t>(halInfo.frontendCaps.dvbcCaps().fecCap),
+                    .annexCap = static_cast<int8_t>(halInfo.frontendCaps.dvbcCaps().annexCap),
+            };
+            caps.set<FrontendCapabilities::dvbcCaps>(dvbcCaps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::DVBS: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::dvbsCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendDvbsCapabilities dvbsCaps{
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbsCaps().modulationCap),
+                    .innerfecCap =
+                            static_cast<int64_t>(halInfo.frontendCaps.dvbsCaps().innerfecCap),
+                    .standard = static_cast<int8_t>(halInfo.frontendCaps.dvbsCaps().standard),
+            };
+            caps.set<FrontendCapabilities::dvbsCaps>(dvbsCaps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::DVBT: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::dvbtCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendDvbtCapabilities dvbtCaps{
+                    .transmissionModeCap = static_cast<int32_t>(
+                            halInfo.frontendCaps.dvbtCaps().transmissionModeCap),
+                    .bandwidthCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbtCaps().bandwidthCap),
+                    .constellationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbtCaps().constellationCap),
+                    .coderateCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbtCaps().coderateCap),
+                    .hierarchyCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbtCaps().hierarchyCap),
+                    .guardIntervalCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.dvbtCaps().guardIntervalCap),
+                    .isT2Supported = halInfo.frontendCaps.dvbtCaps().isT2Supported,
+                    .isMisoSupported = halInfo.frontendCaps.dvbtCaps().isMisoSupported,
+            };
+            caps.set<FrontendCapabilities::dvbtCaps>(dvbtCaps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::ISDBS: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::isdbsCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendIsdbsCapabilities isdbsCaps{
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbsCaps().modulationCap),
+                    .coderateCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbsCaps().coderateCap),
+            };
+            caps.set<FrontendCapabilities::isdbsCaps>(isdbsCaps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::ISDBS3: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::isdbs3Caps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendIsdbs3Capabilities isdbs3Caps{
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbs3Caps().modulationCap),
+                    .coderateCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbs3Caps().coderateCap),
+            };
+            caps.set<FrontendCapabilities::isdbs3Caps>(isdbs3Caps);
+        }
+        break;
+    }
+    case ::android::hardware::tv::tuner::V1_0::FrontendType::ISDBT: {
+        if (HidlFrontendInfo::FrontendCapabilities::hidl_discriminator::isdbtCaps ==
+            halInfo.frontendCaps.getDiscriminator()) {
+            FrontendIsdbtCapabilities isdbtCaps{
+                    .modeCap = static_cast<int32_t>(halInfo.frontendCaps.isdbtCaps().modeCap),
+                    .bandwidthCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbtCaps().bandwidthCap),
+                    .modulationCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbtCaps().modulationCap),
+                    .coderateCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbtCaps().coderateCap),
+                    .guardIntervalCap =
+                            static_cast<int32_t>(halInfo.frontendCaps.isdbtCaps().guardIntervalCap),
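+                    // The remaining fields are not available from a 1.0 HAL; use defaults.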
+                    .timeInterleaveCap =
+                            static_cast<int32_t>(FrontendIsdbtTimeInterleaveMode::UNDEFINED),
+                    .isSegmentAuto = false,
+                    .isFullSegment = false,
+            };
+            caps.set<FrontendCapabilities::isdbtCaps>(isdbtCaps);
+        }
+        break;
+    }
+    default: {
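+        // DTMB was introduced in Tuner HAL 1.1, so it has no case in the V1_0 type switch above;
+        // its capabilities are supplied separately through halDtmbCaps.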
+        if (static_cast<HidlFrontendType>(info.type) == HidlFrontendType::DTMB) {
+            FrontendDtmbCapabilities dtmbCaps{
+                    .transmissionModeCap = static_cast<int32_t>(halDtmbCaps.transmissionModeCap),
+                    .bandwidthCap = static_cast<int32_t>(halDtmbCaps.bandwidthCap),
+                    .modulationCap = static_cast<int32_t>(halDtmbCaps.modulationCap),
+                    .codeRateCap = static_cast<int32_t>(halDtmbCaps.codeRateCap),
+                    .guardIntervalCap = static_cast<int32_t>(halDtmbCaps.guardIntervalCap),
+                    .interleaveModeCap = static_cast<int32_t>(halDtmbCaps.interleaveModeCap),
+            };
+            caps.set<FrontendCapabilities::dtmbCaps>(dtmbCaps);
+        }
+        break;
+    }
+    }
+
+    info.frontendCaps = caps;
+    return info;
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/hidl/TunerHidlService.h b/services/tuner/hidl/TunerHidlService.h
new file mode 100644
index 0000000..2252d35
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlService.h
@@ -0,0 +1,134 @@
+/**
+ * Copyright (c) 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLSERVICE_H
+#define ANDROID_MEDIA_TUNERHIDLSERVICE_H
+
+#include <aidl/android/hardware/tv/tuner/DemuxFilterEvent.h>
+#include <aidl/android/hardware/tv/tuner/DemuxFilterStatus.h>
+#include <aidl/android/media/tv/tuner/BnTunerService.h>
+#include <aidl/android/media/tv/tunerresourcemanager/TunerFrontendInfo.h>
+#include <android/hardware/tv/tuner/1.1/ITuner.h>
+#include <utils/Mutex.h>
+
+#include <map>
+#include <unordered_set>
+
+#include "TunerHelper.h"
+#include "TunerHidlFilter.h"
+#include "TunerHidlFrontend.h"
+
+using ::aidl::android::hardware::tv::tuner::DemuxCapabilities;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
+using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::FrontendInfo;
+using ::aidl::android::hardware::tv::tuner::FrontendType;
+using ::aidl::android::media::tv::tuner::ITunerDemux;
+using ::aidl::android::media::tv::tuner::ITunerDescrambler;
+using ::aidl::android::media::tv::tuner::ITunerFrontend;
+using ::aidl::android::media::tv::tuner::ITunerLnb;
+using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
+using ::android::Mutex;
+using ::android::sp;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::std::map;
+using ::std::shared_ptr;
+using ::std::string;
+using ::std::unordered_set;
+using ::std::vector;
+
+using HidlFrontendDtmbCapabilities = ::android::hardware::tv::tuner::V1_1::FrontendDtmbCapabilities;
+using HidlDemuxFilterEvent = ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
+using HidlDemuxFilterStatus = ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
+using HidlDemuxCapabilities = ::android::hardware::tv::tuner::V1_0::DemuxCapabilities;
+using HidlFrontendInfo = ::android::hardware::tv::tuner::V1_0::FrontendInfo;
+using HidlITuner = ::android::hardware::tv::tuner::V1_0::ITuner;
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+using HidlFrontendId = ::android::hardware::tv::tuner::V1_0::FrontendId;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+class TunerHidlService : public BnTunerService {
+public:
+    static char const* getServiceName() { return "media.tuner"; }
+    static binder_status_t instantiate();
+    TunerHidlService();
+    virtual ~TunerHidlService();
+
+    ::ndk::ScopedAStatus getFrontendIds(vector<int32_t>* out_ids) override;
+    ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendHandle,
+                                         FrontendInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus openFrontend(int32_t in_frontendHandle,
+                                      shared_ptr<ITunerFrontend>* _aidl_return) override;
+    ::ndk::ScopedAStatus openLnb(int32_t in_lnbHandle,
+                                 shared_ptr<ITunerLnb>* _aidl_return) override;
+    ::ndk::ScopedAStatus openLnbByName(const std::string& in_lnbName,
+                                       shared_ptr<ITunerLnb>* _aidl_return) override;
+    ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
+                                   shared_ptr<ITunerDemux>* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
+    ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
+                                         shared_ptr<ITunerDescrambler>* _aidl_return) override;
+    ::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
+    ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
+                                          const shared_ptr<ITunerFilterCallback>& in_cb,
+                                          shared_ptr<ITunerFilter>* _aidl_return) override;
+    ::ndk::ScopedAStatus setLna(bool in_bEnable) override;
+    ::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
+                                                 int32_t in_maxNumber) override;
+    ::ndk::ScopedAStatus getMaxNumberOfFrontends(FrontendType in_frontendType,
+                                                 int32_t* _aidl_return) override;
+
+    string addFilterToShared(const shared_ptr<TunerHidlFilter>& sharedFilter);
+    void removeSharedFilter(const shared_ptr<TunerHidlFilter>& sharedFilter);
+    void removeFrontend(const shared_ptr<TunerHidlFrontend>& frontend);
+
+    static shared_ptr<TunerHidlService> getTunerService();
+
+private:
+    bool hasITuner();
+    bool hasITuner_1_1();
+    void updateTunerResources();
+    vector<TunerFrontendInfo> getTRMFrontendInfos();
+    vector<int32_t> getTRMLnbHandles();
+    HidlResult getHidlFrontendIds(hidl_vec<HidlFrontendId>& ids);
+    HidlResult getHidlFrontendInfo(const int id, HidlFrontendInfo& info);
+    DemuxCapabilities getAidlDemuxCaps(const HidlDemuxCapabilities& caps);
+    FrontendInfo getAidlFrontendInfo(const HidlFrontendInfo& halInfo,
+                                     const HidlFrontendDtmbCapabilities& dtmbCaps);
+
+    sp<HidlITuner> mTuner;
+    sp<::android::hardware::tv::tuner::V1_1::ITuner> mTuner_1_1;
+    int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
+    Mutex mSharedFiltersLock;
+    map<string, shared_ptr<TunerHidlFilter>> mSharedFilters;
+    Mutex mOpenedFrontendsLock;
+    unordered_set<shared_ptr<TunerHidlFrontend>> mOpenedFrontends;
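+    // LNA state requested via setLna(): -1 = not yet set, 0 = disabled, 1 = enabled.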
+    int mLnaStatus = -1;
+
+    static shared_ptr<TunerHidlService> sTunerService;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLSERVICE_H
diff --git a/services/tuner/hidl/TunerHidlTimeFilter.cpp b/services/tuner/hidl/TunerHidlTimeFilter.cpp
new file mode 100644
index 0000000..d0606d6
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlTimeFilter.cpp
@@ -0,0 +1,133 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TunerHidlTimeFilter"
+
+#include "TunerHidlTimeFilter.h"
+
+#include <aidl/android/hardware/tv/tuner/Constant64Bit.h>
+#include <aidl/android/hardware/tv/tuner/Result.h>
+
+using ::aidl::android::hardware::tv::tuner::Constant64Bit;
+using ::aidl::android::hardware::tv::tuner::Result;
+
+using HidlResult = ::android::hardware::tv::tuner::V1_0::Result;
+
+using namespace std;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+TunerHidlTimeFilter::TunerHidlTimeFilter(sp<HidlITimeFilter> timeFilter) {
+    mTimeFilter = timeFilter;
+}
+
+TunerHidlTimeFilter::~TunerHidlTimeFilter() {
+    mTimeFilter = nullptr;
+}
+
+::ndk::ScopedAStatus TunerHidlTimeFilter::setTimeStamp(int64_t timeStamp) {
+    if (mTimeFilter == nullptr) {
+        ALOGE("ITimeFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mTimeFilter->setTimeStamp(static_cast<uint64_t>(timeStamp));
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlTimeFilter::clearTimeStamp() {
+    if (mTimeFilter == nullptr) {
+        ALOGE("ITimeFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = mTimeFilter->clearTimeStamp();
+    if (status != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlTimeFilter::getSourceTime(int64_t* _aidl_return) {
+    if (mTimeFilter == nullptr) {
+        *_aidl_return = static_cast<int64_t>(Constant64Bit::INVALID_PRESENTATION_TIME_STAMP);
+        ALOGE("ITimeFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = HidlResult::UNKNOWN_ERROR;
+    mTimeFilter->getSourceTime([&](HidlResult r, uint64_t t) {
+        status = r;
+        *_aidl_return = static_cast<int64_t>(t);
+    });
+    if (status != HidlResult::SUCCESS) {
+        *_aidl_return = static_cast<int64_t>(Constant64Bit::INVALID_PRESENTATION_TIME_STAMP);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlTimeFilter::getTimeStamp(int64_t* _aidl_return) {
+    if (mTimeFilter == nullptr) {
+        *_aidl_return = static_cast<int64_t>(Constant64Bit::INVALID_PRESENTATION_TIME_STAMP);
+        ALOGE("ITimeFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult status = HidlResult::UNKNOWN_ERROR;
+    mTimeFilter->getTimeStamp([&](HidlResult r, uint64_t t) {
+        status = r;
+        *_aidl_return = static_cast<int64_t>(t);
+    });
+    if (status != HidlResult::SUCCESS) {
+        *_aidl_return = static_cast<int64_t>(Constant64Bit::INVALID_PRESENTATION_TIME_STAMP);
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus TunerHidlTimeFilter::close() {
+    if (mTimeFilter == nullptr) {
+        ALOGE("ITimeFilter is not initialized");
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(Result::UNAVAILABLE));
+    }
+
+    HidlResult res = mTimeFilter->close();
+    mTimeFilter = nullptr;
+
+    if (res != HidlResult::SUCCESS) {
+        return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+    }
+    return ::ndk::ScopedAStatus::ok();
+}
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
diff --git a/services/tuner/hidl/TunerHidlTimeFilter.h b/services/tuner/hidl/TunerHidlTimeFilter.h
new file mode 100644
index 0000000..78f9c5e
--- /dev/null
+++ b/services/tuner/hidl/TunerHidlTimeFilter.h
@@ -0,0 +1,57 @@
+/**
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TUNERHIDLTIMEFILTER_H
+#define ANDROID_MEDIA_TUNERHIDLTIMEFILTER_H
+
+#include <aidl/android/media/tv/tuner/BnTunerTimeFilter.h>
+#include <android/hardware/tv/tuner/1.0/ITimeFilter.h>
+#include <utils/Log.h>
+
+using ::android::sp;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+using HidlITimeFilter = ::android::hardware::tv::tuner::V1_0::ITimeFilter;
+
+namespace aidl {
+namespace android {
+namespace media {
+namespace tv {
+namespace tuner {
+
+class TunerHidlTimeFilter : public BnTunerTimeFilter {
+public:
+    TunerHidlTimeFilter(sp<HidlITimeFilter> timeFilter);
+    virtual ~TunerHidlTimeFilter();
+
+    ::ndk::ScopedAStatus setTimeStamp(int64_t in_timeStamp) override;
+    ::ndk::ScopedAStatus clearTimeStamp() override;
+    ::ndk::ScopedAStatus getSourceTime(int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus getTimeStamp(int64_t* _aidl_return) override;
+    ::ndk::ScopedAStatus close() override;
+
+private:
+    sp<HidlITimeFilter> mTimeFilter;
+};
+
+}  // namespace tuner
+}  // namespace tv
+}  // namespace media
+}  // namespace android
+}  // namespace aidl
+
+#endif  // ANDROID_MEDIA_TUNERHIDLTIMEFILTER_H
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index 586a0e2..a014dea 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2020 The Android Open Source Project
+ * Copyright (C) 2021 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,30 +14,33 @@
  * limitations under the License.
  */
 
-#include <utils/Log.h>
+#include <android-base/logging.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
-#include <binder/ProcessState.h>
-#include <hidl/HidlTransportSupport.h>
+#include <utils/Log.h>
 
 #include "TunerService.h"
+#include "hidl/TunerHidlService.h"
+
+using ::aidl::android::media::tv::tuner::TunerHidlService;
+using ::aidl::android::media::tv::tuner::TunerService;
 
 using namespace android;
 
-int main(int argc __unused, char** argv) {
+int main() {
     ALOGD("Tuner service starting");
 
-    strcpy(argv[0], "media.tuner");
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
-    ALOGD("ServiceManager: %p", sm.get());
 
-    binder_status_t status = TunerService::instantiate();
+    // Check for the legacy HIDL HAL first; if it is not present, fall back to the AIDL HAL.
+    binder_status_t status = TunerHidlService::instantiate();
     if (status != STATUS_OK) {
-        ALOGD("Failed to add tuner service as AIDL interface");
-        return -1;
+        status = TunerService::instantiate();
+        CHECK(status == STATUS_OK);
     }
 
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
+    return EXIT_FAILURE;  // should never be reached
 }
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
index fd30618..6a3e199 100644
--- a/services/tuner/mediatuner.rc
+++ b/services/tuner/mediatuner.rc
@@ -2,4 +2,7 @@
     class main
     group media
     ioprio rt 4
+    onrestart restart vendor.tuner-hal-1-0
+    onrestart restart vendor.tuner-hal-1-1
+    onrestart restart vendor.tuner-default
     task_profiles ProcessCapacityHigh HighPerformance