Snap for 10453563 from 5c7632e7041e068c22782d4d95380cf11f219ac1 to mainline-media-swcodec-release

Change-Id: Ib813a1b0b0cca4c213a60518b7e80323fd9d9b32
diff --git a/TEST_MAPPING b/TEST_MAPPING
index 60d2893..51a24a9 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -7,7 +7,7 @@
       "name": "libmedia_helper_tests"
     }
   ],
-  "hwasan-postsubmit": [
+  "hwasan-presubmit": [
     {
       "name": "systemaudio_tests"
     },
diff --git a/alsa_utils/alsa_device_proxy.c b/alsa_utils/alsa_device_proxy.c
index 376ae89..dbd2557 100644
--- a/alsa_utils/alsa_device_proxy.c
+++ b/alsa_utils/alsa_device_proxy.c
@@ -44,7 +44,7 @@
 };
 
 int proxy_prepare(alsa_device_proxy * proxy, const alsa_device_profile* profile,
-                   struct pcm_config * config)
+                  struct pcm_config * config, bool require_exact_match)
 {
     int ret = 0;
 
@@ -58,6 +58,8 @@
 
     if (config->format != PCM_FORMAT_INVALID && profile_is_format_valid(profile, config->format)) {
         proxy->alsa_config.format = config->format;
+    } else if (require_exact_match) {
+        ret = -EINVAL;
     } else {
         proxy->alsa_config.format = profile->default_config.format;
         ALOGW("Invalid format %d - using default %d.",
@@ -70,6 +72,8 @@
 
     if (config->rate != 0 && profile_is_sample_rate_valid(profile, config->rate)) {
         proxy->alsa_config.rate = config->rate;
+    } else if (require_exact_match) {
+        ret = -EINVAL;
     } else {
         proxy->alsa_config.rate = profile->default_config.rate;
         ALOGW("Invalid sample rate %u - using default %u.",
@@ -82,6 +86,8 @@
 
     if (config->channels != 0 && profile_is_channel_count_valid(profile, config->channels)) {
         proxy->alsa_config.channels = config->channels;
+    } else if (require_exact_match) {
+        ret = -EINVAL;
     } else {
         proxy->alsa_config.channels = profile_get_closest_channel_count(profile, config->channels);
         ALOGW("Invalid channel count %u - using closest %u.",
@@ -114,7 +120,7 @@
     // let's check to make sure we can ACTUALLY use the maximum rate (with the channel count)
     // Note that profile->sample_rates is sorted highest to lowest, so the scan will get
     // us the highest working rate
-    int max_rate_index = proxy_scan_rates(proxy, profile->sample_rates);
+    int max_rate_index = proxy_scan_rates(proxy, profile->sample_rates, require_exact_match);
     if (max_rate_index >= 0) {
         if (proxy->alsa_config.rate > profile->sample_rates[max_rate_index]) {
             ALOGW("Limiting sampling rate from %u to %u.",
@@ -290,7 +296,9 @@
     }
 }
 
-int proxy_scan_rates(alsa_device_proxy * proxy, const unsigned sample_rates[]) {
+int proxy_scan_rates(alsa_device_proxy * proxy,
+                     const unsigned sample_rates[],
+                     bool require_exact_match) {
     const alsa_device_profile* profile = proxy->profile;
     if (profile->card < 0 || profile->device < 0) {
         return -EINVAL;
@@ -302,6 +310,10 @@
     struct pcm * alsa_pcm;
     int rate_index = 0;
     while (sample_rates[rate_index] != 0) {
+        if (require_exact_match && alsa_config.rate != sample_rates[rate_index]) {
+            rate_index++;
+            continue;
+        }
         alsa_config.rate = sample_rates[rate_index];
         alsa_pcm = pcm_open(profile->card, profile->device,
                 profile->direction | PCM_MONOTONIC, &alsa_config);
diff --git a/alsa_utils/include/alsa_device_proxy.h b/alsa_utils/include/alsa_device_proxy.h
index 49f7019..ed33585 100644
--- a/alsa_utils/include/alsa_device_proxy.h
+++ b/alsa_utils/include/alsa_device_proxy.h
@@ -35,7 +35,7 @@
 
 /* State */
 int proxy_prepare(alsa_device_proxy * proxy, const alsa_device_profile * profile,
-                   struct pcm_config * config);
+                  struct pcm_config * config, bool require_exact_match);
 int proxy_open(alsa_device_proxy * proxy);
 void proxy_close(alsa_device_proxy * proxy);
 int proxy_get_presentation_position(const alsa_device_proxy * proxy,
@@ -56,7 +56,8 @@
  * returns the index of the first rate for which the ALSA device can be opened.
  * return negative value if none work or an error occurs.
  */
-int proxy_scan_rates(alsa_device_proxy * proxy, const unsigned sample_rates[]);
+int proxy_scan_rates(alsa_device_proxy * proxy, const unsigned sample_rates[],
+                     bool require_exact_match);
 
 /* I/O */
 int proxy_write(alsa_device_proxy * proxy, const void *data, unsigned int count);
diff --git a/audio/Android.bp b/audio/Android.bp
index bbce579..5859f7e 100644
--- a/audio/Android.bp
+++ b/audio/Android.bp
@@ -36,7 +36,7 @@
         "//apex_available:platform",
         "com.android.media",
         "com.android.media.swcodec",
-        "com.android.bluetooth",
+        "com.android.btservices",
     ],
 }
 
diff --git a/audio/include/system/audio-base-utils.h b/audio/include/system/audio-base-utils.h
index 525a383..4b81562 100644
--- a/audio/include/system/audio-base-utils.h
+++ b/audio/include/system/audio-base-utils.h
@@ -134,6 +134,7 @@
     AUDIO_USAGE_MAX           = AUDIO_USAGE_CALL_ASSISTANT,
     AUDIO_USAGE_CNT           = AUDIO_USAGE_CALL_ASSISTANT + 1,
 
+    AUDIO_LATENCY_MODE_INVALID = -1,
     AUDIO_LATENCY_MODE_CNT    = AUDIO_LATENCY_MODE_LOW + 1,
 }; // enum
 
@@ -234,6 +235,10 @@
     AUDIO_DEVICE_OUT_BLE_SPEAKER,               // 0x20000001u
 };
 
+static CONST_ARRAY audio_devices_t AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY[] = {
+    AUDIO_DEVICE_OUT_BLE_BROADCAST,             // 0x20000002u
+};
+
 // inline constexpr
 static CONST_ARRAY audio_devices_t AUDIO_DEVICE_IN_ALL_ARRAY[] = {
     AUDIO_DEVICE_IN_COMMUNICATION,              // 0x80000001u
@@ -319,6 +324,8 @@
                                                      AUDIO_DEVICE_OUT_ALL_BLE_ARRAY);
 static const uint32_t AUDIO_DEVICE_OUT_BLE_UNICAST_CNT = AUDIO_ARRAY_SIZE(
                                                      AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY);
+static const uint32_t AUDIO_DEVICE_OUT_BLE_BROADCAST_CNT = AUDIO_ARRAY_SIZE(
+                                                     AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY);
 
 static const uint32_t AUDIO_DEVICE_IN_CNT = AUDIO_ARRAY_SIZE(AUDIO_DEVICE_IN_ALL_ARRAY);
 static const uint32_t AUDIO_DEVICE_IN_SCO_CNT = AUDIO_ARRAY_SIZE(AUDIO_DEVICE_IN_ALL_SCO_ARRAY);
@@ -366,6 +373,8 @@
               "AUDIO_DEVICE_OUT_ALL_BLE_ARRAY must be sorted");
 static_assert(isSorted(AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY),
               "AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY must be sorted");
+static_assert(isSorted(AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY),
+              "AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY must be sorted");
 static_assert(isSorted(AUDIO_DEVICE_IN_ALL_ARRAY),
               "AUDIO_DEVICE_IN_ALL_ARRAY must be sorted");
 static_assert(isSorted(AUDIO_DEVICE_IN_ALL_SCO_ARRAY),
@@ -384,6 +393,7 @@
 static_assert(AUDIO_DEVICE_OUT_DIGITAL_CNT == std::size(AUDIO_DEVICE_OUT_ALL_DIGITAL_ARRAY));
 static_assert(AUDIO_DEVICE_OUT_BLE_CNT == std::size(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY));
 static_assert(AUDIO_DEVICE_OUT_BLE_UNICAST_CNT == std::size(AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY));
+static_assert(AUDIO_DEVICE_OUT_BLE_BROADCAST_CNT == std::size(AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY));
 static_assert(AUDIO_DEVICE_IN_CNT == std::size(AUDIO_DEVICE_IN_ALL_ARRAY));
 static_assert(AUDIO_DEVICE_IN_SCO_CNT == std::size(AUDIO_DEVICE_IN_ALL_SCO_ARRAY));
 static_assert(AUDIO_DEVICE_IN_USB_CNT == std::size(AUDIO_DEVICE_IN_ALL_USB_ARRAY));
diff --git a/audio/include/system/audio-base-v7.0.h b/audio/include/system/audio-base-v7.0.h
deleted file mode 100644
index d5c4452..0000000
--- a/audio/include/system/audio-base-v7.0.h
+++ /dev/null
@@ -1,62 +0,0 @@
-// This file is autogenerated by hidl-gen. Do not edit manually.
-// Source: android.hardware.audio@7.0
-// Location: hardware/interfaces/audio/7.0/
-
-#ifndef HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_V7_0_EXPORTED_CONSTANTS_H_
-#define HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_V7_0_EXPORTED_CONSTANTS_H_
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-typedef enum {
-    AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED = 0u,
-    AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT = 1u,
-    AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED = 2u,
-} audio_microphone_channel_mapping_t;
-
-typedef enum {
-    AUDIO_MICROPHONE_LOCATION_UNKNOWN = 0u,
-    AUDIO_MICROPHONE_LOCATION_MAINBODY = 1u,
-    AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE = 2u,
-    AUDIO_MICROPHONE_LOCATION_PERIPHERAL = 3u,
-} audio_microphone_location_t;
-
-typedef enum {
-    AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN = 0u,
-    AUDIO_MICROPHONE_DIRECTIONALITY_OMNI = 1u,
-    AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL = 2u,
-    AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID = 3u,
-    AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID = 4u,
-    AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID = 5u,
-} audio_microphone_directionality_t;
-
-typedef enum {
-    MIC_DIRECTION_UNSPECIFIED = 0,
-    MIC_DIRECTION_FRONT = 1,
-    MIC_DIRECTION_BACK = 2,
-    MIC_DIRECTION_EXTERNAL = 3,
-} audio_microphone_direction_t;
-
-typedef enum {
-    AUDIO_DUAL_MONO_MODE_OFF = 0,
-    AUDIO_DUAL_MONO_MODE_LR = 1,
-    AUDIO_DUAL_MONO_MODE_LL = 2,
-    AUDIO_DUAL_MONO_MODE_RR = 3,
-} audio_dual_mono_mode_t;
-
-typedef enum {
-    AUDIO_TIMESTRETCH_STRETCH_DEFAULT = 0,
-    AUDIO_TIMESTRETCH_STRETCH_VOICE = 1,
-} audio_timestretch_stretch_mode_t;
-
-enum {
-    HAL_AUDIO_TIMESTRETCH_FALLBACK_MUTE = 1,
-    HAL_AUDIO_TIMESTRETCH_FALLBACK_FAIL = 2,
-};
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif  // HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_V7_0_EXPORTED_CONSTANTS_H_
diff --git a/audio/include/system/audio-base-v7.1.h b/audio/include/system/audio-base-v7.1.h
deleted file mode 100644
index 18edcf8..0000000
--- a/audio/include/system/audio-base-v7.1.h
+++ /dev/null
@@ -1,21 +0,0 @@
-// This file is autogenerated by hidl-gen. Do not edit manually.
-// Source: android.hardware.audio@7.1
-// Location: hardware/interfaces/audio/7.1/
-
-#ifndef HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_V7_1_EXPORTED_CONSTANTS_H_
-#define HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_V7_1_EXPORTED_CONSTANTS_H_
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-typedef enum {
-    AUDIO_LATENCY_MODE_FREE = 0,
-    AUDIO_LATENCY_MODE_LOW = 1,
-} audio_latency_mode_t;
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif  // HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_V7_1_EXPORTED_CONSTANTS_H_
diff --git a/audio/include/system/audio-base.h b/audio/include/system/audio-base.h
index 767c488..ab80646 100644
--- a/audio/include/system/audio-base.h
+++ b/audio/include/system/audio-base.h
@@ -14,10 +14,68 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_HARDWARE_AUDIO_V7_x_EXPORTED_CONSTANTS_H_
-#define ANDROID_HARDWARE_AUDIO_V7_x_EXPORTED_CONSTANTS_H_
+#ifndef ANDROID_HARDWARE_AUDIO_HAL_EXPORTED_CONSTANTS_H_
+#define ANDROID_HARDWARE_AUDIO_HAL_EXPORTED_CONSTANTS_H_
 
-#include "audio-base-v7.0.h"
-#include "audio-base-v7.1.h"
+#ifdef __cplusplus
+extern "C" {
+#endif
 
-#endif  // ANDROID_HARDWARE_AUDIO_V7_x_EXPORTED_CONSTANTS_H_
+typedef enum {
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED = 0u,
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT = 1u,
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED = 2u,
+} audio_microphone_channel_mapping_t;
+
+typedef enum {
+    AUDIO_MICROPHONE_LOCATION_UNKNOWN = 0u,
+    AUDIO_MICROPHONE_LOCATION_MAINBODY = 1u,
+    AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE = 2u,
+    AUDIO_MICROPHONE_LOCATION_PERIPHERAL = 3u,
+} audio_microphone_location_t;
+
+typedef enum {
+    AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN = 0u,
+    AUDIO_MICROPHONE_DIRECTIONALITY_OMNI = 1u,
+    AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL = 2u,
+    AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID = 3u,
+    AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID = 4u,
+    AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID = 5u,
+} audio_microphone_directionality_t;
+
+typedef enum {
+    MIC_DIRECTION_UNSPECIFIED = 0,
+    MIC_DIRECTION_FRONT = 1,
+    MIC_DIRECTION_BACK = 2,
+    MIC_DIRECTION_EXTERNAL = 3,
+} audio_microphone_direction_t;
+
+typedef enum {
+    AUDIO_DUAL_MONO_MODE_OFF = 0,
+    AUDIO_DUAL_MONO_MODE_LR = 1,
+    AUDIO_DUAL_MONO_MODE_LL = 2,
+    AUDIO_DUAL_MONO_MODE_RR = 3,
+} audio_dual_mono_mode_t;
+
+typedef enum {
+    AUDIO_TIMESTRETCH_STRETCH_DEFAULT = 0,
+    AUDIO_TIMESTRETCH_STRETCH_VOICE = 1,
+} audio_timestretch_stretch_mode_t;
+
+enum {
+    HAL_AUDIO_TIMESTRETCH_FALLBACK_MUTE = 1,
+    HAL_AUDIO_TIMESTRETCH_FALLBACK_FAIL = 2,
+};
+
+typedef enum {
+    AUDIO_LATENCY_MODE_FREE = 0,
+    AUDIO_LATENCY_MODE_LOW = 1,
+    AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE = 2,
+    AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE = 3,
+} audio_latency_mode_t;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ANDROID_HARDWARE_AUDIO_HAL_EXPORTED_CONSTANTS_H_
diff --git a/audio/include/system/audio-hal-enums.h b/audio/include/system/audio-hal-enums.h
index 8e89899..bb00e81 100644
--- a/audio/include/system/audio-hal-enums.h
+++ b/audio/include/system/audio-hal-enums.h
@@ -229,7 +229,7 @@
 
 // The "channel mask" enum is comprised of discrete channels,
 // their combinations (masks), and special values.
-typedef enum {
+typedef enum : uint32_t {
     AUDIO_CHANNEL_OUT_DISCRETE_CHANNEL_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
     AUDIO_CHANNEL_IN_DISCRETE_CHANNEL_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
     AUDIO_CHANNEL_IN_OUT_MASK_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
@@ -302,7 +302,7 @@
     V(AUDIO_CONTENT_TYPE_SONIFICATION, 4) \
     V(AUDIO_CONTENT_TYPE_ULTRASOUND, 1997)
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_CONTENT_TYPE_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_content_type_t;
 
@@ -402,7 +402,7 @@
     V(AUDIO_DEVICE_IN_AUX_DIGITAL, AUDIO_DEVICE_IN_HDMI) \
     V(AUDIO_DEVICE_IN_STUB, AUDIO_DEVICE_IN_DEFAULT)
 
-typedef enum {
+typedef enum : uint32_t {
     AUDIO_DEVICE_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_devices_t;
 
@@ -441,9 +441,10 @@
     V(AUDIO_OUTPUT_FLAG_INCALL_MUSIC, 0x10000) \
     V(AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD, 0x20000) \
     V(AUDIO_OUTPUT_FLAG_SPATIALIZER, 0x40000) \
-    V(AUDIO_OUTPUT_FLAG_ULTRASOUND, 0x80000)
+    V(AUDIO_OUTPUT_FLAG_ULTRASOUND, 0x80000) \
+    V(AUDIO_OUTPUT_FLAG_BIT_PERFECT, 0x100000)
 
-typedef enum {
+typedef enum : uint32_t {
     AUDIO_OUTPUT_FLAG_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_output_flags_t;
 
@@ -472,9 +473,11 @@
     V(AUDIO_INPUT_FLAG_VOIP_TX, 0x20) \
     V(AUDIO_INPUT_FLAG_HW_AV_SYNC, 0x40) \
     V(AUDIO_INPUT_FLAG_DIRECT, 0x80) \
-    V(AUDIO_INPUT_FLAG_ULTRASOUND, 0x100)
+    V(AUDIO_INPUT_FLAG_ULTRASOUND, 0x100) \
+    V(AUDIO_INPUT_FLAG_HOTWORD_TAP, 0x200) \
+    V(AUDIO_INPUT_FLAG_HW_LOOKBACK, 0x400) \
 
-typedef enum {
+typedef enum : uint32_t {
     AUDIO_INPUT_FLAG_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_input_flags_t;
 
@@ -617,13 +620,17 @@
     V(AUDIO_FORMAT_MPEGH_LC_L4, AUDIO_FORMAT_MPEGH | AUDIO_FORMAT_MPEGH_SUB_LC_L4) \
     V(AUDIO_FORMAT_IEC60958, 0x2D000000u) \
     V(AUDIO_FORMAT_DTS_UHD, 0x2E000000u) \
-    V(AUDIO_FORMAT_DRA, 0x2F000000u)
+    V(AUDIO_FORMAT_DRA, 0x2F000000u) \
+    V(AUDIO_FORMAT_APTX_ADAPTIVE_QLEA, 0x30000000u) \
+    V(AUDIO_FORMAT_APTX_ADAPTIVE_R4, 0x31000000u) \
+    V(AUDIO_FORMAT_DTS_HD_MA, 0x32000000u) \
+    V(AUDIO_FORMAT_DTS_UHD_P2, 0x33000000u)
 
 #define AUDIO_FORMAT_LIST_DEF(V) \
     AUDIO_FORMAT_LIST_UNIQUE_DEF(V) \
     V(VX_AUDIO_FORMAT_LC3, AUDIO_FORMAT_LC3)
 
-typedef enum {
+typedef enum : uint32_t {
     AUDIO_FORMAT_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
     // These values must be part of the enum, but they are not valid formats,
     // and thus don't participate in to/from string conversions.
@@ -652,7 +659,7 @@
     V(AUDIO_GAIN_MODE_CHANNELS, 2) \
     V(AUDIO_GAIN_MODE_RAMP, 4)
 
-typedef enum {
+typedef enum : uint32_t {
     AUDIO_GAIN_MODE_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_gain_mode_t;
 
@@ -695,7 +702,7 @@
     V(AUDIO_SOURCE_INVALID, -1)
 #endif  // AUDIO_NO_SYSTEM_DECLARATIONS
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_SOURCE_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_source_t;
 
@@ -738,7 +745,7 @@
     V(AUDIO_STREAM_DEFAULT, -1)
 #endif  // AUDIO_NO_SYSTEM_DECLARATIONS
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_STREAM_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_stream_type_t;
 
@@ -788,7 +795,7 @@
     V(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED, 9)
 #endif  // AUDIO_NO_SYSTEM_DECLARATIONS
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_USAGE_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_usage_t;
 
@@ -800,7 +807,7 @@
 }
 
 inline bool audio_usage_from_string(const char* s, audio_usage_t* t) {
-    AUDIO_USAGE_LIST_NO_SYS_DEF(AUDIO_DEFINE_PARSE_CASE_V)
+    AUDIO_USAGE_LIST_DEF(AUDIO_DEFINE_PARSE_CASE_V)
     return false;
 }
 
@@ -809,9 +816,10 @@
 
 #define AUDIO_ENCAPSULATION_TYPE_LIST_DEF(V) \
     V(AUDIO_ENCAPSULATION_TYPE_NONE, 0) \
-    V(AUDIO_ENCAPSULATION_TYPE_IEC61937, 1)
+    V(AUDIO_ENCAPSULATION_TYPE_IEC61937, 1) \
+    V(AUDIO_ENCAPSULATION_TYPE_PCM, 2)
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_ENCAPSULATION_TYPE_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
 } audio_encapsulation_type_t;
 
diff --git a/audio/include/system/audio.h b/audio/include/system/audio.h
index b846595..c8598ab 100644
--- a/audio/include/system/audio.h
+++ b/audio/include/system/audio.h
@@ -18,6 +18,7 @@
 #ifndef ANDROID_AUDIO_CORE_H
 #define ANDROID_AUDIO_CORE_H
 
+#include <float.h>
 #include <stdbool.h>
 #include <stdint.h>
 #include <stdio.h>
@@ -44,6 +45,12 @@
 #endif // __cplusplus
 #endif // FALLTHROUGH_INTENDED
 
+#ifdef __cplusplus
+#define CONSTEXPR constexpr
+#else
+#define CONSTEXPR
+#endif
+
 __BEGIN_DECLS
 
 /* The enums were moved here mostly from
@@ -118,6 +125,8 @@
     audio_flags_mask_t   flags;
     char                 tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE]; /* UTF8 */
 } __attribute__((packed)) audio_attributes_t; // sent through Binder;
+/** The separator for tags. */
+static const char AUDIO_ATTRIBUTES_TAGS_SEPARATOR = ';';
 
 static const audio_attributes_t AUDIO_ATTRIBUTES_INITIALIZER = {
     /* .content_type = */ AUDIO_CONTENT_TYPE_UNKNOWN,
@@ -193,7 +202,7 @@
     return (audio_unique_id_use_t) (id & AUDIO_UNIQUE_ID_USE_MASK);
 }
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_SESSION_DEVICE = HAL_AUDIO_SESSION_DEVICE,
     AUDIO_SESSION_OUTPUT_STAGE = HAL_AUDIO_SESSION_OUTPUT_STAGE,
     AUDIO_SESSION_OUTPUT_MIX = HAL_AUDIO_SESSION_OUTPUT_MIX,
@@ -267,7 +276,7 @@
 #define AUDIO_CHANNEL_REPRESENTATION_LOG2   2
 
 /* The return value is undefined if the channel mask is invalid. */
-static inline uint32_t audio_channel_mask_get_bits(audio_channel_mask_t channel)
+static inline CONSTEXPR uint32_t audio_channel_mask_get_bits(audio_channel_mask_t channel)
 {
     return channel & ((1 << AUDIO_CHANNEL_COUNT_MAX) - 1);
 }
@@ -278,7 +287,7 @@
 } audio_channel_representation_t;
 
 /* The return value is undefined if the channel mask is invalid. */
-static inline audio_channel_representation_t audio_channel_mask_get_representation(
+static inline CONSTEXPR audio_channel_representation_t audio_channel_mask_get_representation(
         audio_channel_mask_t channel)
 {
     // The right shift should be sufficient, but also "and" for safety in case mask is not 32 bits
@@ -288,7 +297,7 @@
 
 #ifdef __cplusplus
 // Some effects use `int32_t` directly for channel mask.
-static inline uint32_t audio_channel_mask_get_representation(int32_t mask) {
+static inline constexpr uint32_t audio_channel_mask_get_representation(int32_t mask) {
     return audio_channel_mask_get_representation(static_cast<audio_channel_mask_t>(mask));
 }
 #endif
@@ -300,7 +309,7 @@
  * or because an input mask has an invalid input bit set.
  * All other APIs that take a channel mask assume that it is valid.
  */
-static inline bool audio_channel_mask_is_valid(audio_channel_mask_t channel)
+static inline CONSTEXPR bool audio_channel_mask_is_valid(audio_channel_mask_t channel)
 {
     uint32_t bits = audio_channel_mask_get_bits(channel);
     audio_channel_representation_t representation = audio_channel_mask_get_representation(channel);
@@ -316,14 +325,15 @@
 }
 
 /* Not part of public API */
-static inline audio_channel_mask_t audio_channel_mask_from_representation_and_bits(
+static inline CONSTEXPR audio_channel_mask_t audio_channel_mask_from_representation_and_bits(
         audio_channel_representation_t representation, uint32_t bits)
 {
     return (audio_channel_mask_t) ((representation << AUDIO_CHANNEL_COUNT_MAX) | bits);
 }
 
 /*
- * Returns true so long as Quadraphonic channels (FL, FR, BL, BR) are completely specified
+ * Returns true so long as Quadraphonic channels (FL, FR, BL, BR)
+ * or (FL, FR, SL, SR) are completely specified
  * in the channel mask. We expect these 4 channels to be the minimum for
  * reasonable spatializer effect quality.
  *
@@ -338,10 +348,46 @@
  * AUDIO_CHANNEL_OUT_13POINT_360RA
  * AUDIO_CHANNEL_OUT_22POINT2
  */
-static inline bool audio_is_channel_mask_spatialized(audio_channel_mask_t channelMask) {
+static inline CONSTEXPR bool audio_is_channel_mask_spatialized(audio_channel_mask_t channelMask) {
     return audio_channel_mask_get_representation(channelMask)
                 == AUDIO_CHANNEL_REPRESENTATION_POSITION
-            && (channelMask & AUDIO_CHANNEL_OUT_QUAD) == AUDIO_CHANNEL_OUT_QUAD;
+            && ((channelMask & AUDIO_CHANNEL_OUT_QUAD) == AUDIO_CHANNEL_OUT_QUAD
+                || (channelMask & AUDIO_CHANNEL_OUT_QUAD_SIDE) == AUDIO_CHANNEL_OUT_QUAD_SIDE);
+}
+
+/*
+ * MediaFormat channel masks follow the Java channel mask spec
+ * but might be specified as a native channel mask.  This method
+ * does a "smart" correction to ensure a native channel mask.
+ */
+static inline audio_channel_mask_t
+audio_channel_mask_from_media_format_mask(int32_t channelMaskFromFormat) {
+    // KEY_CHANNEL_MASK follows the android.media.AudioFormat java mask
+    // which is left-bitshifted by 2 relative to the native mask
+    if ((channelMaskFromFormat & 0b11) != 0) {
+        // received an unexpected mask (supposed to follow AudioFormat constants
+        // for output masks with the 2 least-significant bits at 0), but
+        // it may come from an extractor that uses native masks: keeping
+        // the mask as given is ok as it contains at least mono or stereo
+        // and potentially the haptic channels
+        return (audio_channel_mask_t)channelMaskFromFormat;
+    } else {
+        // We exclude bits from the lowest haptic bit all the way to the top of int.
+        // to avoid aliasing.  The remainder bits are position bits
+        // which must be shifted by 2 from Java to get native.
+        //
+        // Using the lowest set bit exclusion AND mask (x - 1), we find
+        // all the bits from lowest set bit to the top is m = x | ~(x - 1).
+        // Using the one's complement to two's complement formula ~x = -x - 1,
+        // we can reduce this to m = x | -x.
+        // (Note -x is also the lowest bit extraction AND mask; i.e. lowest_bit = x & -x).
+        const int32_t EXCLUDE_BITS = AUDIO_CHANNEL_HAPTIC_ALL | -AUDIO_CHANNEL_HAPTIC_ALL;
+        const int32_t positionBits = (channelMaskFromFormat & ~EXCLUDE_BITS) >> 2;
+
+        // Haptic bits are identical between Java and native.
+        const int32_t hapticBits = channelMaskFromFormat & AUDIO_CHANNEL_HAPTIC_ALL;
+        return (audio_channel_mask_t)(positionBits | hapticBits);
+    }
 }
 
 /**
@@ -699,9 +745,11 @@
     } ext;
 };
 
-typedef enum {
+typedef enum : int32_t {
     AUDIO_STANDARD_NONE = 0,
     AUDIO_STANDARD_EDID = 1,
+    AUDIO_STANDARD_SADB = 2,
+    AUDIO_STANDARD_VSADB = 3,
 } audio_standard_t;
 
 /**
@@ -1415,6 +1463,13 @@
             AUDIO_DEVICE_OUT_BLE_UNICAST_CNT, device);
 }
 
+static inline bool audio_is_ble_broadcast_device(audio_devices_t device)
+{
+    return audio_binary_search_device_array(
+            AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY, 0 /*left*/,
+            AUDIO_DEVICE_OUT_BLE_BROADCAST_CNT, device);
+}
+
 static inline bool audio_is_ble_in_device(audio_devices_t device)
 {
     return audio_binary_search_device_array(
@@ -1454,7 +1509,7 @@
  *  there are no channel bits set which could _not_ correspond to an output channel.
  * Otherwise returns false.
  */
-static inline bool audio_is_output_channel(audio_channel_mask_t channel)
+static inline CONSTEXPR bool audio_is_output_channel(audio_channel_mask_t channel)
 {
     uint32_t bits = audio_channel_mask_get_bits(channel);
     switch (audio_channel_mask_get_representation(channel)) {
@@ -1476,7 +1531,7 @@
  * it is excluded from the count.
  * Returns zero if the representation is invalid.
  */
-static inline uint32_t audio_channel_count_from_in_mask(audio_channel_mask_t channel)
+static inline CONSTEXPR uint32_t audio_channel_count_from_in_mask(audio_channel_mask_t channel)
 {
     uint32_t bits = audio_channel_mask_get_bits(channel);
     switch (audio_channel_mask_get_representation(channel)) {
@@ -1494,7 +1549,7 @@
 #ifdef __cplusplus
 // FIXME(b/169889714): buffer_config_t uses `uint32_t` for the mask.
 // A lot of effects code thus use `uint32_t` directly.
-static inline uint32_t audio_channel_count_from_in_mask(uint32_t mask) {
+static inline CONSTEXPR uint32_t audio_channel_count_from_in_mask(uint32_t mask) {
     return audio_channel_count_from_in_mask(static_cast<audio_channel_mask_t>(mask));
 }
 #endif
@@ -1505,7 +1560,7 @@
  * it is excluded from the count.
  * Returns zero if the representation is invalid.
  */
-static inline uint32_t audio_channel_count_from_out_mask(audio_channel_mask_t channel)
+static inline CONSTEXPR uint32_t audio_channel_count_from_out_mask(audio_channel_mask_t channel)
 {
     uint32_t bits = audio_channel_mask_get_bits(channel);
     switch (audio_channel_mask_get_representation(channel)) {
@@ -1523,7 +1578,7 @@
 #ifdef __cplusplus
 // FIXME(b/169889714): buffer_config_t uses `uint32_t` for the mask.
 // A lot of effects code thus use `uint32_t` directly.
-static inline uint32_t audio_channel_count_from_out_mask(uint32_t mask) {
+static inline CONSTEXPR uint32_t audio_channel_count_from_out_mask(uint32_t mask) {
     return audio_channel_count_from_out_mask(static_cast<audio_channel_mask_t>(mask));
 }
 #endif
@@ -1533,7 +1588,7 @@
  * or AUDIO_CHANNEL_NONE if the channel count is zero,
  * or AUDIO_CHANNEL_INVALID if the channel count exceeds AUDIO_CHANNEL_COUNT_MAX.
  */
-static inline audio_channel_mask_t audio_channel_mask_for_index_assignment_from_count(
+static inline CONSTEXPR audio_channel_mask_t audio_channel_mask_for_index_assignment_from_count(
         uint32_t channel_count)
 {
     if (channel_count == 0) {
@@ -1557,9 +1612,10 @@
  * or AUDIO_CHANNEL_INVALID if the channel count exceeds that of the
  * configurations for which a default output channel mask is defined.
  */
-static inline audio_channel_mask_t audio_channel_out_mask_from_count(uint32_t channel_count)
+static inline CONSTEXPR audio_channel_mask_t audio_channel_out_mask_from_count(
+        uint32_t channel_count)
 {
-    uint32_t bits;
+    uint32_t bits = 0;
     switch (channel_count) {
     case 0:
         return AUDIO_CHANNEL_NONE;
@@ -1587,6 +1643,9 @@
     case FCC_8:
         bits = AUDIO_CHANNEL_OUT_7POINT1;
         break;
+    case 10: // 5.1.4
+        bits = AUDIO_CHANNEL_OUT_5POINT1POINT4;
+        break;
     case FCC_12:
         bits = AUDIO_CHANNEL_OUT_7POINT1POINT4;
         break;
@@ -1607,9 +1666,10 @@
  * or AUDIO_CHANNEL_INVALID if the channel count exceeds that of the
  * configurations for which a default input channel mask is defined.
  */
-static inline audio_channel_mask_t audio_channel_in_mask_from_count(uint32_t channel_count)
+static inline CONSTEXPR audio_channel_mask_t audio_channel_in_mask_from_count(
+        uint32_t channel_count)
 {
-    uint32_t bits;
+    uint32_t bits = 0;
     switch (channel_count) {
     case 0:
         return AUDIO_CHANNEL_NONE;
@@ -1689,6 +1749,12 @@
     }
 }
 
+static inline audio_channel_mask_t audio_channel_mask_out_to_in_index_mask(audio_channel_mask_t out)
+{
+    return audio_channel_mask_for_index_assignment_from_count(
+            audio_channel_count_from_out_mask(out));
+}
+
 static inline bool audio_channel_position_mask_is_out_canonical(audio_channel_mask_t channelMask)
 {
     if (audio_channel_mask_get_representation(channelMask)
@@ -1831,6 +1897,8 @@
     case AUDIO_FORMAT_LHDC_LL:
     case AUDIO_FORMAT_APTX_TWSP:
     case AUDIO_FORMAT_LC3:
+    case AUDIO_FORMAT_APTX_ADAPTIVE_QLEA:
+    case AUDIO_FORMAT_APTX_ADAPTIVE_R4:
         return true;
     case AUDIO_FORMAT_MPEGH:
         switch (format) {
@@ -1845,6 +1913,8 @@
         /* not reached */
     case AUDIO_FORMAT_DTS_UHD:
     case AUDIO_FORMAT_DRA:
+    case AUDIO_FORMAT_DTS_HD_MA:
+    case AUDIO_FORMAT_DTS_UHD_P2:
         return true;
     default:
         return false;
@@ -2161,6 +2231,49 @@
 } audio_offload_mode_t;
 #endif // AUDIO_NO_SYSTEM_DECLARATIONS
 
+typedef enum : int32_t {
+    AUDIO_MIXER_BEHAVIOR_INVALID = -1,
+    AUDIO_MIXER_BEHAVIOR_DEFAULT = 0,
+    AUDIO_MIXER_BEHAVIOR_BIT_PERFECT = 1,
+} audio_mixer_behavior_t;
+
+struct audio_mixer_attributes {
+    audio_config_base_t config;
+    audio_mixer_behavior_t mixer_behavior;
+};
+
+typedef struct audio_mixer_attributes audio_mixer_attributes_t;
+
+static const audio_mixer_attributes_t AUDIO_MIXER_ATTRIBUTES_INITIALIZER = {
+    /* .config */ {
+        /* .sample_rate*/ 0,
+        /* .channel_mask*/ AUDIO_CHANNEL_NONE,
+        /* .format */ AUDIO_FORMAT_DEFAULT,
+    },
+    /* .mixer_behavior */ AUDIO_MIXER_BEHAVIOR_DEFAULT,
+};
+
+static inline audio_output_flags_t audio_output_flags_from_mixer_behavior(
+        audio_mixer_behavior_t mixerBehavior) {
+    switch (mixerBehavior) {
+        case AUDIO_MIXER_BEHAVIOR_BIT_PERFECT:
+            return AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+        case AUDIO_MIXER_BEHAVIOR_DEFAULT:
+        default:
+            return AUDIO_OUTPUT_FLAG_NONE;
+    }
+}
+
+inline const char* audio_channel_mask_to_string(audio_channel_mask_t channel_mask) {
+    if (audio_is_input_channel(channel_mask)) {
+        return audio_channel_in_mask_to_string(channel_mask);
+    } else if (audio_is_output_channel(channel_mask)) {
+        return audio_channel_out_mask_to_string(channel_mask);
+    } else {
+        return audio_channel_index_mask_to_string(channel_mask);
+    }
+}
+
 __END_DECLS
 
 /**
@@ -2204,14 +2317,25 @@
 
 #define AUDIO_PARAMETER_VALUE_ON "on"
 #define AUDIO_PARAMETER_VALUE_OFF "off"
+#define AUDIO_PARAMETER_VALUE_TRUE "true"
+#define AUDIO_PARAMETER_VALUE_FALSE "false"
 
 /**
  *  audio device parameters
  */
 
+/* Used to enable or disable BT SCO */
+#define AUDIO_PARAMETER_KEY_BT_SCO "BT_SCO"
+
 /* BT SCO Noise Reduction + Echo Cancellation parameters */
 #define AUDIO_PARAMETER_KEY_BT_NREC "bt_headset_nrec"
 
+/* Used to enable or disable BT A2DP */
+#define AUDIO_PARAMETER_KEY_BT_A2DP_SUSPENDED "A2dpSuspended"
+
+/* Used to enable or disable BT LE */
+#define AUDIO_PARAMETER_KEY_BT_LE_SUSPENDED "LeAudioSuspended"
+
 /* Get a new HW synchronization source identifier.
  * Return a valid source (positive integer) or AUDIO_HW_SYNC_INVALID if an error occurs
  * or no HW sync is available. */
@@ -2224,6 +2348,18 @@
  * used to select a specific language presentation for next generation audio codecs. */
 #define AUDIO_PARAMETER_KEY_AUDIO_LANGUAGE_PREFERRED "audio_language_preferred"
 
+/* Set to "true" when the AudioOutputDescriptor is closing.
+ * This notification is used by A2DP HAL.
+ * TODO(b/73175392) unify with exiting in the AIDL interface.
+ */
+#define AUDIO_PARAMETER_KEY_CLOSING "closing"
+
+/* Set to "1" on AudioFlinger preExit() for the thread.
+ * This notification is used by the remote submix and A2DP HAL.
+ * TODO(b/73175392) unify with closing in the AIDL interface.
+ */
+#define AUDIO_PARAMETER_KEY_EXITING "exiting"
+
 /**
  *  audio stream parameters
  */
diff --git a/audio/include/system/audio_config.h b/audio/include/system/audio_config.h
index b9fd6b5..c4b4e6b 100644
--- a/audio/include/system/audio_config.h
+++ b/audio/include/system/audio_config.h
@@ -43,10 +43,14 @@
     return paths;
 }
 
+static inline bool audio_is_readable_configuration_file(const char* filePath) {
+    return (access(filePath, R_OK) == 0);
+}
+
 static inline std::string audio_find_readable_configuration_file(const char* fileName) {
     for (const auto& path : audio_get_configuration_paths()) {
         std::string tryPath = path + "/" + fileName;
-        if (access(tryPath.c_str(), R_OK) == 0) {
+        if (audio_is_readable_configuration_file(tryPath.c_str())) {
             return tryPath;
         }
     }
diff --git a/audio/include/system/audio_effects/aidl_effects_utils.h b/audio/include/system/audio_effects/aidl_effects_utils.h
new file mode 100644
index 0000000..c46da27
--- /dev/null
+++ b/audio/include/system/audio_effects/aidl_effects_utils.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <optional>
+
+#include <aidl/android/hardware/audio/effect/AcousticEchoCanceler.h>
+#include <aidl/android/hardware/audio/effect/DynamicsProcessing.h>
+#include <aidl/android/hardware/audio/effect/Parameter.h>
+#include <aidl/android/hardware/audio/effect/Range.h>
+
+namespace aidl::android::hardware::audio::effect {
+
+/**
+ * EventFlag to indicate that the client has written data to the FMQ, align with EffectHalAidl.
+ * TODO: b/277900230, Define in future AIDL version.
+ */
+static constexpr uint32_t kEventFlagNotEmpty = 0x1;
+
+/**
+ * Check the target Parameter with $Parameter$Range definition in Capability.
+ * This method goes through the elements in the ranges to find a matching tag for the target
+ * parameter, and checks whether the target parameter is inside the range using the default
+ * AIDL union comparator.
+ *
+ * Absence of a corresponding range is an indication that there are no limits set on the
+ * parameter, so this method returns true.
+ */
+template <typename T, typename R>
+bool inRange(const T& target, const R& ranges) {
+  for (const auto& r : ranges) {
+    if (target.getTag() == r.min.getTag() &&
+        target.getTag() == r.max.getTag() &&
+        (target < r.min || target > r.max)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+template <typename Range::Tag rangeTag, typename T>
+bool inRange(const T& target, const Capability& cap) {
+  if (cap.range.getTag() == rangeTag) {
+      const auto& ranges = cap.range.template get<rangeTag>();
+      return inRange(target, ranges);
+  }
+  return true;
+}
+
+template <typename T, typename R>
+bool isRangeValid(const T& tag, const R& ranges) {
+  for (const auto& r : ranges) {
+    if (tag == r.min.getTag() && tag == r.max.getTag()) {
+      return r.min <= r.max;
+    }
+  }
+
+  return true;
+}
+
+template <typename Range::Tag rangeTag, typename T>
+bool isRangeValid(const T& paramTag, const Capability& cap) {
+  if (cap.range.getTag() == rangeTag) {
+      const auto& ranges = cap.range.template get<rangeTag>();
+      return isRangeValid(paramTag, ranges);
+  }
+  return true;
+}
+
+}  // namespace aidl::android::hardware::audio::effect
diff --git a/audio/include/system/audio_effects/audio_effects_utils.h b/audio/include/system/audio_effects/audio_effects_utils.h
new file mode 100644
index 0000000..972e64b
--- /dev/null
+++ b/audio/include/system/audio_effects/audio_effects_utils.h
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <sstream>
+#include <string>
+#include <sys/types.h>
+#include <utils/Errors.h>
+
+#include <system/audio_effect.h>
+
+namespace android {
+namespace effect {
+namespace utils {
+
+/**
+ * A wrapper class of legacy effect_param_t to avoid raw pointer read/write everywhere.
+ * The wrapper uses a reference to the underlying effect_param_t structure, and does not
+ * manage ownership of the structure.
+ * Thread safety is not in consideration in this class for now.
+ */
+class EffectParamWrapper {
+ public:
+  explicit EffectParamWrapper(effect_param_t& param) : mParam(param) {}
+
+  // validate command size to be at least parameterSize + valueSize after effect_param_t
+  bool validateCmdSize(size_t cmdSize) const {
+    return (uint64_t)getPaddedParameterSize() + mParam.vsize + sizeof(effect_param_t) <=
+           cmdSize;
+  }
+
+  /**
+   *  Validate that the parameter and value sizes are at least equal to the target sizes.
+   */
+  bool validateParamValueSize(size_t paramSize, size_t valueSize) const {
+    return mParam.psize >= paramSize && mParam.vsize >= valueSize;
+  }
+
+  std::string toString() const {
+    std::ostringstream os;
+    os << "effect_param_t: { ";
+    os << "status: " << mParam.status << ", p: " << mParam.psize
+       << " (padded: " << getPaddedParameterSize() << "), v: " << mParam.vsize
+       << ", dataAddr: " << &mParam.data;
+    os << "}";
+    return os.str();
+  }
+
+  status_t copyDataWithCheck(void* dst, const void* src, size_t len,
+                                    size_t offset, size_t max) {
+    if (!dst || !src || len + offset > max) {
+      return BAD_VALUE;
+    }
+    std::memcpy(dst, src, len);
+    return OK;
+  }
+
+  status_t readFromData(void* buf, size_t len, size_t offset, size_t max) {
+    return copyDataWithCheck(buf, mParam.data + offset, len, offset, max);
+  }
+
+  status_t getStatus() const { return mParam.status; }
+  size_t getPaddedParameterSize() const { return padding(mParam.psize); }
+  size_t getParameterSize() const { return mParam.psize; }
+  size_t getValueSize() const { return mParam.vsize; }
+  const uint8_t* getValueAddress() const {
+    return (uint8_t*)mParam.data + getPaddedParameterSize();
+  }
+
+  uint64_t getTotalSize() const {
+    return (uint64_t)sizeof(effect_param_t) + getPaddedParameterSize() + getValueSize();
+  }
+
+  /**
+   * Get reference to effect_param_t.
+   */
+  const effect_param_t& getEffectParam() const { return mParam; }
+
+  bool operator==(const EffectParamWrapper& other) const {
+    return (&other == this) || 0 == std::memcmp(&mParam, &other.mParam, sizeof(effect_param_t));
+  }
+
+  /**
+   * Padding psize to 32 bits aligned, because "start of value field inside
+   * the data field is always on a 32 bits boundary".
+   */
+  static constexpr inline size_t padding(size_t size) {
+    return ((size - 1) / sizeof(int32_t) + 1) * sizeof(int32_t);
+  }
+
+ private:
+  /* member with variable sized type at end of class */
+  const effect_param_t& mParam;
+};
+
+/**
+ * Reader class of effect_param_t data buffer.
+ */
+class EffectParamReader : public EffectParamWrapper {
+ public:
+  explicit EffectParamReader(effect_param_t& param)
+      : EffectParamWrapper(param), mValueROffset(getPaddedParameterSize()) {}
+
+  /**
+   * Read n values of type T from data to buf; mParamROffset advances n * sizeof(T) on
+   * success, and does not advance on failure.
+   */
+  template <typename T>
+  status_t readFromParameter(T* buf, size_t n = 1) {
+    size_t len = n * sizeof(T);
+    status_t ret = readFromData(buf, len, mParamROffset /* param offset */,
+                                getParameterSize() /* max offset */);
+    if (OK == ret) {
+       mParamROffset += len;
+    }
+    return ret;
+  }
+
+  /**
+   * Read n values of type T from data to buf; mValueROffset advances n * sizeof(T) on
+   * success, and does not advance on failure.
+   */
+  template <typename T>
+  status_t readFromValue(T* buf, size_t n = 1) {
+    size_t len = n * sizeof(T);
+    status_t ret = readFromData(buf, len, mValueROffset /* data offset */,
+                                getPaddedParameterSize() + getValueSize() /* max offset */);
+    if (OK == ret) {
+       mValueROffset += len;
+    }
+    return ret;
+  }
+
+  std::string toString() const {
+    std::ostringstream os;
+    os << EffectParamWrapper::toString();
+    os << ", paramROffset: " << mParamROffset;
+    os << ", valueROffset: " << mValueROffset;
+    return os.str();
+  }
+
+ private:
+  size_t mParamROffset = 0;
+  size_t mValueROffset = 0;
+};
+
+/**
+ * Writer class of effect_param_t data buffer.
+ */
+class EffectParamWriter : public EffectParamReader {
+ public:
+  explicit EffectParamWriter(effect_param_t& param)
+      : EffectParamReader(param), mParam(param), mValueWOffset(getPaddedParameterSize()) {
+  }
+
+  /**
+   * Write n parameters of type T from buf to data; mParamWOffset advances n * sizeof(T)
+   * on success, and does not advance on failure.
+   */
+  template <typename T>
+  status_t writeToParameter(const T* buf, size_t n = 1) {
+    size_t len = n * sizeof(T);
+    status_t ret = writeToData(buf, len, mParamWOffset /* data offset */,
+                               getParameterSize() /* max */);
+    if (OK == ret) {
+       mParamWOffset += len;
+    }
+    return ret;
+  }
+
+  status_t writeToData(const void* buf, size_t len, size_t offset, size_t max) {
+    return copyDataWithCheck(mParam.data + offset, buf, len, offset, max);
+  }
+  /**
+   * Write n values of type T from buf to data; mValueWOffset advances n * sizeof(T)
+   * on success, and does not advance on failure.
+   */
+  template <typename T>
+  status_t writeToValue(const T* buf, size_t n = 1) {
+    size_t len = n * sizeof(T);
+    status_t ret = writeToData(buf, len, mValueWOffset /* data offset */,
+                               getPaddedParameterSize() + getValueSize() /* max */);
+    if (OK == ret) {
+       mValueWOffset += len;
+    }
+    return ret;
+  }
+
+  /**
+   * Set vsize to the number of value bytes written so far (write offset minus padded psize).
+   * Together with getTotalSize(), can be used by getParameter to set replySize.
+   */
+  void finishValueWrite() { mParam.vsize = mValueWOffset - getPaddedParameterSize(); }
+
+  void setStatus(status_t status) { mParam.status = status; }
+
+  std::string toString() const {
+    std::ostringstream os;
+    os << EffectParamReader::toString();
+    os << ", paramWOffset: " << mParamWOffset;
+    os << ", valueWOffset: " << mValueWOffset;
+    return os.str();
+  }
+
+ private:
+  effect_param_t& mParam;
+  size_t mParamWOffset = 0;
+  size_t mValueWOffset = 0;
+};
+
+}  // namespace utils
+}  // namespace effect
+}  // namespace android
diff --git a/audio/include/system/audio_effects/effect_spatializer.h b/audio/include/system/audio_effects/effect_spatializer.h
index 2f853ea..971d0e3 100644
--- a/audio/include/system/audio_effects/effect_spatializer.h
+++ b/audio/include/system/audio_effects/effect_spatializer.h
@@ -47,8 +47,27 @@
     SPATIALIZER_PARAM_HEAD_TO_STAGE,
     // foldable device hinge angle as a float value in rad
     SPATIALIZER_PARAM_HINGE_ANGLE,
-    // Display orientation as a float value in rad
+
+    // The default display orientation as reported by DisplayManager.
+    //
+    // The DisplayManager reports 0, 90, 180, 270 degrees,
+    // here it is reported as a float value in radians.
+    // Only 4 values 0, PI/2, PI, 3PI/2 will be sent as of Android 14 (U).
+    // Due to precision, compare with an epsilon range, suggest
+    // rounding to the nearest integer degree for practical use.
+    //
+    // Notes:
+    //    1) A device may have more than one display.
+    //    2) A display may be locked which prevents the application from rotating.
     SPATIALIZER_PARAM_DISPLAY_ORIENTATION,
+
+    // The fold state as reported by DeviceStateManager for a foldable.
+    // This is an integer value of either 0 (open) or 1 (folded).
+    //
+    // The device fold state may affect which display is active (if any).
+    // The open/closed logical state differs from the hinge angle,
+    // which may be reported by a hinge sensor.
+    SPATIALIZER_PARAM_FOLD_STATE,
 } t_virtualizer_stage_params;
 
 // See SpatializationLevel.aidl
diff --git a/audio/include/system/audio_effects/effect_uuid.h b/audio/include/system/audio_effects/effect_uuid.h
new file mode 100644
index 0000000..9f71dbe
--- /dev/null
+++ b/audio/include/system/audio_effects/effect_uuid.h
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_UUID_H_
+#define ANDROID_EFFECT_UUID_H_
+
+#include <aidl/android/hardware/audio/effect/Descriptor.h>
+#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <android-base/no_destructor.h>
+
+namespace aidl::android::hardware::audio::effect {
+
+using ::aidl::android::media::audio::common::AudioUuid;
+
+inline AudioUuid stringToUuid(const char* str) {
+    AudioUuid uuid{};
+    uint32_t tmp[10];
+    if (!str || sscanf(str, "%08x-%04x-%04x-%04x-%02x%02x%02x%02x%02x%02x", tmp,
+                       tmp + 1, tmp + 2, tmp + 3, tmp + 4, tmp + 5, tmp + 6,
+                       tmp + 7, tmp + 8, tmp + 9) < 10) {
+      return uuid;
+    }
+
+    uuid.timeLow = (uint32_t)tmp[0];
+    uuid.timeMid = (uint16_t)tmp[1];
+    uuid.timeHiAndVersion = (uint16_t)tmp[2];
+    uuid.clockSeq = (uint16_t)tmp[3];
+    uuid.node.insert(uuid.node.end(), {(uint8_t)tmp[4], (uint8_t)tmp[5], (uint8_t)tmp[6],
+                                       (uint8_t)tmp[7], (uint8_t)tmp[8], (uint8_t)tmp[9]});
+    return uuid;
+}
+
+inline const char* const& kEffectTypeUuidAcousticEchoCanceler =
+    Descriptor::EFFECT_TYPE_UUID_AEC;
+inline const char* const& kEffectTypeUuidAutomaticGainControlV1 =
+    Descriptor::EFFECT_TYPE_UUID_AGC1;
+inline const char* const& kEffectTypeUuidAutomaticGainControlV2 =
+    Descriptor::EFFECT_TYPE_UUID_AGC2;
+inline const char* const& kEffectTypeUuidBassBoost =
+    Descriptor::EFFECT_TYPE_UUID_BASS_BOOST;
+inline const char* const& kEffectTypeUuidDownmix =
+    Descriptor::EFFECT_TYPE_UUID_DOWNMIX;
+inline const char* const& kEffectTypeUuidDynamicsProcessing =
+    Descriptor::EFFECT_TYPE_UUID_DYNAMICS_PROCESSING;
+inline const char* const& kEffectTypeUuidEnvReverb =
+    Descriptor::EFFECT_TYPE_UUID_ENV_REVERB;
+inline const char* const& kEffectTypeUuidEqualizer =
+    Descriptor::EFFECT_TYPE_UUID_EQUALIZER;
+inline const char* const& kEffectTypeUuidHapticGenerator =
+    Descriptor::EFFECT_TYPE_UUID_HAPTIC_GENERATOR;
+inline const char* const& kEffectTypeUuidLoudnessEnhancer =
+    Descriptor::EFFECT_TYPE_UUID_LOUDNESS_ENHANCER;
+inline const char* const& kEffectTypeUuidNoiseSuppression =
+    Descriptor::EFFECT_TYPE_UUID_NS;
+inline const char* const& kEffectTypeUuidPresetReverb =
+    Descriptor::EFFECT_TYPE_UUID_PRESET_REVERB;
+inline const char* const& kEffectTypeUuidSpatializer =
+    Descriptor::EFFECT_TYPE_UUID_SPATIALIZER;
+inline const char* const& kEffectTypeUuidVirtualizer =
+    Descriptor::EFFECT_TYPE_UUID_VIRTUALIZER;
+inline const char* const& kEffectTypeUuidVisualizer =
+    Descriptor::EFFECT_TYPE_UUID_VISUALIZER;
+inline const char* const& kEffectTypeUuidVolume =
+    Descriptor::EFFECT_TYPE_UUID_VOLUME;
+
+constexpr char kEffectImplUuidAcousticEchoCancelerSw[] = "bb392ec0-8d4d-11e0-a896-0002a5d5c51b";
+constexpr char kEffectImplUuidAutomaticGainControlV1Sw[] = "aa8130e0-66fc-11e0-bad0-0002a5d5c51b";
+constexpr char kEffectImplUuidAutomaticGainControlV2Sw[] = "89f38e65-d4d2-4d64-ad0e-2b3e799ea886";
+constexpr char kEffectImplUuidBassBoostSw[] = "fa8181f2-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidBassBoostBundle[] = "8631f300-72e2-11df-b57e-0002a5d5c51b";
+constexpr char kEffectImplUuidBassBoostProxy[] = "14804144-a5ee-4d24-aa88-0002a5d5c51b";
+constexpr char kEffectImplUuidDownmixSw[] = "fa8187ba-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidDownmix[] = "93f04452-e4fe-41cc-91f9-e475b6d1d69f";
+constexpr char kEffectImplUuidDynamicsProcessingSw[] = "fa818d78-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidDynamicsProcessing[] = "e0e6539b-1781-7261-676f-6d7573696340";
+constexpr char kEffectImplUuidEqualizerSw[] = "0bed4300-847d-11df-bb17-0002a5d5c51b";
+constexpr char kEffectImplUuidEqualizerBundle[] = "ce772f20-847d-11df-bb17-0002a5d5c51b";
+constexpr char kEffectImplUuidEqualizerProxy[] = "c8e70ecd-48ca-456e-8a4f-0002a5d5c51b";
+constexpr char kEffectImplUuidHapticGeneratorSw[] = "fa819110-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidHapticGenerator[] = "97c4acd1-8b82-4f2f-832e-c2fe5d7a9931";
+constexpr char kEffectImplUuidLoudnessEnhancerSw[] = "fa819610-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidLoudnessEnhancer[] = "fa415329-2034-4bea-b5dc-5b381c8d1e2c";
+constexpr char kEffectImplUuidEnvReverbSw[] = "fa819886-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidAuxEnvReverb[] = "4a387fc0-8ab3-11df-8bad-0002a5d5c51b";
+constexpr char kEffectImplUuidInsertEnvReverb[] = "c7a511a0-a3bb-11df-860e-0002a5d5c51b";
+constexpr char kEffectImplUuidNoiseSuppressionSw[] = "c06c8400-8e06-11e0-9cb6-0002a5d5c51b";
+constexpr char kEffectImplUuidPresetReverbSw[] = "fa8199c6-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidAuxPresetReverb[] = "f29a1400-a3bb-11df-8ddc-0002a5d5c51b";
+constexpr char kEffectImplUuidInsertPresetReverb[] = "172cdf00-a3bc-11df-a72f-0002a5d5c51b";
+constexpr char kEffectImplUuidVirtualizerSw[] = "fa819d86-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidVirtualizerBundle[] = "1d4033c0-8557-11df-9f2d-0002a5d5c51b";
+constexpr char kEffectImplUuidVirtualizerProxy[] = "d3467faa-acc7-4d34-acaf-0002a5d5c51b";
+constexpr char kEffectImplUuidVisualizerSw[] = "fa81a0f6-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidVisualizer[] = "d069d9e0-8329-11df-9168-0002a5d5c51b";
+constexpr char kEffectImplUuidVisualizerProxy[] = "1d0a1a53-7d5d-48f2-8e71-27fbd10d842c";
+constexpr char kEffectImplUuidVolumeSw[] = "fa81a718-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectImplUuidVolumeBundle[] = "119341a0-8469-11df-81f9-0002a5d5c51b";
+
+constexpr char kEffectUuidNull[] = "ec7178ec-e5e1-4432-a3f4-4657e6795210";
+constexpr char kEffectUuidZero[] = "00000000-0000-0000-0000-000000000000";
+constexpr char kEffectUuidExtensionType[] = "fa81dbde-588b-11ed-9b6a-0242ac120002";
+constexpr char kEffectUuidExtensionImpl[] = "fa81dd00-588b-11ed-9b6a-0242ac120002";
+
+#define EFFECT_TYPE_UUID_LIST_DEF(V)  \
+    V(TypeUuidAcousticEchoCanceler)   \
+    V(TypeUuidAutomaticGainControlV1) \
+    V(TypeUuidAutomaticGainControlV2) \
+    V(TypeUuidBassBoost)              \
+    V(TypeUuidDownmix)                \
+    V(TypeUuidDynamicsProcessing)     \
+    V(TypeUuidEqualizer)              \
+    V(TypeUuidHapticGenerator)        \
+    V(TypeUuidLoudnessEnhancer)       \
+    V(TypeUuidEnvReverb)              \
+    V(TypeUuidPresetReverb)           \
+    V(TypeUuidNoiseSuppression)       \
+    V(TypeUuidSpatializer)            \
+    V(TypeUuidVirtualizer)            \
+    V(TypeUuidVisualizer)             \
+    V(TypeUuidVolume)
+
+#define EFFECT_IMPL_UUID_LIST_DEF(V)    \
+    V(ImplUuidAcousticEchoCancelerSw)   \
+    V(ImplUuidAutomaticGainControlV1Sw) \
+    V(ImplUuidAutomaticGainControlV2Sw) \
+    V(ImplUuidBassBoostSw)              \
+    V(ImplUuidBassBoostBundle)          \
+    V(ImplUuidBassBoostProxy)           \
+    V(ImplUuidDownmixSw)                \
+    V(ImplUuidDownmix)                  \
+    V(ImplUuidDynamicsProcessingSw)     \
+    V(ImplUuidDynamicsProcessing)       \
+    V(ImplUuidEqualizerSw)              \
+    V(ImplUuidEqualizerBundle)          \
+    V(ImplUuidEqualizerProxy)           \
+    V(ImplUuidHapticGeneratorSw)        \
+    V(ImplUuidHapticGenerator)          \
+    V(ImplUuidLoudnessEnhancerSw)       \
+    V(ImplUuidLoudnessEnhancer)         \
+    V(ImplUuidEnvReverbSw)              \
+    V(ImplUuidAuxEnvReverb)             \
+    V(ImplUuidInsertEnvReverb)          \
+    V(ImplUuidNoiseSuppressionSw)       \
+    V(ImplUuidPresetReverbSw)           \
+    V(ImplUuidAuxPresetReverb)          \
+    V(ImplUuidInsertPresetReverb)       \
+    V(ImplUuidVirtualizerSw)            \
+    V(ImplUuidVirtualizerBundle)        \
+    V(ImplUuidVirtualizerProxy)         \
+    V(ImplUuidVisualizerSw)             \
+    V(ImplUuidVisualizer)               \
+    V(ImplUuidVisualizerProxy)          \
+    V(ImplUuidVolumeSw)                 \
+    V(ImplUuidVolumeBundle)
+
+#define EFFECT_OTHER_UUID_LIST_DEF(V) \
+    V(UuidNull)                           \
+    V(UuidZero)                           \
+    V(UuidExtensionType)                  \
+    V(UuidExtensionImpl)
+
+#define GENERATE_UUID_GETTER_V(symbol)                            \
+    inline const AudioUuid& getEffect##symbol() {                 \
+      static const ::android::base::NoDestructor<AudioUuid> uuid( \
+          stringToUuid(kEffect##symbol));                         \
+      return *uuid;                                               \
+    }
+
+// Generate all the UUID getter functions:
+EFFECT_TYPE_UUID_LIST_DEF(GENERATE_UUID_GETTER_V)
+
+EFFECT_IMPL_UUID_LIST_DEF(GENERATE_UUID_GETTER_V)
+
+EFFECT_OTHER_UUID_LIST_DEF(GENERATE_UUID_GETTER_V)
+
+}  // namespace aidl::android::hardware::audio::effect
+
+#endif  // ANDROID_EFFECT_UUID_H_
\ No newline at end of file
diff --git a/audio_utils/Android.bp b/audio_utils/Android.bp
index d88756d..41817b3 100644
--- a/audio_utils/Android.bp
+++ b/audio_utils/Android.bp
@@ -51,6 +51,8 @@
         "fifo_writer_T.cpp",
         "format.c",
         "limiter.c",
+        "MelAggregator.cpp",
+        "MelProcessor.cpp",
         "Metadata.cpp",
         "minifloat.c",
         "mono_blend.cpp",
@@ -59,6 +61,7 @@
         "primitives.c",
         "roundup.c",
         "sample.c",
+        "hal_smoothness.c",
     ],
 
     header_libs: [
@@ -74,6 +77,11 @@
     shared_libs: [
         "libcutils",
         "liblog",
+        "libutils",
+    ],
+
+    whole_static_libs: [
+        "libaudioutils_fastmath",
     ],
 
     target: {
@@ -104,6 +112,42 @@
 }
 
 cc_library_static {
+    name: "libaudioutils_fastmath",
+    vendor_available: true,
+    product_available: true,
+    vndk: {
+        enabled: true,
+    },
+    double_loadable: true,
+    host_supported: true,
+    defaults: ["audio_utils_defaults"],
+
+    srcs: [
+        "ChannelMix.cpp",
+    ],
+
+    header_libs: [
+        "libaudio_system_headers",
+        "libutils_headers",
+    ],
+    min_sdk_version: "29",
+    shared_libs: [
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    cflags: [
+        "-Werror",
+        "-ffast-math",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+}
+
+cc_library_static {
     name: "libaudioutils_fixedfft",
     vendor_available: true,
     product_available: true,
diff --git a/audio_utils/ChannelMix.cpp b/audio_utils/ChannelMix.cpp
new file mode 100644
index 0000000..d8ee72d
--- /dev/null
+++ b/audio_utils/ChannelMix.cpp
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <audio_utils/ChannelMix.h>
+
+namespace android::audio_utils::channels {
+
+/**
+ * The ChannelMix code relies on sparse matrix optimization for speed.
+ *
+ * This requires -ffast-math to be specified.
+ */
+
+
+/*
+ Implementation detail:
+
+ We "compute" the channel mix matrix by constexpr computation,
+ but alternatively we could use a straight 2D array initialization.
+
+ The thought is that the channel mix matrix by computation is easier
+ to keep consistent with modifications.
+ */
+
+namespace {
+// A container for the channel matrix
+template <audio_channel_mask_t INPUT_CHANNEL_MASK, audio_channel_mask_t OUTPUT_CHANNEL_MASK>
+struct ChannelMatrixContainer {
+    static inline constexpr size_t INPUT_CHANNEL_COUNT =
+            audio_channel_count_from_out_mask(INPUT_CHANNEL_MASK);
+    static inline constexpr size_t OUTPUT_CHANNEL_COUNT =
+            audio_channel_count_from_out_mask(OUTPUT_CHANNEL_MASK);
+    float f[INPUT_CHANNEL_COUNT][OUTPUT_CHANNEL_COUNT];
+};
+
+template <audio_channel_mask_t INPUT_CHANNEL_MASK, audio_channel_mask_t OUTPUT_CHANNEL_MASK>
+constexpr ChannelMatrixContainer<INPUT_CHANNEL_MASK, OUTPUT_CHANNEL_MASK> computeMatrix() {
+    ChannelMatrixContainer<INPUT_CHANNEL_MASK, OUTPUT_CHANNEL_MASK> channelMatrix{};
+    // Compiler bug: cannot check result of this through static_assert.
+    (void)fillChannelMatrix<OUTPUT_CHANNEL_MASK>(INPUT_CHANNEL_MASK, channelMatrix.f);
+    return channelMatrix;
+}
+
+} // namespace
+
+/**
+ * Remixes a multichannel signal of specified number of channels
+ *
+ * INPUT_CHANNEL_MASK the src input.
+ * OUTPUT_CHANNEL_MASK the dst output.
+ * ACCUMULATE is true if the remix is added to the destination or
+ *               false if the remix replaces the destination.
+ *
+ * \param src          multichannel audio buffer to remix
+ * \param dst          remixed audio samples in the OUTPUT_CHANNEL_MASK configuration
+ * \param frameCount   number of multichannel frames to remix
+ *
+ * \return false if INPUT_CHANNEL_MASK is AUDIO_CHANNEL_NONE, true otherwise.
+ */
+template <audio_channel_mask_t INPUT_CHANNEL_MASK,
+        audio_channel_mask_t OUTPUT_CHANNEL_MASK, bool ACCUMULATE>
+bool sparseChannelMatrixMultiply(const float *src, float *dst, size_t frameCount) {
+    static constexpr auto s = computeMatrix<INPUT_CHANNEL_MASK, OUTPUT_CHANNEL_MASK>();
+
+    // matrix multiply
+    if (INPUT_CHANNEL_MASK == AUDIO_CHANNEL_NONE) return false;
+    for (;frameCount > 0; --frameCount) {
+        float ch[s.OUTPUT_CHANNEL_COUNT]{};
+        #pragma unroll
+        for (size_t i = 0; i < s.INPUT_CHANNEL_COUNT; ++i) {
+            const float (&array)[s.OUTPUT_CHANNEL_COUNT] = s.f[i];
+            #pragma unroll
+            for (size_t j = 0; j < s.OUTPUT_CHANNEL_COUNT; ++j) {
+                ch[j] += array[j] * src[i];
+            }
+        }
+        if constexpr (ACCUMULATE) {
+            #pragma unroll
+            for (size_t j = 0; j < s.OUTPUT_CHANNEL_COUNT; ++j) {
+                ch[j] += dst[j];
+            }
+        }
+        #pragma unroll
+        for (size_t j = 0; j < s.OUTPUT_CHANNEL_COUNT; ++j) {
+            dst[j] = clamp(ch[j]);
+        }
+        src += s.INPUT_CHANNEL_COUNT;
+        dst += s.OUTPUT_CHANNEL_COUNT;
+    }
+    return true;
+}
+
+// Create accelerated instances
+
+#define INSTANTIATE(INPUT_MASK, OUTPUT_MASK) \
+template bool \
+sparseChannelMatrixMultiply<INPUT_MASK, OUTPUT_MASK, true>( \
+        const float *src, float *dst, size_t frameCount); \
+template bool \
+sparseChannelMatrixMultiply<INPUT_MASK, OUTPUT_MASK, false>( \
+        const float *src, float *dst, size_t frameCount); \
+
+#define INSTANTIATE_MASKS(CHANNEL) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_STEREO, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_QUAD_BACK, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_5POINT1_BACK, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_7POINT1, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_5POINT1POINT2, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_5POINT1POINT4, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_7POINT1POINT2, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_7POINT1POINT4, (CHANNEL)) \
+INSTANTIATE(AUDIO_CHANNEL_OUT_22POINT2, (CHANNEL))
+
+INSTANTIATE_MASKS(AUDIO_CHANNEL_OUT_STEREO)
+INSTANTIATE_MASKS(AUDIO_CHANNEL_OUT_5POINT1)
+INSTANTIATE_MASKS(AUDIO_CHANNEL_OUT_7POINT1)
+INSTANTIATE_MASKS(AUDIO_CHANNEL_OUT_7POINT1POINT4)
+
+/* static */
+std::shared_ptr<IChannelMix> IChannelMix::create(audio_channel_mask_t outputChannelMask) {
+     switch (outputChannelMask) {
+     case AUDIO_CHANNEL_OUT_STEREO:
+         return std::make_shared<ChannelMix<AUDIO_CHANNEL_OUT_STEREO>>();
+     case AUDIO_CHANNEL_OUT_5POINT1:
+         return std::make_shared<ChannelMix<AUDIO_CHANNEL_OUT_5POINT1>>();
+     case AUDIO_CHANNEL_OUT_7POINT1:
+         return std::make_shared<ChannelMix<AUDIO_CHANNEL_OUT_7POINT1>>();
+     case AUDIO_CHANNEL_OUT_7POINT1POINT4:
+         return std::make_shared<ChannelMix<AUDIO_CHANNEL_OUT_7POINT1POINT4>>();
+     default:
+         return {};
+     }
+}
+
+/* static */
+bool IChannelMix::isOutputChannelMaskSupported(audio_channel_mask_t outputChannelMask) {
+    switch (outputChannelMask) {
+    case AUDIO_CHANNEL_OUT_STEREO:
+    case AUDIO_CHANNEL_OUT_5POINT1:
+    case AUDIO_CHANNEL_OUT_7POINT1:
+    case AUDIO_CHANNEL_OUT_7POINT1POINT4:
+        return true;
+    default:
+        return false;
+    }
+}
+
+} // android::audio_utils::channels
diff --git a/audio_utils/MelAggregator.cpp b/audio_utils/MelAggregator.cpp
new file mode 100644
index 0000000..bec6990
--- /dev/null
+++ b/audio_utils/MelAggregator.cpp
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_MelAggregator"
+
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/power.h>
+#include <cinttypes>
+#include <iterator>
+#include <utils/Log.h>
+
+namespace android::audio_utils {
+namespace {
+
+/** Min value after which the MEL values are aggregated to CSD. */
+constexpr float kMinCsdRecordToStore = 0.01f;
+
+/** Threshold for 100% CSD expressed in Pa^2s. */
+constexpr float kCsdThreshold = 5760.0f; // 1.6f(Pa^2h) * 3600.0f(s);
+
+/** Reference energy used for dB calculation in Pa^2. */
+constexpr float kReferenceEnergyPa = 4e-10;
+
+/**
+ * Checking the intersection of the time intervals of v1 and v2. Each MelRecord v
+ * spawns an interval [t1, t2) if and only if:
+ *    v.timestamp == t1 && v.mels.size() == t2 - t1
+ **/
+std::pair<int64_t, int64_t> intersectRegion(const MelRecord& v1, const MelRecord& v2)
+{
+    const int64_t maxStart = std::max(v1.timestamp, v2.timestamp);
+    const int64_t v1End = v1.timestamp + v1.mels.size();
+    const int64_t v2End = v2.timestamp + v2.mels.size();
+    const int64_t minEnd = std::min(v1End, v2End);
+    return {maxStart, minEnd};
+}
+
+float aggregateMels(const float mel1, const float mel2) {
+    return audio_utils_power_from_energy(powf(10.f, mel1 / 10.f) + powf(10.f, mel2 / 10.f));
+}
+
+float averageMelEnergy(const float mel1,
+                       const int64_t duration1,
+                       const float mel2,
+                       const int64_t duration2) {
+    return audio_utils_power_from_energy((powf(10.f, mel1 / 10.f) * duration1
+        + powf(10.f, mel2 / 10.f) * duration2) / (duration1 + duration2));
+}
+
+float melToCsd(float mel) {
+    float energy = powf(10.f, mel / 10.0f);
+    return kReferenceEnergyPa * energy / kCsdThreshold;
+}
+
+CsdRecord createRevertedRecord(const CsdRecord& record) {
+    return {record.timestamp, record.duration, -record.value, record.averageMel};
+}
+
+}  // namespace
+
+int64_t MelAggregator::csdTimeIntervalStored_l()
+{
+    return mCsdRecords.rbegin()->second.timestamp + mCsdRecords.rbegin()->second.duration
+        - mCsdRecords.begin()->second.timestamp;
+}
+
+std::map<int64_t, CsdRecord>::iterator MelAggregator::addNewestCsdRecord_l(int64_t timestamp,
+                                                                           int64_t duration,
+                                                                           float csdRecord,
+                                                                           float averageMel)
+{
+    ALOGV("%s: add new csd[%" PRId64 ", %" PRId64 "]=%f for MEL avg %f",
+                      __func__,
+                      timestamp,
+                      duration,
+                      csdRecord,
+                      averageMel);
+
+    mCurrentCsd += csdRecord;
+    return mCsdRecords.emplace_hint(mCsdRecords.end(),
+                                    timestamp,
+                                    CsdRecord(timestamp,
+                                              duration,
+                                              csdRecord,
+                                              averageMel));
+}
+
+void MelAggregator::removeOldCsdRecords_l(std::vector<CsdRecord>& removeRecords) {
+    // Remove older CSD values
+    while (!mCsdRecords.empty() && csdTimeIntervalStored_l() > mCsdWindowSeconds) {
+        mCurrentCsd -= mCsdRecords.begin()->second.value;
+        removeRecords.emplace_back(createRevertedRecord(mCsdRecords.begin()->second));
+        mCsdRecords.erase(mCsdRecords.begin());
+    }
+}
+
+std::vector<CsdRecord> MelAggregator::updateCsdRecords_l()
+{
+    std::vector<CsdRecord> newRecords;
+
+    // only update if we are above threshold
+    if (mCurrentMelRecordsCsd < kMinCsdRecordToStore) {
+        removeOldCsdRecords_l(newRecords);
+        return newRecords;
+    }
+
+    float converted = 0.f;
+    float averageMel = 0.f;
+    float csdValue = 0.f;
+    int64_t duration = 0;
+    int64_t timestamp = mMelRecords.begin()->first;
+    for (const auto& storedMel: mMelRecords) {
+        int melsIdx = 0;
+        for (const auto& mel: storedMel.second.mels) {
+            averageMel = averageMelEnergy(averageMel, duration, mel, 1);
+            csdValue += melToCsd(mel);
+            ++duration;
+            if (csdValue >= kMinCsdRecordToStore
+                && mCurrentMelRecordsCsd - converted - csdValue >= kMinCsdRecordToStore) {
+                auto it = addNewestCsdRecord_l(timestamp,
+                                               duration,
+                                               csdValue,
+                                               averageMel);
+                newRecords.emplace_back(it->second);
+
+                duration = 0;
+                averageMel = 0.f;
+                converted += csdValue;
+                csdValue = 0.f;
+                timestamp = storedMel.first + melsIdx;
+            }
+            ++melsIdx;
+        }
+    }
+
+    if (csdValue > 0) {
+        auto it = addNewestCsdRecord_l(timestamp,
+                                       duration,
+                                       csdValue,
+                                       averageMel);
+        newRecords.emplace_back(it->second);
+    }
+
+    removeOldCsdRecords_l(newRecords);
+
+    // reset mel values
+    mCurrentMelRecordsCsd = 0.0f;
+    mMelRecords.clear();
+
+    return newRecords;
+}
+
+std::vector<CsdRecord> MelAggregator::aggregateAndAddNewMelRecord(const MelRecord& mel)
+{
+    std::lock_guard _l(mLock);
+    return aggregateAndAddNewMelRecord_l(mel);
+}
+
+std::vector<CsdRecord> MelAggregator::aggregateAndAddNewMelRecord_l(const MelRecord& mel)
+{
+    for (const auto& m : mel.mels) {
+        mCurrentMelRecordsCsd += melToCsd(m);
+    }
+    ALOGV("%s: current mel values CSD %f", __func__, mCurrentMelRecordsCsd);
+
+    auto mergeIt = mMelRecords.lower_bound(mel.timestamp);
+
+    if (mergeIt != mMelRecords.begin()) {
+        auto prevMergeIt = std::prev(mergeIt);
+        if (prevMergeIt->second.overlapsEnd(mel)) {
+            mergeIt = prevMergeIt;
+        }
+    }
+
+    int64_t newTimestamp = mel.timestamp;
+    std::vector<float> newMels = mel.mels;
+    auto mergeStart = mergeIt;
+    int overlapStart = 0;
+    while (mergeIt != mMelRecords.end()) {
+        const auto& [melRecordStart, melRecord] = *mergeIt;
+        const auto [regionStart, regionEnd] = intersectRegion(melRecord, mel);
+        if (regionStart >= regionEnd) {
+            // no intersection
+            break;
+        }
+
+        if (melRecordStart < regionStart) {
+            newTimestamp = melRecordStart;
+            overlapStart = regionStart - melRecordStart;
+            newMels.insert(newMels.begin(), melRecord.mels.begin(),
+                           melRecord.mels.begin() + overlapStart);
+        }
+
+        for (int64_t aggregateTime = regionStart; aggregateTime < regionEnd; ++aggregateTime) {
+            const int offsetStored = aggregateTime - melRecordStart;
+            const int offsetNew = aggregateTime - mel.timestamp;
+            newMels[overlapStart + offsetNew] =
+                aggregateMels(melRecord.mels[offsetStored], mel.mels[offsetNew]);
+        }
+
+        const int64_t mergeEndTime = melRecordStart + melRecord.mels.size();
+        if (mergeEndTime > regionEnd) {
+            newMels.insert(newMels.end(),
+                           melRecord.mels.end() - mergeEndTime + regionEnd,
+                           melRecord.mels.end());
+        }
+
+        ++mergeIt;
+    }
+
+    auto hint = mergeIt;
+    if (mergeStart != mergeIt) {
+        hint = mMelRecords.erase(mergeStart, mergeIt);
+    }
+
+    mMelRecords.emplace_hint(hint,
+                             newTimestamp,
+                             MelRecord(mel.portId, newMels, newTimestamp));
+
+    return updateCsdRecords_l();
+}
+
+void MelAggregator::reset(float newCsd, const std::vector<CsdRecord>& newRecords)
+{
+    std::lock_guard _l(mLock);
+    mCsdRecords.clear();
+    mMelRecords.clear();
+
+    mCurrentCsd = newCsd;
+    for (const auto& record : newRecords) {
+        mCsdRecords.emplace_hint(mCsdRecords.end(), record.timestamp, record);
+    }
+}
+
+size_t MelAggregator::getCachedMelRecordsSize() const
+{
+    std::lock_guard _l(mLock);
+    return mMelRecords.size();
+}
+
+void MelAggregator::foreachCachedMel(const std::function<void(const MelRecord&)>& f) const
+{
+    std::lock_guard _l(mLock);
+    for (const auto &melRecord : mMelRecords) {
+        f(melRecord.second);
+    }
+}
+
+float MelAggregator::getCsd() {
+    std::lock_guard _l(mLock);
+    return mCurrentCsd;
+}
+
+size_t MelAggregator::getCsdRecordsSize() const {
+    std::lock_guard _l(mLock);
+    return mCsdRecords.size();
+}
+
+void MelAggregator::foreachCsd(const std::function<void(const CsdRecord&)>& f) const
+{
+    std::lock_guard _l(mLock);
+    for (const auto &csdRecord : mCsdRecords) {
+        f(csdRecord.second);
+    }
+}
+
+}  // namespace android::audio_utils
diff --git a/audio_utils/MelProcessor.cpp b/audio_utils/MelProcessor.cpp
new file mode 100644
index 0000000..6c0aa2c
--- /dev/null
+++ b/audio_utils/MelProcessor.cpp
@@ -0,0 +1,414 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_MelProcessor"
+// #define VERY_VERY_VERBOSE_LOGGING
+#ifdef VERY_VERY_VERBOSE_LOGGING
+#define ALOGVV ALOGV
+#else
+#define ALOGVV(a...) do { } while(0)
+#endif
+
+#include <audio_utils/MelProcessor.h>
+
+#include <audio_utils/format.h>
+#include <audio_utils/power.h>
+#include <log/log.h>
+#include <sstream>
+#include <utils/threads.h>
+
+namespace android::audio_utils {
+
+constexpr int32_t kSecondsPerMelValue = 1;
+constexpr float kMelAdjustmentDb = -3.f;
+
+// Estimated offset defined in Table39 of IEC62368-1 3rd edition
+// -30dBFS, -10dBFS should correspond to 80dBSPL, 100dBSPL
+constexpr float kMeldBFSTodBSPLOffset = 110.f;
+
+constexpr float kRs1OutputdBFS = 80.f;  // dBA
+
+constexpr float kRs2LowerBound = 80.f;  // dBA
+constexpr float kRs2UpperBound = 100.f;  // dBA
+
+// The following arrays contain the IIR biquad filter coefficients for performing A-weighting as
+// described in IEC 61672:2003 for samples with 44.1kHz and 48kHz.
+constexpr std::array<std::array<float, kBiquadNumCoefs>, 2> kBiquadCoefs1 =
+    {{/* 44.1kHz= */{0.95616638497, -1.31960414122, 0.36343775625, -1.31861375911, 0.32059452332},
+      /* 48kHz= */{0.96525096525, -1.34730163086, 0.38205066561, -1.34730722798, 0.34905752979}}};
+constexpr std::array<std::array<float, kBiquadNumCoefs>, 2> kBiquadCoefs2 =
+    {{/* 44.1kHz= */{0.94317138580, -1.88634277160, 0.94317138580, -1.88558607420, 0.88709946900},
+      /* 48kHz= */{0.94696969696, -1.89393939393, 0.94696969696, -1.89387049481, 0.89515976917}}};
+constexpr std::array<std::array<float, kBiquadNumCoefs>, 2> kBiquadCoefs3 =
+    {{/* 44.1kHz= */{0.69736775447, -0.42552769920, -0.27184005527, -1.31859445445, 0.32058831623},
+      /* 48kHz= */{0.64666542810, -0.38362237137, -0.26304305672, -1.34730722798, 0.34905752979}}};
+
+MelProcessor::MelProcessor(uint32_t sampleRate,
+        uint32_t channelCount,
+        audio_format_t format,
+        const sp<MelCallback>& callback,
+        audio_port_handle_t deviceId,
+        float rs2Value,
+        size_t maxMelsCallback)
+    : mCallback(callback),
+      mMelWorker("MelWorker#" + pointerString(), mCallback),
+      mSampleRate(sampleRate),
+      mFramesPerMelValue(sampleRate * kSecondsPerMelValue),
+      mChannelCount(channelCount),
+      mFormat(format),
+      mAWeightSamples(mFramesPerMelValue * mChannelCount),
+      mFloatSamples(mFramesPerMelValue * mChannelCount),
+      mCurrentChannelEnergy(channelCount, 0.0f),
+      mMelValues(maxMelsCallback),
+      mCurrentIndex(0),
+      mDeviceId(deviceId),
+      mRs2UpperBound(rs2Value),
+      mCurrentSamples(0)
+{
+    createBiquads_l();
+
+    mMelWorker.run();
+}
+
+bool MelProcessor::isSampleRateSupported_l() const {
+    // For now only support 44.1 and 48kHz for Mel calculation
+    if (mSampleRate != 44100 && mSampleRate != 48000) {
+        return false;
+    }
+
+    return true;
+}
+
+void MelProcessor::createBiquads_l() {
+    if (!isSampleRateSupported_l()) {
+        return;
+    }
+
+    int coefsIndex = mSampleRate == 44100 ? 0 : 1;
+    mCascadedBiquads =
+              {std::make_unique<DefaultBiquadFilter>(mChannelCount, kBiquadCoefs1.at(coefsIndex)),
+               std::make_unique<DefaultBiquadFilter>(mChannelCount, kBiquadCoefs2.at(coefsIndex)),
+               std::make_unique<DefaultBiquadFilter>(mChannelCount, kBiquadCoefs3.at(coefsIndex))};
+}
+
+status_t MelProcessor::setOutputRs2UpperBound(float rs2Value)
+{
+    if (rs2Value < kRs2LowerBound || rs2Value > kRs2UpperBound) {
+        return BAD_VALUE;
+    }
+
+    mRs2UpperBound = rs2Value;
+
+    return NO_ERROR;
+}
+
+float MelProcessor::getOutputRs2UpperBound() const
+{
+    return mRs2UpperBound;
+}
+
+void MelProcessor::setDeviceId(audio_port_handle_t deviceId)
+{
+    mDeviceId = deviceId;
+}
+
+audio_port_handle_t MelProcessor::getDeviceId() {
+    return mDeviceId;
+}
+
+void MelProcessor::pause()
+{
+    ALOGV("%s", __func__);
+    mPaused = true;
+}
+
+void MelProcessor::resume()
+{
+    ALOGV("%s", __func__);
+    mPaused = false;
+}
+
+void MelProcessor::updateAudioFormat(uint32_t sampleRate,
+                                     uint32_t channelCount,
+                                     audio_format_t format) {
+    ALOGV("%s: update audio format %u, %u, %d", __func__, sampleRate, channelCount, format);
+
+    std::lock_guard l(mLock);
+
+    bool differentSampleRate = (mSampleRate != sampleRate);
+    bool differentChannelCount = (mChannelCount != channelCount);
+
+    mSampleRate = sampleRate;
+    mFramesPerMelValue = sampleRate * kSecondsPerMelValue;
+    mChannelCount = channelCount;
+    mFormat = format;
+
+    if (differentSampleRate || differentChannelCount) {
+        mAWeightSamples.resize(mFramesPerMelValue * mChannelCount);
+        mFloatSamples.resize(mFramesPerMelValue * mChannelCount);
+    }
+    if (differentChannelCount) {
+        mCurrentChannelEnergy.resize(channelCount);
+    }
+
+    createBiquads_l();
+}
+
+void MelProcessor::applyAWeight_l(const void* buffer, size_t samples)
+{
+    memcpy_by_audio_format(mFloatSamples.data(), AUDIO_FORMAT_PCM_FLOAT, buffer, mFormat, samples);
+
+    float* tempFloat[2] = { mFloatSamples.data(), mAWeightSamples.data() };
+    int inIdx = 1, outIdx = 0;
+    const size_t frames = samples / mChannelCount;
+    for (const auto& biquad : mCascadedBiquads) {
+        outIdx ^= 1;
+        inIdx ^= 1;
+        biquad->process(tempFloat[outIdx], tempFloat[inIdx], frames);
+    }
+
+    // should not be the case since the size is odd
+    if (!(mCascadedBiquads.size() & 1)) {
+        std::swap(mFloatSamples, mAWeightSamples);
+    }
+}
+
+float MelProcessor::getCombinedChannelEnergy_l() {
+    float combinedEnergy = 0.0f;
+    for (auto& energy: mCurrentChannelEnergy) {
+        combinedEnergy += energy;
+        energy = 0;
+    }
+
+    combinedEnergy /= (float) mFramesPerMelValue;
+    return combinedEnergy;
+}
+
+void MelProcessor::addMelValue_l(float mel) {
+    mMelValues[mCurrentIndex] = mel;
+    ALOGV("%s: writing MEL %f at index %d for device %d",
+          __func__,
+          mel,
+          mCurrentIndex,
+          mDeviceId.load());
+
+    bool notifyWorker = false;
+
+    if (mel > mRs2UpperBound) {
+        mMelWorker.momentaryExposure(mel, mDeviceId);
+        notifyWorker = true;
+    }
+
+    bool reportContinuousValues = false;
+    if ((mMelValues[mCurrentIndex] < kRs1OutputdBFS && mCurrentIndex > 0)) {
+        reportContinuousValues = true;
+    } else if (mMelValues[mCurrentIndex] >= kRs1OutputdBFS) {
+        // only store MEL that are above RS1
+        ++mCurrentIndex;
+    }
+
+    if (reportContinuousValues || (mCurrentIndex > mMelValues.size() - 1)) {
+        mMelWorker.newMelValues(mMelValues, mCurrentIndex, mDeviceId);
+        notifyWorker = true;
+        mCurrentIndex = 0;
+    }
+
+    if (notifyWorker) {
+        mMelWorker.mCondVar.notify_one();
+    }
+}
+
+int32_t MelProcessor::process(const void* buffer, size_t bytes) {
+    if (mPaused) {
+        return 0;
+    }
+
+    // should be uncontested and not block if process method is called from a single thread
+    std::lock_guard<std::mutex> guard(mLock);
+
+    if (!isSampleRateSupported_l()) {
+        return 0;
+    }
+
+    const size_t bytes_per_sample = audio_bytes_per_sample(mFormat);
+    size_t samples = bytes_per_sample > 0 ? bytes / bytes_per_sample : 0;
+    while (samples > 0) {
+        const size_t requiredSamples =
+            mFramesPerMelValue * mChannelCount - mCurrentSamples;
+        size_t processSamples = std::min(requiredSamples, samples);
+        processSamples -= processSamples % mChannelCount;
+
+        applyAWeight_l(buffer, processSamples);
+
+        audio_utils_accumulate_energy(mAWeightSamples.data(),
+                                      AUDIO_FORMAT_PCM_FLOAT,
+                                      processSamples,
+                                      mChannelCount,
+                                      mCurrentChannelEnergy.data());
+        mCurrentSamples += processSamples;
+
+        ALOGVV(
+            "required:%zu, process:%zu, mCurrentChannelEnergy[0]:%f, mCurrentSamples:%zu",
+            requiredSamples,
+            processSamples,
+            mCurrentChannelEnergy[0],
+            mCurrentSamples.load());
+        if (processSamples < requiredSamples) {
+            return static_cast<int32_t>(bytes);
+        }
+
+        addMelValue_l(fmaxf(
+            audio_utils_power_from_energy(getCombinedChannelEnergy_l())
+                + kMelAdjustmentDb
+                + kMeldBFSTodBSPLOffset
+                + mAttenuationDB, 0.0f));
+
+        samples -= processSamples;
+        buffer =
+            (const uint8_t*) buffer + processSamples * bytes_per_sample;
+        mCurrentSamples = 0;
+    }
+
+    return static_cast<int32_t>(bytes);
+}
+
+void MelProcessor::setAttenuation(float attenuationDB) {
+    ALOGV("%s: setting the attenuation %f", __func__, attenuationDB);
+    mAttenuationDB = attenuationDB;
+}
+
+void MelProcessor::onLastStrongRef(const void* id __attribute__((unused))) {
+    mMelWorker.stop();
+    ALOGV("%s: Stopped thread: %s for device %d", __func__, mMelWorker.mThreadName.c_str(),
+          mDeviceId.load());
+}
+
+std::string MelProcessor::pointerString() const {
+    const void * address = static_cast<const void*>(this);
+    std::stringstream aStream;
+    aStream << address;
+    return aStream.str();
+}
+
+void MelProcessor::MelWorker::run() {
+    mThread = std::thread([&]{
+        // name the thread to help identification
+        androidSetThreadName(mThreadName.c_str());
+        ALOGV("%s::run(): Started thread", mThreadName.c_str());
+
+        while (true) {
+            std::unique_lock l(mCondVarMutex);
+            if (mStopRequested) {
+                return;
+            }
+            mCondVar.wait(l, [&] { return (mRbReadPtr != mRbWritePtr) || mStopRequested; });
+
+            while (mRbReadPtr != mRbWritePtr && !mStopRequested) {
+                ALOGV("%s::run(): new callbacks, rb idx read=%zu, write=%zu",
+                      mThreadName.c_str(),
+                      mRbReadPtr.load(),
+                      mRbWritePtr.load());
+                auto callback = mCallback.promote();
+                if (callback == nullptr) {
+                    ALOGW("%s::run(): MelCallback is null, quitting MelWorker",
+                          mThreadName.c_str());
+                    return;
+                }
+
+                MelCallbackData data = mCallbackRingBuffer[mRbReadPtr];
+                if (data.mMel != 0.f) {
+                    callback->onMomentaryExposure(data.mMel, data.mPort);
+                } else if (data.mMelsSize != 0) {
+                    callback->onNewMelValues(data.mMels, 0, data.mMelsSize, data.mPort);
+                } else {
+                    ALOGE("%s::run(): Invalid MEL data. Skipping callback", mThreadName.c_str());
+                }
+                incRingBufferIndex(mRbReadPtr);
+            }
+        }
+    });
+}
+
+void MelProcessor::MelWorker::stop() {
+    bool oldValue;
+    {
+        std::lock_guard l(mCondVarMutex);
+        oldValue = mStopRequested;
+        mStopRequested = true;
+    }
+    if (!oldValue) {
+        mCondVar.notify_one();
+        mThread.join();
+    }
+}
+
+void MelProcessor::MelWorker::momentaryExposure(float mel, audio_port_handle_t port) {
+    ALOGV("%s", __func__);
+
+    if (ringBufferIsFull()) {
+        ALOGW("%s: cannot add momentary exposure for port %d, MelWorker buffer is full", __func__,
+              port);
+        return;
+    }
+
+    // worker is thread-safe, no lock since there is only one writer and we take into account
+    // spurious wake-ups
+    mCallbackRingBuffer[mRbWritePtr].mMel = mel;
+    mCallbackRingBuffer[mRbWritePtr].mMelsSize = 0;
+    mCallbackRingBuffer[mRbWritePtr].mPort = port;
+
+    incRingBufferIndex(mRbWritePtr);
+}
+
+void MelProcessor::MelWorker::newMelValues(const std::vector<float>& mels,
+                                           size_t melsSize,
+                                           audio_port_handle_t port) {
+    ALOGV("%s", __func__);
+
+    if (ringBufferIsFull()) {
+        ALOGW("%s: cannot add %zu mel values for port %d, MelWorker buffer is full", __func__,
+              melsSize, port);
+        return;
+    }
+
+    // worker is thread-safe, no lock since there is only one writer and we take into account
+    // spurious wake-ups
+    std::copy_n(std::begin(mels), melsSize, mCallbackRingBuffer[mRbWritePtr].mMels.begin());
+    mCallbackRingBuffer[mRbWritePtr].mMelsSize = melsSize;
+    mCallbackRingBuffer[mRbWritePtr].mMel = 0.f;
+    mCallbackRingBuffer[mRbWritePtr].mPort = port;
+
+    incRingBufferIndex(mRbWritePtr);
+}
+
+bool MelProcessor::MelWorker::ringBufferIsFull() const {
+    size_t curIdx = mRbWritePtr.load();
+    size_t nextIdx = curIdx >= kRingBufferSize - 1 ? 0 : curIdx + 1;
+
+    return nextIdx == mRbReadPtr;
+}
+
+void MelProcessor::MelWorker::incRingBufferIndex(std::atomic_size_t& idx) {
+    size_t nextIdx;
+    size_t expected;
+    do {
+        expected = idx.load();
+        nextIdx = expected >= kRingBufferSize - 1 ? 0 : expected + 1;
+    } while (!idx.compare_exchange_strong(expected, nextIdx));
+}
+
+}  // namespace android::audio_utils
diff --git a/audio_utils/benchmarks/channelmix_benchmark.cpp b/audio_utils/benchmarks/channelmix_benchmark.cpp
index 1193c11..0215f47 100644
--- a/audio_utils/benchmarks/channelmix_benchmark.cpp
+++ b/audio_utils/benchmarks/channelmix_benchmark.cpp
@@ -49,43 +49,112 @@
 
 /*
 $ adb shell /data/benchmarktest64/channelmix_benchmark/channelmix_benchmark
-Pixel 4XL Coral arm64 benchmark
+Pixel 7 arm64 benchmark
 
 -----------------------------------------------------------
 Benchmark                 Time             CPU   Iterations
 -----------------------------------------------------------
-BM_ChannelMix/0        2180 ns         2175 ns       321797 AUDIO_CHANNEL_OUT_MONO
-BM_ChannelMix/1        2180 ns         2175 ns       321901
-BM_ChannelMix/2        3265 ns         3256 ns       214957 AUDIO_CHANNEL_OUT_STEREO
-BM_ChannelMix/3        3987 ns         3978 ns       175964 AUDIO_CHANNEL_OUT_2POINT1
-BM_ChannelMix/4        4713 ns         4700 ns       148922 AUDIO_CHANNEL_OUT_2POINT0POINT2
-BM_ChannelMix/5        1050 ns         1047 ns       668462 AUDIO_CHANNEL_OUT_QUAD
-BM_ChannelMix/6        1052 ns         1049 ns       667155 AUDIO_CHANNEL_OUT_QUAD_SIDE
-BM_ChannelMix/7        4714 ns         4701 ns       148926 AUDIO_CHANNEL_OUT_SURROUND
-BM_ChannelMix/8        5437 ns         5422 ns       129099 AUDIO_CHANNEL_OUT_2POINT1POINT2
-BM_ChannelMix/9        5437 ns         5422 ns       129108 AUDIO_CHANNEL_OUT_3POINT0POINT2
-BM_ChannelMix/10       5435 ns         5422 ns       129083 AUDIO_CHANNEL_OUT_PENTA
-BM_ChannelMix/11       6161 ns         6143 ns       113945 AUDIO_CHANNEL_OUT_3POINT1POINT2
-BM_ChannelMix/12       2511 ns         2504 ns       279645 AUDIO_CHANNEL_OUT_5POINT1
-BM_ChannelMix/13       2511 ns         2503 ns       279621 AUDIO_CHANNEL_OUT_5POINT1_SIDE
-BM_ChannelMix/14       6882 ns         6865 ns       101946 AUDIO_CHANNEL_OUT_6POINT1
-BM_ChannelMix/15       7607 ns         7586 ns        92271 AUDIO_CHANNEL_OUT_5POINT1POINT2
-BM_ChannelMix/16       2812 ns         2804 ns       249729 AUDIO_CHANNEL_OUT_7POINT1
-BM_ChannelMix/17       9055 ns         9032 ns        77517 AUDIO_CHANNEL_OUT_5POINT1POINT4
-BM_ChannelMix/18       9055 ns         9031 ns        77477 AUDIO_CHANNEL_OUT_7POINT1POINT2
-BM_ChannelMix/19      10510 ns        10479 ns        66762 AUDIO_CHANNEL_OUT_7POINT1POINT4
-BM_ChannelMix/20      11293 ns        11262 ns        62135 AUDIO_CHANNEL_OUT_13POINT_360RA
-BM_ChannelMix/21      19886 ns        19829 ns        35265 AUDIO_CHANNEL_OUT_22POINT2
+channelmix_benchmark:
+  #BM_ChannelMix_Stereo/0             2266 ns     2251 ns       310903
+  #BM_ChannelMix_Stereo/1             2262 ns     2251 ns       310898
+  #BM_ChannelMix_Stereo/2              255 ns      254 ns      2754285
+  #BM_ChannelMix_Stereo/3             2969 ns     2954 ns       235901
+  #BM_ChannelMix_Stereo/4             3350 ns     3334 ns       209901
+  #BM_ChannelMix_Stereo/5              814 ns      810 ns       863246
+  #BM_ChannelMix_Stereo/6              814 ns      810 ns       863255
+  #BM_ChannelMix_Stereo/7             3349 ns     3328 ns       210234
+  #BM_ChannelMix_Stereo/8             3671 ns     3654 ns       191555
+  #BM_ChannelMix_Stereo/9             3680 ns     3654 ns       191583
+  #BM_ChannelMix_Stereo/10            3667 ns     3650 ns       191738
+  #BM_ChannelMix_Stereo/11            4109 ns     4089 ns       171118
+  #BM_ChannelMix_Stereo/12            1209 ns     1203 ns       581812
+  #BM_ChannelMix_Stereo/13            1209 ns     1203 ns       581666
+  #BM_ChannelMix_Stereo/14            4694 ns     4674 ns       149798
+  #BM_ChannelMix_Stereo/15            1306 ns     1301 ns       537843
+  #BM_ChannelMix_Stereo/16            1307 ns     1301 ns       537898
+  #BM_ChannelMix_Stereo/17            2059 ns     2050 ns       341145
+  #BM_ChannelMix_Stereo/18            2053 ns     2043 ns       342709
+  #BM_ChannelMix_Stereo/19            2462 ns     2451 ns       285554
+  #BM_ChannelMix_Stereo/20            7889 ns     7853 ns        89005
+  #BM_ChannelMix_Stereo/21            6133 ns     6104 ns       114499
+  #BM_ChannelMix_5Point1/0            1676 ns     1665 ns       420195
+  #BM_ChannelMix_5Point1/1            1675 ns     1667 ns       419527
+  #BM_ChannelMix_5Point1/2             537 ns      535 ns      1310551
+  #BM_ChannelMix_5Point1/3            3039 ns     3024 ns       231306
+  #BM_ChannelMix_5Point1/4            3763 ns     3744 ns       186929
+  #BM_ChannelMix_5Point1/5             698 ns      695 ns       990457
+  #BM_ChannelMix_5Point1/6             661 ns      657 ns      1058724
+  #BM_ChannelMix_5Point1/7            3766 ns     3748 ns       186771
+  #BM_ChannelMix_5Point1/8            4395 ns     4374 ns       159819
+  #BM_ChannelMix_5Point1/9            4389 ns     4369 ns       160144
+  #BM_ChannelMix_5Point1/10           4390 ns     4369 ns       160196
+  #BM_ChannelMix_5Point1/11           5111 ns     5084 ns       137574
+  #BM_ChannelMix_5Point1/12            652 ns      649 ns      1086857
+  #BM_ChannelMix_5Point1/13            653 ns      649 ns      1072477
+  #BM_ChannelMix_5Point1/14           5762 ns     5734 ns       122129
+  #BM_ChannelMix_5Point1/15            778 ns      774 ns       903415
+  #BM_ChannelMix_5Point1/16            778 ns      775 ns       903085
+  #BM_ChannelMix_5Point1/17           1220 ns     1214 ns       575908
+  #BM_ChannelMix_5Point1/18           1015 ns     1006 ns       694142
+  #BM_ChannelMix_5Point1/19           1382 ns     1373 ns       509721
+  #BM_ChannelMix_5Point1/20          10184 ns    10076 ns        69550
+  #BM_ChannelMix_5Point1/21           5401 ns     5362 ns       130580
+  #BM_ChannelMix_7Point1/0            1644 ns     1632 ns       428673
+  #BM_ChannelMix_7Point1/1            1640 ns     1633 ns       428639
+  #BM_ChannelMix_7Point1/2             722 ns      719 ns       973262
+  #BM_ChannelMix_7Point1/3            3076 ns     3062 ns       228509
+  #BM_ChannelMix_7Point1/4            3902 ns     3884 ns       180207
+  #BM_ChannelMix_7Point1/5             727 ns      723 ns       968505
+  #BM_ChannelMix_7Point1/6            3905 ns     3886 ns       180132
+  #BM_ChannelMix_7Point1/7            3903 ns     3886 ns       180110
+  #BM_ChannelMix_7Point1/8            4723 ns     4700 ns       148911
+  #BM_ChannelMix_7Point1/9            4727 ns     4704 ns       148850
+  #BM_ChannelMix_7Point1/10           4723 ns     4702 ns       148944
+  #BM_ChannelMix_7Point1/11           5518 ns     5492 ns       127454
+  #BM_ChannelMix_7Point1/12            723 ns      720 ns       971533
+  #BM_ChannelMix_7Point1/13           5520 ns     5492 ns       127444
+  #BM_ChannelMix_7Point1/14           6299 ns     6270 ns       111619
+  #BM_ChannelMix_7Point1/15            561 ns      559 ns      1266804
+  #BM_ChannelMix_7Point1/16            563 ns      559 ns      1254781
+  #BM_ChannelMix_7Point1/17           1240 ns     1234 ns       561452
+  #BM_ChannelMix_7Point1/18           1100 ns     1095 ns       638789
+  #BM_ChannelMix_7Point1/19           1525 ns     1518 ns       460122
+  #BM_ChannelMix_7Point1/20          10998 ns    10950 ns        63928
+  #BM_ChannelMix_7Point1/21           4656 ns     4621 ns       151487
+  #BM_ChannelMix_7Point1Point4/0      2301 ns     2290 ns       305500
+  #BM_ChannelMix_7Point1Point4/1      2301 ns     2290 ns       305620
+  #BM_ChannelMix_7Point1Point4/2       913 ns      908 ns       770049
+  #BM_ChannelMix_7Point1Point4/3      4232 ns     4212 ns       166032
+  #BM_ChannelMix_7Point1Point4/4      5241 ns     5216 ns       134179
+  #BM_ChannelMix_7Point1Point4/5      1084 ns     1079 ns       648761
+  #BM_ChannelMix_7Point1Point4/6      5243 ns     5219 ns       134126
+  #BM_ChannelMix_7Point1Point4/7      5250 ns     5226 ns       133968
+  #BM_ChannelMix_7Point1Point4/8      6225 ns     6194 ns       112973
+  #BM_ChannelMix_7Point1Point4/9      6223 ns     6193 ns       112985
+  #BM_ChannelMix_7Point1Point4/10     6223 ns     6193 ns       113047
+  #BM_ChannelMix_7Point1Point4/11     7416 ns     7380 ns        94840
+  #BM_ChannelMix_7Point1Point4/12      903 ns      899 ns       778228
+  #BM_ChannelMix_7Point1Point4/13     7414 ns     7380 ns        94835
+  #BM_ChannelMix_7Point1Point4/14     8354 ns     8314 ns        84219
+  #BM_ChannelMix_7Point1Point4/15      818 ns      815 ns       865119
+  #BM_ChannelMix_7Point1Point4/16      820 ns      816 ns       854456
+  #BM_ChannelMix_7Point1Point4/17     1106 ns     1100 ns       636240
+  #BM_ChannelMix_7Point1Point4/18     1104 ns     1099 ns       636313
+  #BM_ChannelMix_7Point1Point4/19     1151 ns     1145 ns       611497
+  #BM_ChannelMix_7Point1Point4/20    14454 ns    14385 ns        48561
+  #BM_ChannelMix_7Point1Point4/21     5982 ns     5954 ns       117562
 */
 
-static void BM_ChannelMix(benchmark::State& state) {
+template<audio_channel_mask_t OUTPUT_CHANNEL_MASK>
+static void BenchmarkChannelMix(benchmark::State& state) {
     const audio_channel_mask_t channelMask = kChannelPositionMasks[state.range(0)];
     using namespace ::android::audio_utils::channels;
-    ChannelMix channelMix(channelMask);
+    ChannelMix<OUTPUT_CHANNEL_MASK> channelMix(channelMask);
+    const size_t outChannels = audio_channel_count_from_out_mask(OUTPUT_CHANNEL_MASK);
     constexpr size_t frameCount = 1024;
     size_t inChannels = audio_channel_count_from_out_mask(channelMask);
     std::vector<float> input(inChannels * frameCount);
-    std::vector<float> output(FCC_2 * frameCount);
+    std::vector<float> output(outChannels * frameCount);
     constexpr float amplitude = 0.01f;
 
     std::minstd_rand gen(channelMask);
@@ -107,12 +176,34 @@
     state.SetLabel(audio_channel_out_mask_to_string(channelMask));
 }
 
// Benchmark: remix each input channel mask down to AUDIO_CHANNEL_OUT_STEREO.
static void BM_ChannelMix_Stereo(benchmark::State& state) {
    BenchmarkChannelMix<AUDIO_CHANNEL_OUT_STEREO>(state);
}
+
// Benchmark: remix each input channel mask to a 5.1 output layout.
static void BM_ChannelMix_5Point1(benchmark::State& state) {
    BenchmarkChannelMix<AUDIO_CHANNEL_OUT_5POINT1>(state);
}
+
// Benchmark: remix each input channel mask to a 7.1 output layout.
static void BM_ChannelMix_7Point1(benchmark::State& state) {
    BenchmarkChannelMix<AUDIO_CHANNEL_OUT_7POINT1>(state);
}
+
// Benchmark: remix each input channel mask to a 7.1.4 output layout.
static void BM_ChannelMix_7Point1Point4(benchmark::State& state) {
    BenchmarkChannelMix<AUDIO_CHANNEL_OUT_7POINT1POINT4>(state);
}
+
 static void ChannelMixArgs(benchmark::internal::Benchmark* b) {
     for (int i = 0; i < (int)std::size(kChannelPositionMasks); i++) {
         b->Args({i});
     }
 }
 
-BENCHMARK(BM_ChannelMix)->Apply(ChannelMixArgs);
// Register every output-mask benchmark over the full set of input masks.
BENCHMARK(BM_ChannelMix_Stereo)->Apply(ChannelMixArgs);

BENCHMARK(BM_ChannelMix_5Point1)->Apply(ChannelMixArgs);

BENCHMARK(BM_ChannelMix_7Point1)->Apply(ChannelMixArgs);

BENCHMARK(BM_ChannelMix_7Point1Point4)->Apply(ChannelMixArgs);

BENCHMARK_MAIN();
diff --git a/audio_utils/hal_smoothness.c b/audio_utils/hal_smoothness.c
new file mode 100644
index 0000000..1f3c04b
--- /dev/null
+++ b/audio_utils/hal_smoothness.c
@@ -0,0 +1,223 @@
+/*
+** Copyright 2022, The Android Open-Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "hal_smoothness"
+
+#include <audio_utils/hal_smoothness.h>
+#include <errno.h>
+#include <float.h>
+#include <log/log.h>
+#include <math.h>
+#include <stdlib.h>
+
typedef struct hal_smoothness_internal {
  // Public interface handed to clients.  It is the first member so the
  // "struct hal_smoothness *" handle can be cast back to this internal type
  // (see the casts in increment_underrun/increment_overrun/flush).
  struct hal_smoothness itfe;

  // Running counters for the current measurement window.
  struct hal_smoothness_metrics metrics;

  // number of "total_writes" before flushing smoothness data to system (ie.
  // logcat) A flush will also reset all numeric values in the "metrics" field.
  unsigned int num_writes_to_log;

  // Client defined function to flush smoothness metrics.
  void (*client_flush_cb)(struct hal_smoothness_metrics *smoothness_metrics,
                          void *private_data);

  // Client provided pointer, passed back verbatim to client_flush_cb.
  void *private_data;
} hal_smoothness_internal;
+
+static void reset_metrics(struct hal_smoothness_metrics *metrics) {
+  metrics->underrun_count = 0;
+  metrics->overrun_count = 0;
+  metrics->total_writes = 0;
+  metrics->total_frames_written = 0;
+  metrics->total_frames_lost = 0;
+  metrics->timestamp = 0;
+  metrics->smoothness_value = 0.0;
+}
+
// Adds add_amount into *data (wrapping on overflow, as unsigned arithmetic
// does).  Returns true iff the addition overflowed.
static bool add_check_overflow(unsigned int *data, unsigned int add_amount) {
  unsigned int sum;
  const bool overflowed = __builtin_add_overflow(*data, add_amount, &sum);
  *data = sum;
  return overflowed;
}
+
+static int increment_underrun(struct hal_smoothness *smoothness,
+                              unsigned int frames_lost) {
+  if (smoothness == NULL) {
+    return -EINVAL;
+  }
+
+  hal_smoothness_internal *smoothness_meta =
+      (hal_smoothness_internal *)smoothness;
+
+  if (add_check_overflow(&smoothness_meta->metrics.underrun_count, 1)) {
+    return -EOVERFLOW;
+  }
+
+  if (add_check_overflow(&smoothness_meta->metrics.total_frames_lost,
+                         frames_lost)) {
+    return -EOVERFLOW;
+  }
+
+  return 0;
+}
+
+static int increment_overrun(struct hal_smoothness *smoothness,
+                             unsigned int frames_lost) {
+  if (smoothness == NULL) {
+    return -EINVAL;
+  }
+
+  hal_smoothness_internal *smoothness_meta =
+      (hal_smoothness_internal *)smoothness;
+
+  if (add_check_overflow(&smoothness_meta->metrics.overrun_count, 1)) {
+    return -EOVERFLOW;
+  }
+
+  if (add_check_overflow(&smoothness_meta->metrics.total_frames_lost,
+                         frames_lost)) {
+    return -EOVERFLOW;
+  }
+
+  return 0;
+}
+
// Computes the smoothness value -ln(lost / (lost + written)).
// Returns DBL_MAX when no frames were lost (perfect smoothness).
//
// NOTE(review): error sentinels (-EINVAL, -EOVERFLOW) are reported through
// the same double return channel as real values; legitimate results are
// always >= 0, which is how callers could tell them apart.
static double calc_smoothness_value(unsigned int total_frames_lost,
                                    unsigned int total_frames_written) {
  if (total_frames_lost == 0) {
    if (total_frames_written == 0) {
      // If error checks are correct in this library, this error shouldn't be
      // possible.
      ALOGE("total_frames_lost + total_frames_written shouldn't = 0");
      return -EINVAL;
    }
    // No bytes dropped, so audio smoothness is perfect.
    return DBL_MAX;
  }

  unsigned int total_frames = total_frames_lost;
  if (__builtin_add_overflow(total_frames, total_frames_written,
                             &total_frames)) {
    return -EOVERFLOW;
  }

  // total_frames >= total_frames_lost > 0, so the ratio is in (0, 1] and
  // neither the division nor log(0) can misbehave.
  return -log((double)total_frames_lost / total_frames);
}
+
+static int flush(struct hal_smoothness *smoothness) {
+  if (smoothness == NULL) {
+    return -EINVAL;
+  }
+
+  hal_smoothness_internal *smoothness_meta =
+      (hal_smoothness_internal *)smoothness;
+
+  smoothness_meta->metrics.smoothness_value =
+      calc_smoothness_value(smoothness_meta->metrics.total_frames_lost,
+                            smoothness_meta->metrics.total_frames_written);
+  smoothness_meta->client_flush_cb(&smoothness_meta->metrics,
+                                   smoothness_meta->private_data);
+  reset_metrics(&smoothness_meta->metrics);
+
+  return 0;
+}
+
+static int increment_total_writes(struct hal_smoothness *smoothness,
+                                  unsigned int frames_written,
+                                  unsigned long timestamp) {
+  if (smoothness == NULL) {
+    return -EINVAL;
+  }
+
+  hal_smoothness_internal *smoothness_meta =
+      (hal_smoothness_internal *)smoothness;
+
+  if (add_check_overflow(&smoothness_meta->metrics.total_writes, 1)) {
+    return -EOVERFLOW;
+  }
+
+  if (add_check_overflow(&smoothness_meta->metrics.total_frames_written,
+                         frames_written)) {
+    return -EOVERFLOW;
+  }
+  smoothness_meta->metrics.timestamp = timestamp;
+
+  // "total_writes" count has met a value where the client's callback function
+  // should be called
+  if (smoothness_meta->metrics.total_writes >=
+      smoothness_meta->num_writes_to_log) {
+    flush(smoothness);
+  }
+
+  return 0;
+}
+
+int hal_smoothness_initialize(
+    struct hal_smoothness **smoothness, unsigned int version,
+    unsigned int num_writes_to_log,
+    void (*client_flush_cb)(struct hal_smoothness_metrics *, void *),
+    void *private_data) {
+  if (num_writes_to_log == 0) {
+    ALOGE("num_writes_to_logs must be > 0");
+
+    return -EINVAL;
+  }
+
+  if (client_flush_cb == NULL) {
+    ALOGE("client_flush_cb can't be NULL");
+
+    return -EINVAL;
+  }
+
+  hal_smoothness_internal *smoothness_meta;
+  smoothness_meta =
+      (hal_smoothness_internal *)calloc(1, sizeof(hal_smoothness_internal));
+
+  if (smoothness_meta == NULL) {
+    int ret_err = errno;
+    ALOGE("failed to calloc hal_smoothness_internal.");
+    return ret_err;
+  }
+
+  smoothness_meta->itfe.version = version;
+  smoothness_meta->itfe.increment_underrun = increment_underrun;
+  smoothness_meta->itfe.increment_overrun = increment_overrun;
+  smoothness_meta->itfe.increment_total_writes = increment_total_writes;
+  smoothness_meta->itfe.flush = flush;
+
+  smoothness_meta->num_writes_to_log = num_writes_to_log;
+  smoothness_meta->client_flush_cb = client_flush_cb;
+  smoothness_meta->private_data = private_data;
+
+  *smoothness = &smoothness_meta->itfe;
+
+  return 0;
+}
+
// Frees an instance created by hal_smoothness_initialize and clears the
// caller's handle.  Safe to call with NULL or with an already-cleared handle.
void hal_smoothness_free(struct hal_smoothness **smoothness) {
  if (smoothness == NULL) {
    return;
  }

  struct hal_smoothness *instance = *smoothness;
  if (instance == NULL) {
    return;
  }

  // The public itfe handle is the first member of hal_smoothness_internal,
  // so freeing it releases the whole internal allocation.
  free(instance);
  *smoothness = NULL;
}
diff --git a/audio_utils/include/audio_utils/ChannelMix.h b/audio_utils/include/audio_utils/ChannelMix.h
index 1799c94..a4ded79 100644
--- a/audio_utils/include/audio_utils/ChannelMix.h
+++ b/audio_utils/include/audio_utils/ChannelMix.h
@@ -20,17 +20,487 @@
 
 namespace android::audio_utils::channels {
 
+// sparseChannelMatrixMultiply must be compiled with specific compiler flags
+// for optimization.  The body is in ChannelMix.cpp.
+template <audio_channel_mask_t INPUT_CHANNEL_MASK,
+        audio_channel_mask_t OUTPUT_CHANNEL_MASK, bool ACCUMULATE>
+bool sparseChannelMatrixMultiply(const float *src, float *dst, size_t frameCount);
+
// Limits a sample to +/- sqrt(2) (i.e. +3 dB of headroom) after mixing.
// Implemented with fmax/fmin (rather than comparisons) to keep the original
// NaN handling: a NaN input resolves to -LIMIT_AMPLITUDE.
inline float clamp(float value) {
    constexpr float LIMIT_AMPLITUDE = M_SQRT2;       // 3dB = 1.41421356
    const float lowerBounded = fmax(value, -LIMIT_AMPLITUDE);
    return fmin(lowerBounded, LIMIT_AMPLITUDE);
}
+
+// This method can be evaluated constexpr.
+template <audio_channel_mask_t OUTPUT_CHANNEL_MASK, size_t M>
+constexpr bool fillChannelMatrix(audio_channel_mask_t INPUT_CHANNEL_MASK,
+        float (&matrix)[M][audio_channel_count_from_out_mask(OUTPUT_CHANNEL_MASK)]) {
+
+    // This is a bit long since there is no functional partial template specialization.
+    if constexpr (OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_STEREO) {
+        // Compute at what index each channel is: samples will be in the following order:
+        //   FL  FR  FC    LFE   BL  BR  BC    SL  SR
+        //
+        // Prior to API 32, use of downmix resulted in channels being scaled in half amplitude.
+        // We now use a compliant downmix matrix for 5.1 with the following standards:
+        // ITU-R 775-2, ATSC A/52, ETSI TS 101 154, IEC 14496-3, which is unity gain for the
+        // front left and front right channel contribution.
+        //
+        // For 7.1 to 5.1 we set equal contributions for the side and back channels
+        // which follow Dolby downmix recommendations.
+        //
+        // We add contributions from the LFE into the L and R channels
+        // at a weight of 0.5 (rather than the power preserving 0.707)
+        // which is to ensure that headphones can still experience LFE
+        // with lesser risk of speaker overload.
+        //
+        // Note: geometrically left and right channels contribute only to the corresponding
+        // left and right outputs respectively.  Geometrically center channels contribute
+        // to both left and right outputs, so they are scaled by 0.707 to preserve power.
+        //
+        //  (transfer matrix)
+        //   FL  FR  FC    LFE  BL  BR     BC  SL    SR
+        //   1.0     0.707 0.5  0.707      0.5 0.707
+        //       1.0 0.707 0.5       0.707 0.5       0.707
+        size_t index = 0;
+        constexpr float COEF_25 = 0.2508909536f;
+        constexpr float COEF_35 = 0.3543928915f;
+        constexpr float COEF_36 = 0.3552343859f;
+        constexpr float COEF_61 = 0.6057043428f;
+        constexpr float MINUS_3_DB_IN_FLOAT = M_SQRT1_2; // -3dB = 0.70710678
+
+        constexpr size_t FL = 0;
+        constexpr size_t FR = 1;
+        for (unsigned tmp = INPUT_CHANNEL_MASK; tmp != 0; ++index) {
+            if (index >= M) return false;
+            const unsigned lowestBit = tmp & -(signed)tmp;
+            switch (lowestBit) {
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT:
+                    matrix[index][FL] = 1.f;
+                    matrix[index][FR] = 0.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_SIDE_LEFT:
+                case AUDIO_CHANNEL_OUT_BACK_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_LEFT:
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_LEFT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FL] = MINUS_3_DB_IN_FLOAT;
+                    matrix[index][FR] = 0.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT:
+                    matrix[index][FL] = 0.f;
+                    matrix[index][FR] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_SIDE_RIGHT:
+                case AUDIO_CHANNEL_OUT_BACK_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT:
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_RIGHT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FL] = 0.f;
+                    matrix[index][FR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER:
+                    matrix[index][FL] = matrix[index][FR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT:
+                    matrix[index][FL] = COEF_61;
+                    matrix[index][FR] = 0.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT:
+                    matrix[index][FL] = 0.f;
+                    matrix[index][FR] = COEF_61;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER:
+                    matrix[index][FL] = COEF_61;
+                    matrix[index][FR] = COEF_25;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER:
+                    matrix[index][FL] = COEF_25;
+                    matrix[index][FR] = COEF_61;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_CENTER:
+                    matrix[index][FL] = matrix[index][FR] = COEF_36;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_BACK_CENTER:
+                    matrix[index][FL] = matrix[index][FR] = COEF_35;
+                    break;
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2:
+                    matrix[index][FL] = 0.f;
+                    matrix[index][FR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY:
+                    if (INPUT_CHANNEL_MASK & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) {
+                        matrix[index][FL] = MINUS_3_DB_IN_FLOAT;
+                        matrix[index][FR] = 0.f;
+                        break;
+                    }
+                    FALLTHROUGH_INTENDED;
+                case AUDIO_CHANNEL_OUT_BACK_CENTER:
+                    matrix[index][FL] = matrix[index][FR] = 0.5f;
+                    break;
+            }
+            tmp ^= lowestBit;
+        }
+        return true;
+    } else if constexpr (OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_5POINT1) {
+        //   FL  FR  FC  LFE  BL  BR
+        size_t index = 0;
+        constexpr float MINUS_3_DB_IN_FLOAT = M_SQRT1_2; // -3dB = 0.70710678
+        constexpr float MINUS_4_5_DB_IN_FLOAT = 0.5946035575f;
+
+        constexpr size_t FL = 0;
+        constexpr size_t FR = 1;
+        constexpr size_t FC = 2;
+        constexpr size_t LFE = 3;
+        constexpr size_t BL = 4;
+        constexpr size_t BR = 5;
+        for (unsigned tmp = INPUT_CHANNEL_MASK; tmp != 0; ++index) {
+            if (index >= M) return false;
+            const unsigned lowestBit = tmp & -(signed)tmp;
+            matrix[index][FL] = matrix[index][FR] = matrix[index][FC] = 0.f;
+            matrix[index][LFE] = matrix[index][BL] = matrix[index][BR] = 0.f;
+            switch (lowestBit) {
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT:
+                    matrix[index][FL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT:
+                    matrix[index][FR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER:
+                    matrix[index][FC] = 1.f;
+                    break;
+
+                // ADJUST
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_LEFT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FL] = MINUS_3_DB_IN_FLOAT;
+                    matrix[index][BL] = MINUS_4_5_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_RIGHT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FR] = MINUS_3_DB_IN_FLOAT;
+                    matrix[index][BR] = MINUS_4_5_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER:
+                    matrix[index][FL] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][FC] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER:
+                    matrix[index][FR] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][FC] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_SIDE_LEFT:
+                case AUDIO_CHANNEL_OUT_BACK_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_LEFT:
+                    matrix[index][BL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_SIDE_RIGHT:
+                case AUDIO_CHANNEL_OUT_BACK_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT:
+                    matrix[index][BR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT:
+                    matrix[index][BL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT:
+                    matrix[index][BR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_BACK_CENTER:
+                case AUDIO_CHANNEL_OUT_BACK_CENTER:
+                    matrix[index][BL] = matrix[index][BR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_CENTER:
+                    matrix[index][FC] = matrix[index][BL] = matrix[index][BR] = 0.5f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY:
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2:
+                    matrix[index][LFE] = 1.f;
+                    break;
+            }
+            tmp ^= lowestBit;
+        }
+        return true;
+    } else if constexpr (OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_7POINT1) {
+        //   FL  FR  FC  LFE  BL  BR  SL  SR
+        size_t index = 0;
+        constexpr float MINUS_3_DB_IN_FLOAT = M_SQRT1_2; // -3dB = 0.70710678
+        constexpr float MINUS_4_5_DB_IN_FLOAT = 0.5946035575f;
+
+        constexpr size_t FL = 0;
+        constexpr size_t FR = 1;
+        constexpr size_t FC = 2;
+        constexpr size_t LFE = 3;
+        constexpr size_t BL = 4;
+        constexpr size_t BR = 5;
+        constexpr size_t SL = 6;
+        constexpr size_t SR = 7;
+        for (unsigned tmp = INPUT_CHANNEL_MASK; tmp != 0; ++index) {
+            if (index >= M) return false;
+            const unsigned lowestBit = tmp & -(signed)tmp;
+            matrix[index][FL] = matrix[index][FR] = matrix[index][FC] = 0.f;
+            matrix[index][LFE] = matrix[index][BL] = matrix[index][BR] = 0.f;
+            switch (lowestBit) {
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT:
+                    matrix[index][FL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT:
+                    matrix[index][FR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER:
+                    matrix[index][FC] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_LEFT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FL] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][SL] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_RIGHT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FR] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][SR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER:
+                    matrix[index][FL] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][FC] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER:
+                    matrix[index][FR] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][FC] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_BACK_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_LEFT:
+                    matrix[index][BL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_BACK_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT:
+                    matrix[index][BR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_SIDE_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT:
+                    matrix[index][SL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_SIDE_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT:
+                    matrix[index][SR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_BACK_CENTER:
+                case AUDIO_CHANNEL_OUT_BACK_CENTER:
+                    matrix[index][BL] = matrix[index][BR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_CENTER:
+                    matrix[index][FC] = matrix[index][BL] = matrix[index][BR] = 0.5f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY:
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2:
+                    matrix[index][LFE] = 1.f;
+                    break;
+            }
+            tmp ^= lowestBit;
+        }
+        return true;
+    } else if constexpr (OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_7POINT1POINT4) {
+        //   FL  FR  FC  LFE  BL  BR  SL  SR  TFL  TFR  TBL  TBR
+        size_t index = 0;
+        constexpr float MINUS_3_DB_IN_FLOAT = M_SQRT1_2; // -3dB = 0.70710678
+        constexpr float MINUS_4_5_DB_IN_FLOAT = 0.5946035575f;
+
+        constexpr size_t FL = 0;
+        constexpr size_t FR = 1;
+        constexpr size_t FC = 2;
+        constexpr size_t LFE = 3;
+        constexpr size_t BL = 4;
+        constexpr size_t BR = 5;
+        constexpr size_t SL = 6;
+        constexpr size_t SR = 7;
+        constexpr size_t TFL = 8;
+        constexpr size_t TFR = 9;
+        constexpr size_t TBL = 10;
+        constexpr size_t TBR = 11;
+        for (unsigned tmp = INPUT_CHANNEL_MASK; tmp != 0; ++index) {
+            if (index >= M) return false;
+            const unsigned lowestBit = tmp & -(signed)tmp;
+            matrix[index][FL] = matrix[index][FR] = matrix[index][FC] = 0.f;
+            matrix[index][LFE] = matrix[index][BL] = matrix[index][BR] = 0.f;
+            switch (lowestBit) {
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT:
+                    matrix[index][TFL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT:
+                    matrix[index][TFR] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER:
+                    matrix[index][TFL] = matrix[index][TFR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT:
+                    matrix[index][FL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT:
+                    matrix[index][FR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_CENTER:
+                case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER:
+                    matrix[index][FC] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_LEFT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FL] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][SL] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_WIDE_RIGHT: // FRONT_WIDE closer to SIDE.
+                    matrix[index][FR] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][SR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER:
+                    matrix[index][FL] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][FC] = MINUS_3_DB_IN_FLOAT;
+                    break;
+                case AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER:
+                    matrix[index][FR] = MINUS_4_5_DB_IN_FLOAT;
+                    matrix[index][FC] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_BACK_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_LEFT:
+                    matrix[index][BL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_BACK_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT:
+                    matrix[index][BR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_SIDE_LEFT:
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT:
+                    matrix[index][SL] = 1.f;
+                    break;
+                case AUDIO_CHANNEL_OUT_SIDE_RIGHT:
+                case AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT:
+                    matrix[index][SR] = 1.f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_BACK_CENTER:
+                    matrix[index][TBL] = matrix[index][TBR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_BACK_CENTER:
+                    matrix[index][BL] = matrix[index][BR] = MINUS_3_DB_IN_FLOAT;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_TOP_CENTER:
+                    matrix[index][TFL] = matrix[index][TFR] = 0.5f;
+                    matrix[index][TBL] = matrix[index][TBR] = 0.5f;
+                    break;
+
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY:
+                case AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2:
+                    matrix[index][LFE] = 1.f;
+                    break;
+            }
+            tmp ^= lowestBit;
+        }
+        return true;
+    } else /* constexpr */ {
+        // We only accept NONE here as we don't do anything in that case.
+        static_assert(OUTPUT_CHANNEL_MASK==AUDIO_CHANNEL_NONE);
+        return true;
+    }
+    return false;
+}
+
/**
 * IChannelMix
 *
 * Abstract interface for remixing audio between positional channel masks.
 * The output channel mask is fixed by the concrete implementation; the input
 * mask may be set once via setInputChannelMask() or supplied per call via the
 * five-argument process() overload.  Instances are obtained from create().
 */
class IChannelMix {
public:
    virtual ~IChannelMix() = default;

    /**
     * Set the input channel mask.
     *
     * \param inputChannelMask channel position mask for input data.
     *
     * \return false if the channel mask is not supported.
     */
    virtual bool setInputChannelMask(audio_channel_mask_t inputChannelMask) = 0;

    /**
     * Returns the input channel mask.
     */
    virtual audio_channel_mask_t getInputChannelMask() const = 0;

    /**
     * Remixes audio data in src to dst.
     *
     * \param src          input audio buffer to remix
     * \param dst          remixed audio samples
     * \param frameCount   number of frames to remix
     * \param accumulate   is true if the remix is added to the destination or
     *                     false if the remix replaces the destination.
     *
     * \return false if the channel mask set is not supported.
     */
    virtual bool process(
            const float *src, float *dst, size_t frameCount, bool accumulate) const = 0;

    /**
     * Remixes audio data in src to dst.
     *
     * \param src          input audio buffer to remix
     * \param dst          remixed audio samples
     * \param frameCount   number of frames to remix
     * \param accumulate   is true if the remix is added to the destination or
     *                     false if the remix replaces the destination.
     * \param inputChannelMask channel position mask for input data.
     *
     * \return false if the channel mask set is not supported.
     */
    virtual bool process(const float *src, float *dst, size_t frameCount, bool accumulate,
            audio_channel_mask_t inputChannelMask) = 0;

    /** Built in ChannelMix factory. */
    static std::shared_ptr<IChannelMix> create(audio_channel_mask_t outputChannelMask);

    /** Returns true if the Built-in factory supports the outputChannelMask */
    static bool isOutputChannelMaskSupported(audio_channel_mask_t outputChannelMask);
};
+
 /**
  * ChannelMix
  *
  * Converts audio streams with different positional channel configurations.
- * Currently only downmix to stereo is supported, so there is no outputChannelMask argument.
  *
- * TODO: In the future, consider downmix to 7.1 and 5.1 targets instead of just stereo.
  */
-class ChannelMix {
+template <audio_channel_mask_t OUTPUT_CHANNEL_MASK>
+class ChannelMix : public IChannelMix {
 public:
-
     /**
      * Creates a ChannelMix object
      *
@@ -45,166 +515,32 @@
 
     ChannelMix() = default;
 
-    /**
-     * Set the input channel mask.
-     *
-     * \param inputChannelMask channel position mask for input data.
-     *
-     * \return false if the channel mask is not supported.
-     */
-    bool setInputChannelMask(audio_channel_mask_t inputChannelMask) {
+    bool setInputChannelMask(audio_channel_mask_t inputChannelMask) override {
         if (mInputChannelMask != inputChannelMask) {
             if (inputChannelMask & ~((1 << MAX_INPUT_CHANNELS_SUPPORTED) - 1)) {
                 return false;  // not channel position mask, or has unknown channels.
             }
-
-            // Compute at what index each channel is: samples will be in the following order:
-            //   FL  FR  FC    LFE   BL  BR  BC    SL  SR
-            //
-            // Prior to API 32, use of downmix resulted in channels being scaled in half amplitude.
-            // We now use a compliant downmix matrix for 5.1 with the following standards:
-            // ITU-R 775-2, ATSC A/52, ETSI TS 101 154, IEC 14496-3, which is unity gain for the
-            // front left and front right channel contribution.
-            //
-            // For 7.1 to 5.1 we set equal contributions for the side and back channels
-            // which follow Dolby downmix recommendations.
-            //
-            // We add contributions from the LFE into the L and R channels
-            // at a weight of 0.5 (rather than the power preserving 0.707)
-            // which is to ensure that headphones can still experience LFE
-            // with lesser risk of speaker overload.
-            //
-            // Note: geometrically left and right channels contribute only to the corresponding
-            // left and right outputs respectively.  Geometrically center channels contribute
-            // to both left and right outputs, so they are scaled by 0.707 to preserve power.
-            //
-            //  (transfer matrix)
-            //   FL  FR  FC    LFE  BL  BR     BC  SL    SR
-            //   1.0     0.707 0.5  0.707      0.5 0.707
-            //       1.0 0.707 0.5       0.707 0.5       0.707
-            int index = 0;
-            constexpr float COEF_25 = 0.2508909536f;
-            constexpr float COEF_35 = 0.3543928915f;
-            constexpr float COEF_36 = 0.3552343859f;
-            constexpr float COEF_61 = 0.6057043428f;
-            for (unsigned tmp = inputChannelMask; tmp != 0; ++index) {
-                const unsigned lowestBit = tmp & -(signed)tmp;
-                switch (lowestBit) {
-                    case AUDIO_CHANNEL_OUT_FRONT_LEFT:
-                    case AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT:
-                    case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT:
-                        mMatrix[index][0] = 1.f;
-                        mMatrix[index][1] = 0.f;
-                        break;
-                    case AUDIO_CHANNEL_OUT_SIDE_LEFT:
-                    case AUDIO_CHANNEL_OUT_BACK_LEFT:
-                    case AUDIO_CHANNEL_OUT_TOP_BACK_LEFT:
-                    case AUDIO_CHANNEL_OUT_FRONT_WIDE_LEFT: // FRONT_WIDE closer to SIDE.
-                        mMatrix[index][0] = MINUS_3_DB_IN_FLOAT;
-                        mMatrix[index][1] = 0.f;
-                        break;
-                    case AUDIO_CHANNEL_OUT_FRONT_RIGHT:
-                    case AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT:
-                    case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT:
-                        mMatrix[index][0] = 0.f;
-                        mMatrix[index][1] = 1.f;
-                        break;
-                    case AUDIO_CHANNEL_OUT_SIDE_RIGHT:
-                    case AUDIO_CHANNEL_OUT_BACK_RIGHT:
-                    case AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT:
-                    case AUDIO_CHANNEL_OUT_FRONT_WIDE_RIGHT: // FRONT_WIDE closer to SIDE.
-                        mMatrix[index][0] = 0.f;
-                        mMatrix[index][1] = MINUS_3_DB_IN_FLOAT;
-                        break;
-                    case AUDIO_CHANNEL_OUT_FRONT_CENTER:
-                    case AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER:
-                    case AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER:
-                        mMatrix[index][0] = mMatrix[index][1] = MINUS_3_DB_IN_FLOAT;
-                        break;
-                    case AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT:
-                        mMatrix[index][0] = COEF_61;
-                        mMatrix[index][1] = 0.f;
-                        break;
-                    case AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT:
-                        mMatrix[index][0] = 0.f;
-                        mMatrix[index][1] = COEF_61;
-                        break;
-                    case AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER:
-                        mMatrix[index][0] = COEF_61;
-                        mMatrix[index][1] = COEF_25;
-                        break;
-                    case AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER:
-                        mMatrix[index][0] = COEF_25;
-                        mMatrix[index][1] = COEF_61;
-                        break;
-                    case AUDIO_CHANNEL_OUT_TOP_CENTER:
-                        mMatrix[index][0] = mMatrix[index][1] = COEF_36;
-                        break;
-                    case AUDIO_CHANNEL_OUT_TOP_BACK_CENTER:
-                        mMatrix[index][0] = mMatrix[index][1] = COEF_35;
-                        break;
-                    case AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2:
-                        mMatrix[index][0] = 0.f;
-                        mMatrix[index][1] = MINUS_3_DB_IN_FLOAT;
-                        break;
-                    case AUDIO_CHANNEL_OUT_LOW_FREQUENCY:
-                        if (inputChannelMask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) {
-                            mMatrix[index][0] = MINUS_3_DB_IN_FLOAT;
-                            mMatrix[index][1] = 0.f;
-                            break;
-                        }
-                        FALLTHROUGH_INTENDED;
-                    case AUDIO_CHANNEL_OUT_BACK_CENTER:
-                        mMatrix[index][0] = mMatrix[index][1] = 0.5f;
-                        break;
-                }
-                tmp ^= lowestBit;
+            if (!fillChannelMatrix<OUTPUT_CHANNEL_MASK>(inputChannelMask, mMatrix)) {
+                return false;  // missized matrix.
             }
             mInputChannelMask = inputChannelMask;
-            // Note: mLastValidChannelIndexPlusOne is the same as mInputChannelCount for
-            // this particular matrix, as it has a nonzero column for every channel position.
-            mInputChannelCount = mLastValidChannelIndexPlusOne = index;
+            mInputChannelCount = audio_channel_count_from_out_mask(inputChannelMask);
         }
         return true;
     }
 
-    /**
-     * Returns the input channel mask.
-     */
-    audio_channel_mask_t getInputChannelMask() const {
+    audio_channel_mask_t getInputChannelMask() const override {
         return mInputChannelMask;
     }
 
-    /**
-     * Downmixes audio data in src to dst.
-     *
-     * \param src          input audio buffer to downmix
-     * \param dst          downmixed stereo audio samples
-     * \param frameCount   number of frames to downmix
-     * \param accumulate   is true if the downmix is added to the destination or
-     *                     false if the downmix replaces the destination.
-     *
-     * \return false if the channel mask set is not supported.
-     */
-    bool process(const float *src, float *dst, size_t frameCount, bool accumulate) const {
+    bool process(const float *src, float *dst, size_t frameCount,
+            bool accumulate) const override {
         return accumulate ? processSwitch<true>(src, dst, frameCount)
                 : processSwitch<false>(src, dst, frameCount);
     }
 
-    /**
-     * Downmixes audio data in src to dst.
-     *
-     * \param src          input audio buffer to downmix
-     * \param dst          downmixed stereo audio samples
-     * \param frameCount   number of frames to downmix
-     * \param accumulate   is true if the downmix is added to the destination or
-     *                     false if the downmix replaces the destination.
-     * \param inputChannelMask channel position mask for input data.
-     *
-     * \return false if the channel mask set is not supported.
-     */
-    bool process(const float *src, float *dst, size_t frameCount, bool accumulate,
-            audio_channel_mask_t inputChannelMask) {
+    bool process(const float *src, float *dst, size_t frameCount,
+            bool accumulate, audio_channel_mask_t inputChannelMask) override {
         return setInputChannelMask(inputChannelMask) && process(src, dst, frameCount, accumulate);
     }
 
@@ -212,31 +548,30 @@
     static constexpr size_t MAX_INPUT_CHANNELS_SUPPORTED = FCC_26;
 
 private:
+    // Static/const parameters.
+    static inline constexpr audio_channel_mask_t mOutputChannelMask = OUTPUT_CHANNEL_MASK;
+    static inline constexpr size_t mOutputChannelCount =
+            audio_channel_count_from_out_mask(OUTPUT_CHANNEL_MASK);
+    static inline constexpr float MINUS_3_DB_IN_FLOAT = M_SQRT1_2; // -3dB = 0.70710678
+
     // These values are modified only when the input channel mask changes.
     // Keep alignment for matrix for more stable benchmarking.
-    // Currently only stereo output supported.
-    alignas(128) float mMatrix[MAX_INPUT_CHANNELS_SUPPORTED][FCC_2];
+    //
+    // DO NOT change the order of these variables without running
+    // atest channelmix_benchmark
+    alignas(128) float mMatrix[MAX_INPUT_CHANNELS_SUPPORTED][mOutputChannelCount];
     audio_channel_mask_t mInputChannelMask = AUDIO_CHANNEL_NONE;
-    size_t mLastValidChannelIndexPlusOne = 0;
     size_t mInputChannelCount = 0;
 
-    // Static/const parameters.
-    static inline constexpr size_t mOutputChannelCount = FCC_2;    // stereo out only
-    static inline constexpr float MINUS_3_DB_IN_FLOAT = M_SQRT1_2; // -3dB = 0.70710678
-    static inline constexpr float LIMIT_AMPLITUDE = M_SQRT2;       // 3dB = 1.41421356
-    static inline float clamp(float value) {
-        return fmin(fmax(value, -LIMIT_AMPLITUDE), LIMIT_AMPLITUDE);
-    }
-
     /**
-     * Downmixes audio data in src to dst.
+     * Remixes audio data in src to dst.
      *
-     * ACCUMULATE is true if the downmix is added to the destination or
-     *               false if the downmix replaces the destination.
+     * ACCUMULATE is true if the remix is added to the destination or
+     *               false if the remix replaces the destination.
      *
-     * \param src          multichannel audio buffer to downmix
-     * \param dst          downmixed stereo audio samples
-     * \param frameCount   number of multichannel frames to downmix
+     * \param src          multichannel audio buffer to remix
+     * \param dst          remixed audio samples
+     * \param frameCount   number of multichannel frames to remix
      *
      * \return false if the CHANNEL_COUNT is not supported.
      */
@@ -244,17 +579,76 @@
     bool processSwitch(const float *src, float *dst, size_t frameCount) const {
         constexpr bool ANDROID_SPECIFIC = true;  // change for testing.
         if constexpr (ANDROID_SPECIFIC) {
-            switch (mInputChannelMask) {
-            case AUDIO_CHANNEL_OUT_QUAD_BACK:
-            case AUDIO_CHANNEL_OUT_QUAD_SIDE:
-                return specificProcess<4 /* CHANNEL_COUNT */, ACCUMULATE>(src, dst, frameCount);
-            case AUDIO_CHANNEL_OUT_5POINT1_BACK:
-            case AUDIO_CHANNEL_OUT_5POINT1_SIDE:
-                return specificProcess<6 /* CHANNEL_COUNT */, ACCUMULATE>(src, dst, frameCount);
-            case AUDIO_CHANNEL_OUT_7POINT1:
-                return specificProcess<8 /* CHANNEL_COUNT */, ACCUMULATE>(src, dst, frameCount);
-            default:
-                break; // handled below.
+            if constexpr (OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_STEREO
+                    || OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_5POINT1) {
+                switch (mInputChannelMask) {
+                case AUDIO_CHANNEL_OUT_STEREO:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_STEREO,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_QUAD_BACK:
+                case AUDIO_CHANNEL_OUT_QUAD_SIDE:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_QUAD_BACK,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_5POINT1_BACK:
+                case AUDIO_CHANNEL_OUT_5POINT1_SIDE:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_5POINT1_BACK,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_5POINT1POINT2:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_5POINT1POINT2,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_5POINT1POINT4:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_5POINT1POINT4,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_7POINT1:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_7POINT1,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_7POINT1POINT2:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_7POINT1POINT2,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_7POINT1POINT4:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_7POINT1POINT4,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_22POINT2:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_22POINT2,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                default:
+                    break; // handled below.
+                }
+            } else if constexpr (OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_7POINT1
+                    || OUTPUT_CHANNEL_MASK == AUDIO_CHANNEL_OUT_7POINT1POINT4) {
+                switch (mInputChannelMask) {
+                case AUDIO_CHANNEL_OUT_STEREO:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_STEREO,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_QUAD_BACK:
+                // Note: case AUDIO_CHANNEL_OUT_QUAD_SIDE is not equivalent.
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_QUAD_BACK,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_5POINT1_BACK:
+                // Note: case AUDIO_CHANNEL_OUT_5POINT1_SIDE is not equivalent.
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_5POINT1_BACK,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_5POINT1POINT2:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_5POINT1POINT2,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_5POINT1POINT4:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_5POINT1POINT4,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_7POINT1:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_7POINT1,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_7POINT1POINT2:
+                    return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_7POINT1POINT2,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_7POINT1POINT4:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_7POINT1POINT4,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                case AUDIO_CHANNEL_OUT_22POINT2:
+                     return sparseChannelMatrixMultiply<AUDIO_CHANNEL_OUT_22POINT2,
+                            OUTPUT_CHANNEL_MASK, ACCUMULATE>(src, dst, frameCount);
+                default:
+                    break; // handled below.
+                }
             }
         }
         return matrixProcess(src, dst, frameCount, ACCUMULATE);
@@ -264,11 +658,11 @@
      * Converts a source audio stream to destination audio stream with a matrix
      * channel conversion.
      *
-     * \param src          multichannel audio buffer to downmix
-     * \param dst          downmixed stereo audio samples
-     * \param frameCount   number of multichannel frames to downmix
-     * \param accumulate   is true if the downmix is added to the destination or
-     *                     false if the downmix replaces the destination.
+     * \param src          multichannel audio buffer to remix
+     * \param dst          remixed audio samples
+     * \param frameCount   number of multichannel frames to remix
+     * \param accumulate   is true if the remix is added to the destination or
+     *                     false if the remix replaces the destination.
      *
      * \return false if the CHANNEL_COUNT is not supported.
      */
@@ -276,92 +670,27 @@
         // matrix multiply
         if (mInputChannelMask == AUDIO_CHANNEL_NONE) return false;
         while (frameCount) {
-            float ch[2]{}; // left, right
-            for (size_t i = 0; i < mLastValidChannelIndexPlusOne; ++i) {
-                ch[0] += mMatrix[i][0] * src[i];
-                ch[1] += mMatrix[i][1] * src[i];
+            float ch[mOutputChannelCount]{};
+            for (size_t i = 0; i < mInputChannelCount; ++i) {
+                const float (&array)[mOutputChannelCount] = mMatrix[i];
+                for (size_t j = 0; j < mOutputChannelCount; ++j) {
+                    ch[j] += array[j] * src[i];
+                }
             }
             if (accumulate) {
-                ch[0] += dst[0];
-                ch[1] += dst[1];
+                for (size_t j = 0; j < mOutputChannelCount; ++j) {
+                    ch[j] += dst[j];
+                }
             }
-            dst[0] = clamp(ch[0]);
-            dst[1] = clamp(ch[1]);
+            for (size_t j = 0; j < mOutputChannelCount; ++j) {
+                dst[j] = clamp(ch[j]);
+            }
             src += mInputChannelCount;
             dst += mOutputChannelCount;
             --frameCount;
         }
         return true;
     }
-
-    /**
-     * Downmixes to stereo a multichannel signal of specified number of channels
-     *
-     * CHANNEL_COUNT is the number of channels of the src input.
-     * ACCUMULATE is true if the downmix is added to the destination or
-     *               false if the downmix replaces the destination.
-     *
-     * \param src          multichannel audio buffer to downmix
-     * \param dst          downmixed stereo audio samples
-     * \param frameCount   number of multichannel frames to downmix
-     *
-     * \return false if the CHANNEL_COUNT is not supported.
-     */
-    template <int CHANNEL_COUNT, bool ACCUMULATE>
-    static bool specificProcess(const float *src, float *dst, size_t frameCount) {
-        while (frameCount > 0) {
-            float ch[2]; // left, right
-            if constexpr (CHANNEL_COUNT == 4) { // QUAD
-                // sample at index 0 is FL
-                // sample at index 1 is FR
-                // sample at index 2 is RL (or SL)
-                // sample at index 3 is RR (or SR)
-                // FL + RL
-                ch[0] = src[0] + src[2] * MINUS_3_DB_IN_FLOAT;
-                // FR + RR
-                ch[1] = src[1] + src[3] * MINUS_3_DB_IN_FLOAT;
-            } else if constexpr (CHANNEL_COUNT == 6) { // 5.1
-                // sample at index 0 is FL
-                // sample at index 1 is FR
-                // sample at index 2 is FC
-                // sample at index 3 is LFE
-                // sample at index 4 is RL (or SL)
-                // sample at index 5 is RR (or SR)
-                const float centerPlusLfeContrib = src[2] + src[3] * MINUS_3_DB_IN_FLOAT;
-                // FL + RL + centerPlusLfeContrib
-                ch[0] = src[0] + (src[4] + centerPlusLfeContrib) * MINUS_3_DB_IN_FLOAT;
-                // FR + RR + centerPlusLfeContrib
-                ch[1] = src[1] + (src[5] + centerPlusLfeContrib) * MINUS_3_DB_IN_FLOAT;
-            } else if constexpr (CHANNEL_COUNT == 8) { // 7.1
-                // sample at index 0 is FL
-                // sample at index 1 is FR
-                // sample at index 2 is FC
-                // sample at index 3 is LFE
-                // sample at index 4 is RL
-                // sample at index 5 is RR
-                // sample at index 6 is SL
-                // sample at index 7 is SR
-                const float centerPlusLfeContrib = src[2] + src[3] * MINUS_3_DB_IN_FLOAT;
-                // FL + RL + SL + centerPlusLfeContrib
-                ch[0] = src[0] + (src[4] + src[6] + centerPlusLfeContrib) * MINUS_3_DB_IN_FLOAT;
-                // FR + RR + SR + centerPlusLfeContrib
-                ch[1] = src[1] + (src[5] + src[7] + centerPlusLfeContrib) * MINUS_3_DB_IN_FLOAT;
-            } else {
-                return false;
-            }
-            if constexpr (ACCUMULATE) {
-                dst[0] = clamp(dst[0] + ch[0]);
-                dst[1] = clamp(dst[1] + ch[1]);
-            } else {
-                dst[0] = clamp(ch[0]);
-                dst[1] = clamp(ch[1]);
-            }
-            src += CHANNEL_COUNT;
-            dst += mOutputChannelCount;
-            --frameCount;
-        }
-        return true;
-    }
 };
 
 } // android::audio_utils::channels
diff --git a/audio_utils/include/audio_utils/Histogram.h b/audio_utils/include/audio_utils/Histogram.h
index f85f20b..74d675e 100644
--- a/audio_utils/include/audio_utils/Histogram.h
+++ b/audio_utils/include/audio_utils/Histogram.h
@@ -17,6 +17,7 @@
 #ifndef AUDIO_UTILS_HISTOGRAM_H
 #define AUDIO_UTILS_HISTOGRAM_H
 
+#include <assert.h>
 #include <memory>
 #include <sstream>
 #include <vector>
diff --git a/audio_utils/include/audio_utils/MelAggregator.h b/audio_utils/include/audio_utils/MelAggregator.h
new file mode 100644
index 0000000..cf262bb
--- /dev/null
+++ b/audio_utils/include/audio_utils/MelAggregator.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/MelProcessor.h>
+#include <map>
+#include <mutex>
+
+namespace android::audio_utils {
+
+struct MelRecord {
+    /** The port ID of the audio device where the MEL value was recorded. */
+    audio_port_handle_t portId;
+    /**
+     * Array of continuously recorded MEL values >= RS1 (1 per second). First
+     * value in the array was recorded at time: timestamp.
+     */
+    std::vector<float> mels;
+    /** Corresponds to the time when the first MEL entry in MelRecord was recorded. */
+    int64_t timestamp;
+
+    MelRecord(audio_port_handle_t portId,
+              std::vector<float> mels,
+              int64_t timestamp)
+        : portId(portId), mels(std::move(mels)), timestamp(timestamp) {}
+
+    inline bool overlapsEnd(const MelRecord& record) const {
+        return timestamp + static_cast<int64_t>(mels.size()) > record.timestamp;
+    }
+};
+
+struct CsdRecord {
+    /** Corresponds to the time when the CSD value is calculated from. */
+    const int64_t timestamp;
+    /** Corresponds to the duration that leads to the CSD value. */
+    const size_t duration;
+    /** The actual contribution to the CSD computation normalized: 1.f is 100%CSD. */
+    const float value;
+    /** The average MEL value that lead to this CSD value. */
+    const float averageMel;
+
+    CsdRecord(int64_t timestamp,
+              size_t duration,
+              float value,
+              float averageMel)
+        : timestamp(timestamp),
+          duration(duration),
+          value(value),
+          averageMel(averageMel) {};
+};
+
+/**
+ * Class used to aggregate MEL values from different streams that play sounds
+ * simultaneously.
+ *
+ * The public methods are internally protected by a mutex to be thread-safe.
+ */
+class MelAggregator : public RefBase {
+public:
+
+    explicit MelAggregator(int64_t csdWindowSeconds)
+        : mCsdWindowSeconds(csdWindowSeconds) {}
+
+    /**
+     * \returns the size of the stored CSD values.
+     */
+    size_t getCsdRecordsSize() const;
+
+    /**
+     * \brief Iterate over the CsdRecords and applies function f.
+     *
+     * \param f      function to apply on the iterated CsdRecord's sorted by timestamp
+     */
+    void foreachCsd(const std::function<void(const CsdRecord&)>& f) const;
+
+    /** Returns the current CSD computed with a rolling window of mCsdWindowSeconds. */
+    float getCsd();
+
+    /**
+     * \returns the size of the stored MEL records.
+     */
+    size_t getCachedMelRecordsSize() const;
+
+    /**
+     * \brief Iterate over the MelRecords and applies function f.
+     *
+     * \param f      function to apply on the iterated MelRecord's sorted by timestamp
+     */
+    void foreachCachedMel(const std::function<void(const MelRecord&)>& f) const;
+
+    /**
+     * New value are stored and MEL values that correspond to the same timestamp
+     * will be aggregated.
+     *
+     * \returns a vector containing all the new CsdRecord's that were added to
+     *   the current CSD value. Vector could be empty in case no new records
+     *   contributed to CSD.
+     */
+    std::vector<CsdRecord> aggregateAndAddNewMelRecord(const MelRecord& record);
+
+    /**
+     * Reset the aggregator values. Discards all the previous cached values and
+     * uses the passed records for the new callbacks.
+     **/
+    void reset(float newCsd, const std::vector<CsdRecord>& newRecords);
+private:
+    /** Locked aggregateAndAddNewMelRecord method. */
+    std::vector<CsdRecord> aggregateAndAddNewMelRecord_l(const MelRecord& record) REQUIRES(mLock);
+
+    void removeOldCsdRecords_l(std::vector<CsdRecord>& removeRecords) REQUIRES(mLock);
+
+    std::vector<CsdRecord> updateCsdRecords_l() REQUIRES(mLock);
+
+    int64_t csdTimeIntervalStored_l() REQUIRES(mLock);
+
+    std::map<int64_t, CsdRecord>::iterator addNewestCsdRecord_l(int64_t timestamp,
+                                                                int64_t duration,
+                                                                 float csdRecord,
+                                                                 float averageMel) REQUIRES(mLock);
+
+    const int64_t mCsdWindowSeconds;
+
+    mutable std::mutex mLock;
+
+    std::map<int64_t, MelRecord> mMelRecords GUARDED_BY(mLock);
+    std::map<int64_t, CsdRecord> mCsdRecords GUARDED_BY(mLock);
+
+    /** Current CSD value in mMelRecords. */
+    float mCurrentMelRecordsCsd GUARDED_BY(mLock) = 0.f;
+
+    /** CSD value containing sum of all CSD values stored. */
+    float mCurrentCsd GUARDED_BY(mLock) = 0.f;
+};
+
+}  // namespace android::audio_utils
diff --git a/audio_utils/include/audio_utils/MelProcessor.h b/audio_utils/include/audio_utils/MelProcessor.h
new file mode 100644
index 0000000..4801486
--- /dev/null
+++ b/audio_utils/include/audio_utils/MelProcessor.h
@@ -0,0 +1,247 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <condition_variable>
+#include <mutex>
+#include <thread>
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/BiquadFilter.h>
+#include <system/audio.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android::audio_utils {
+
+/**
+ * Class used to capture the MEL (momentary exposure levels) values as defined
+ * by IEC62368-1 3rd edition. MELs are computed for each second.
+ */
+class MelProcessor : public RefBase {
+public:
+
+    static constexpr int kCascadeBiquadNumber = 3;
+    /** Should represent the minimal value after which a 1% CSD change can occur. */
+    static constexpr int32_t kMaxMelValues = 3;
+
+    /**
+     * An interface through which the MelProcessor client will be notified about
+     * important events.
+     */
+    class MelCallback : public virtual RefBase {
+    public:
+        ~MelCallback() override = default;
+        /**
+         * Called with a time-continuous vector of computed MEL values
+         *
+         * \param mels     contains MELs (one per second) with values above RS1.
+         * \param offset   the offset in mels for new MEL data.
+         * \param length   the number of valid MEL values in the vector starting at offset. The
+         *                 maximum number of elements in mels is defined in the MelProcessor
+         *                 constructor.
+         * \param deviceId id of device where the samples were processed
+         */
+        virtual void onNewMelValues(const std::vector<float>& mels,
+                                    size_t offset,
+                                    size_t length,
+                                    audio_port_handle_t deviceId) const = 0;
+
+        /**
+         * Called when the momentary exposure exceeds the RS2 upper bound.
+         *
+         * Note: RS2 is configurable via MelProcessor#setOutputRs2UpperBound.
+         */
+        virtual void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const = 0;
+    };
+
+    /**
+     * \brief Creates a MelProcessor object.
+     *
+     * \param sampleRate        sample rate of the audio data.
+     * \param channelCount      channel count of the audio data.
+     * \param format            format of the audio data. It must be allowed by
+     *                          audio_utils_is_compute_mel_format_supported()
+     *                          else the constructor will abort.
+     * \param callback          reports back the new mel values.
+     * \param deviceId          the device ID for the MEL callbacks
+     * \param rs2Value          initial RS2 upper bound to use
+     * \param maxMelsCallback   the number of max elements a callback can have.
+     */
+    MelProcessor(uint32_t sampleRate,
+                 uint32_t channelCount,
+                 audio_format_t format,
+                 const sp<MelCallback>& callback,
+                 audio_port_handle_t deviceId,
+                 float rs2Value,
+                 size_t maxMelsCallback = kMaxMelValues);
+
+    /**
+     * Sets the output RS2 upper bound for momentary exposure warnings. Default value
+     * is 100dBA as specified in IEC62368-1 3rd edition. Must not be higher than
+     * 100dBA and not lower than 80dBA.
+     *
+     * \param rs2Value to use for momentary exposure
+     * \return NO_ERROR if rs2Value is between 80dBA and 100dBA or BAD_VALUE
+     *   otherwise
+     */
+    status_t setOutputRs2UpperBound(float rs2Value);
+
+    /** Returns the RS2 upper bound used for momentary exposures. */
+    float getOutputRs2UpperBound() const;
+
+    /** Updates the device id. */
+    void setDeviceId(audio_port_handle_t deviceId);
+
+    /** Returns the device id. */
+    audio_port_handle_t getDeviceId();
+
+    /** Update the format to use for the input frames to process. */
+    void updateAudioFormat(uint32_t sampleRate, uint32_t channelCount, audio_format_t newFormat);
+
+    /**
+     * \brief Computes the MEL values for the given buffer and triggers a
+     * callback with time-continuous MEL values when: MEL buffer is full or if
+     * there is a discontinuity in MEL calculation (e.g.: MEL is under RS1)
+     *
+     * \param buffer           pointer to the audio data buffer.
+     * \param bytes            buffer size in bytes.
+     *
+     * \return the number of bytes that were processed. Note: the method will
+     *   output 0 if the processor is paused or the sample rate is not supported.
+     */
+    int32_t process(const void* buffer, size_t bytes);
+
+    /**
+     * Pauses the processing of MEL values. Process calls after this will be
+     * ignored until resume.
+     */
+    void pause();
+
+    /** Resumes the processing of MEL values. */
+    void resume();
+
+    /**
+     * Sets the given attenuation for the MEL calculation. This can be used when
+     * the audio framework is operating in absolute volume mode.
+     *
+     * @param attenuationDB    attenuation to use on computed MEL values
+     */
+    void setAttenuation(float attenuationDB);
+
+    void onLastStrongRef(const void* id) override;
+
+private:
+    /** Struct to store the possible callback data. */
+    struct MelCallbackData {
+        // used for momentaryExposure callback
+        float mMel = 0.f;
+        // used for newMelValues callback
+        std::vector<float> mMels = std::vector<float>(kMaxMelValues);
+        // represents the number of valid MEL values in mMels
+        size_t mMelsSize = 0;
+        // port of deviceId for this callback
+        audio_port_handle_t mPort = AUDIO_PORT_HANDLE_NONE;
+    };
+
+    // class used to asynchronously execute all MelProcessor callbacks
+    class MelWorker {
+    public:
+        static constexpr int kRingBufferSize = 32;
+
+        MelWorker(std::string threadName, const wp<MelCallback>& callback)
+            : mCallback(callback),
+              mThreadName(std::move(threadName)),
+              mCallbackRingBuffer(kRingBufferSize) {};
+
+        void run();
+
+        // blocks until the MelWorker thread is stopped
+        void stop();
+
+        // callback methods for new MEL values
+        void momentaryExposure(float mel, audio_port_handle_t port);
+        void newMelValues(const std::vector<float>& mels,
+                          size_t melsSize,
+                          audio_port_handle_t port);
+
+        static void incRingBufferIndex(std::atomic_size_t& idx);
+        bool ringBufferIsFull() const;
+
+        const wp<MelCallback> mCallback;
+        const std::string mThreadName;
+        std::vector<MelCallbackData> mCallbackRingBuffer GUARDED_BY(mCondVarMutex);
+
+        std::atomic_size_t mRbReadPtr = 0;
+        std::atomic_size_t mRbWritePtr = 0;
+
+        std::thread mThread;
+        std::condition_variable mCondVar;
+        std::mutex mCondVarMutex;
+        bool mStopRequested GUARDED_BY(mCondVarMutex) = false;
+    };
+
+    std::string pointerString() const;
+    void createBiquads_l() REQUIRES(mLock);
+    bool isSampleRateSupported_l() const REQUIRES(mLock);
+    void applyAWeight_l(const void* buffer, size_t frames) REQUIRES(mLock);
+    float getCombinedChannelEnergy_l() REQUIRES(mLock);
+    void addMelValue_l(float mel) REQUIRES(mLock);
+
+    const wp<MelCallback> mCallback;           // callback to notify about new MEL values
+                                               // and momentary exposure warning
+                                               // does not own the callback, must outlive
+
+    MelWorker mMelWorker;                      // spawns thread for asynchronous callbacks,
+                                               // worker is thread-safe
+
+    mutable std::mutex mLock;                  // monitor mutex
+    // audio data sample rate
+    uint32_t mSampleRate GUARDED_BY(mLock);
+    // number of audio frames per MEL value
+    size_t mFramesPerMelValue GUARDED_BY(mLock);
+    // audio data channel count
+    uint32_t mChannelCount GUARDED_BY(mLock);
+    // audio data format
+    audio_format_t mFormat GUARDED_BY(mLock);
+    // contains the A-weighted input samples to be processed
+    std::vector<float> mAWeightSamples GUARDED_BY(mLock);
+    // contains the input samples converted to float
+    std::vector<float> mFloatSamples GUARDED_BY(mLock);
+    // local energy accumulation
+    std::vector<float> mCurrentChannelEnergy GUARDED_BY(mLock);
+    // accumulated MEL values
+    std::vector<float> mMelValues GUARDED_BY(mLock);
+    // current index to store the MEL values
+    uint32_t mCurrentIndex GUARDED_BY(mLock);
+    using DefaultBiquadFilter = BiquadFilter<float, true, details::DefaultBiquadConstOptions>;
+    // Biquads used for the A-weighting
+    std::array<std::unique_ptr<DefaultBiquadFilter>, kCascadeBiquadNumber>
+        mCascadedBiquads GUARDED_BY(mLock);
+
+    std::atomic<float> mAttenuationDB = 0.f;
+    // device id used for the callbacks
+    std::atomic<audio_port_handle_t> mDeviceId;
+    // Value used for momentary exposure
+    std::atomic<float> mRs2UpperBound;
+    // number of samples in the energy
+    std::atomic_size_t mCurrentSamples;
+    std::atomic_bool mPaused;
+};
+
+}  // namespace android::audio_utils
diff --git a/audio_utils/include/audio_utils/TimestampVerifier.h b/audio_utils/include/audio_utils/TimestampVerifier.h
index ed27cac..fa9c0a2 100644
--- a/audio_utils/include/audio_utils/TimestampVerifier.h
+++ b/audio_utils/include/audio_utils/TimestampVerifier.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AUDIO_UTILS_TIMESTAMP_VERIFIER_H
 #define ANDROID_AUDIO_UTILS_TIMESTAMP_VERIFIER_H
 
+#include <assert.h>
 #include <audio_utils/clock.h>
 #include <audio_utils/Statistics.h>
 
diff --git a/audio_utils/include/audio_utils/hal_smoothness.h b/audio_utils/include/audio_utils/hal_smoothness.h
new file mode 100644
index 0000000..9032e91
--- /dev/null
+++ b/audio_utils/include/audio_utils/hal_smoothness.h
@@ -0,0 +1,122 @@
+/*
+** Copyright 2022, The Android Open-Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// This library is used to help collect audio smoothness metrics from an
+// implementation of an Audio Hardware Abstraction Layer (HAL). This is
+// primarily used to count xruns and number of frames written and lost for an
+// audio stream.
+//
+// To use this library, create an instance of the struct "hal_smoothness" using
+// the "initialize()" method. The "client_flush_cb" callback method needs to be
+// defined, which will be called when the library thinks it is time to flush
+// metrics data, which is when "num_writes_to_log" == "total_writes". The
+// callback's purpose is so that the client can flush these metrics wherever
+// it wants (ie. flushing it to a metrics server or to logcat). A
+// "hal_smoothness_metrics" will be passed into it. After the callback has
+// finished, all values will be set back to 0.
+
+#pragma once
+
+#include <sys/cdefs.h>
+
+#define HAL_SMOOTHNESS_VERSION_1 1
+
+__BEGIN_DECLS
+
+// Audio HAL smoothness metrics that can be read by the client. These metrics,
+// get reset after every flush to the client's callback.
+struct hal_smoothness_metrics {
+  // Count of underruns, right before a flush.
+  unsigned int underrun_count;
+
+  // Count of overruns, right before a flush.
+  unsigned int overrun_count;
+
+  // Count of times audio samples are written to the endpoint buffer, right
+  // before a flush.
+  unsigned int total_writes;
+
+  // Total number of frames written, right before a flush.
+  unsigned int total_frames_written;
+
+  // Total number of frames lost, right before a flush.
+  unsigned int total_frames_lost;
+
+  // Smoothness value calculated by library right before calling the flush
+  // callback:
+  // -ln(total_frames_lost/(total_frames_written + total_frames_lost))
+  //
+  // If "total_frames_lost" is 0, that would imply perfect audio quality in
+  // terms of no frames being dropped. In this case, DBL_MAX will be returned.
+  double smoothness_value;
+
+  // Timestamp of when these metrics were flushed. It is up to the client to
+  // decide which clock and what granularity to use. However, it is recommended
+  // to use the system clock CLOCK_REALTIME.
+  unsigned long timestamp;
+};
+
+// Used by the audio HAL implementor to help with collection of audio smoothness
+// metrics.
+struct hal_smoothness {
+  // Struct version.
+  unsigned int version;
+
+  // Increments “underrun_count” and "total_frames_lost".
+  //
+  // returns 0 if successful and non-zero on failure.
+  int (*increment_underrun)(struct hal_smoothness *smoothness,
+                            unsigned int frames_lost);
+
+  // Increments “overrun_count” and "total_frames_lost".
+  //
+  // returns 0 if successful and non-zero on failure.
+  int (*increment_overrun)(struct hal_smoothness *smoothness,
+                           unsigned int frames_lost);
+
+  // Increments “total_writes” and "total_frames_written". Once “total_writes >=
+  // num_writes_to_log”, “client_flush_cb” will be triggered and all the ints
+  // in "hal_smoothness_metrics" will be reset to 0.
+  //
+  // returns 0 if successful and non-zero on failure.
+  int (*increment_total_writes)(struct hal_smoothness *smoothness,
+                                unsigned int frames_written,
+                                unsigned long timestamp);
+
+  // Manual flush. Will call "client_flush_cb" and reset "hal_smoothness_metrics".
+  int (*flush)(struct hal_smoothness *smoothness);
+};
+
+// version: hal_smoothness library version.
+//
+// num_writes_to_log: number of writes before we flush.
+//
+// client_flush_cb: Client defined flush method. The library will pass in
+// "hal_smoothness_metrics", which will be generated by the library, and
+// "private_data", which is provided by the client.
+//
+// private_data: Client defined data that is passed into “client_flush_cb”
+//
+// returns 0 if successful and non-zero on failure.
+int hal_smoothness_initialize(
+    struct hal_smoothness **smoothness, unsigned int version,
+    unsigned int num_writes_to_log,
+    void (*client_flush_cb)(struct hal_smoothness_metrics *, void *),
+    void *private_data);
+
+void hal_smoothness_free(struct hal_smoothness **smoothness);
+
+__END_DECLS
diff --git a/audio_utils/include/audio_utils/power.h b/audio_utils/include/audio_utils/power.h
index 385b6a7..8ee2ee2 100644
--- a/audio_utils/include/audio_utils/power.h
+++ b/audio_utils/include/audio_utils/power.h
@@ -63,6 +63,31 @@
 float audio_utils_compute_energy_mono(const void *buffer, audio_format_t format, size_t samples);
 
 /**
+ * \brief Compute for each channel signal energy (sum of squared amplitudes).
+ *
+ *   \param buffer       buffer of samples.
+ *   \param format       one of AUDIO_FORMAT_PCM_8_BIT, AUDIO_FORMAT_PCM_16_BIT,
+ *                       AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_8_24_BIT,
+ *                       AUDIO_FORMAT_PCM_32_BIT, AUDIO_FORMAT_PCM_FLOAT.
+ *   \param samples      number of samples in buffer.  This is not audio frames;
+ *                       usually the number of samples is the number of audio frames
+ *                       multiplied by channel count.
+ *   \param numChannels  the number of channels for which the energy is computed.
+ *   \param out          interleaved buffer containing for each channel the sample
+ *                       energy. Must be initialized with zero values or pre-existing
+ *                       energy to accumulate to.
+ *
+ * \return
+ *   out array is updated by adding for each channel the signal energy of the samples
+ *   in the buffer (sum of squares).
+ */
+void audio_utils_accumulate_energy(const void* buffer,
+                                   audio_format_t format,
+                                   size_t samples,
+                                   size_t numChannels,
+                                   float* out);
+
+/**
  * \brief  Returns true if the format is supported for compute_energy_for_mono()
  *         and compute_power_for_mono().
  * \param  format        format under consideration.
diff --git a/audio_utils/power.cpp b/audio_utils/power.cpp
index 47de41d..58fa917 100644
--- a/audio_utils/power.cpp
+++ b/audio_utils/power.cpp
@@ -130,11 +130,30 @@
 }
 
 template <audio_format_t FORMAT>
+inline void energyRef(const void *amplitudes, size_t size, size_t numChannels, float* out)
+{
+    const size_t framesSize = size / numChannels;
+    for (size_t i = 0; i < framesSize; ++i) {
+        for (size_t c = 0; c < numChannels; ++c) {
+            const float amplitude = convertToFloatAndIncrement<FORMAT>(&amplitudes);
+            out[c] += amplitude * amplitude;
+        }
+    }
+}
+
+template <audio_format_t FORMAT>
 inline float energyMono(const void *amplitudes, size_t size)
 {
     return energyMonoRef<FORMAT>(amplitudes, size);
 }
 
+// TODO: optimize with NEON
+template <audio_format_t FORMAT>
+inline void energy(const void *amplitudes, size_t size, size_t numChannels, float* out)
+{
+    energyRef<FORMAT>(amplitudes, size, numChannels, out);
+}
+
 // fast float power computation for ARM processors that support NEON.
 #ifdef USE_NEON
 
@@ -265,6 +284,42 @@
     }
 }
 
+void audio_utils_accumulate_energy(const void* buffer,
+                                   audio_format_t format,
+                                   size_t samples,
+                                   size_t numChannels,
+                                   float* out)
+{
+    switch (format) {
+    case AUDIO_FORMAT_PCM_8_BIT:
+        energy<AUDIO_FORMAT_PCM_8_BIT>(buffer, samples, numChannels, out);
+        break;
+
+    case AUDIO_FORMAT_PCM_16_BIT:
+        energy<AUDIO_FORMAT_PCM_16_BIT>(buffer, samples, numChannels, out);
+        break;
+
+    case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        energy<AUDIO_FORMAT_PCM_24_BIT_PACKED>(buffer, samples, numChannels, out);
+        break;
+
+    case AUDIO_FORMAT_PCM_8_24_BIT:
+        energy<AUDIO_FORMAT_PCM_8_24_BIT>(buffer, samples, numChannels, out);
+        break;
+
+    case AUDIO_FORMAT_PCM_32_BIT:
+        energy<AUDIO_FORMAT_PCM_32_BIT>(buffer, samples, numChannels, out);
+        break;
+
+    case AUDIO_FORMAT_PCM_FLOAT:
+        energy<AUDIO_FORMAT_PCM_FLOAT>(buffer, samples, numChannels, out);
+        break;
+
+    default:
+        LOG_ALWAYS_FATAL("invalid format: %#x", format);
+    }
+}
+
 float audio_utils_compute_power_mono(const void *buffer, audio_format_t format, size_t samples)
 {
     return audio_utils_power_from_energy(
diff --git a/audio_utils/resampler.c b/audio_utils/resampler.c
index 3f1f8a4..16f25f6 100644
--- a/audio_utils/resampler.c
+++ b/audio_utils/resampler.c
@@ -50,12 +50,12 @@
 static void resampler_reset(struct resampler_itfe *resampler)
 {
     struct resampler *rsmp = (struct resampler *)resampler;
-
-    rsmp->frames_in = 0;
-    rsmp->frames_rq = 0;
-
-    if (rsmp != NULL && rsmp->speex_resampler != NULL) {
-        speex_resampler_reset_mem(rsmp->speex_resampler);
+    if (rsmp != NULL) {
+        rsmp->frames_in = 0;
+        rsmp->frames_rq = 0;
+        if (rsmp->speex_resampler != NULL) {
+            speex_resampler_reset_mem(rsmp->speex_resampler);
+        }
     }
 }
 
diff --git a/audio_utils/spdif/AC3FrameScanner.cpp b/audio_utils/spdif/AC3FrameScanner.cpp
index 53094c5..5f01a04 100644
--- a/audio_utils/spdif/AC3FrameScanner.cpp
+++ b/audio_utils/spdif/AC3FrameScanner.cpp
@@ -184,13 +184,23 @@
 
     // bitstream mode, main, commentary, etc.
     uint32_t bsmod = mHeaderBuffer[5] & 7;
-    mDataTypeInfo = bsmod; // as per IEC61937-3, table 3.
+
+    mDataTypeInfo = 0;
 
     // The names fscod, frmsiz are from the AC3 spec.
     uint32_t fscod = mHeaderBuffer[4] >> 6;
     if (mDataType == SPDIF_DATA_TYPE_E_AC3) {
         mStreamType = mHeaderBuffer[2] >> 6; // strmtyp in spec
         mSubstreamID = (mHeaderBuffer[2] >> 3) & 0x07;
+        // For EAC3 stream, only set data-type-dependent information as the value of
+        // bsmod in independent substream 0 of EAC3 elementary stream.
+        if (mStreamType != 1 && mSubstreamID == 0) {
+            const int infomdate = (mHeaderBuffer[5] >> 3) & 1;
+            if (infomdate == 1) {
+                mDataTypeInfo = bsmod;
+            }
+        }
+
 
         // Frame size is explicit in EAC3. Paragraph E2.3.1.3
         uint32_t frmsiz = ((mHeaderBuffer[2] & 0x07) << 8) + mHeaderBuffer[3];
@@ -231,6 +241,7 @@
                 "EAC3 mStreamType = %d, mSubstreamID = %d",
                 mStreamType, mSubstreamID);
     } else { // regular AC3
+        mDataTypeInfo = bsmod; // as per IEC61937-3, table 3.
         // Extract sample rate and frame size from codes.
         uint32_t frmsizcod = mHeaderBuffer[4] & 0x3F; // frame size code
 
diff --git a/audio_utils/tests/Android.bp b/audio_utils/tests/Android.bp
index ebba33e..b745dcb 100644
--- a/audio_utils/tests/Android.bp
+++ b/audio_utils/tests/Android.bp
@@ -47,6 +47,56 @@
 }
 
 cc_test {
+    name: "mel_aggregator_tests",
+    host_supported: true,
+
+    srcs: [
+        "mel_aggregator_tests.cpp"
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+cc_test {
+    name: "mel_processor_tests",
+    host_supported: true,
+
+    srcs: [
+        "mel_processor_tests.cpp"
+    ],
+
+    shared_libs: [
+        "libaudioutils",
+        "liblog",
+        "libutils",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+cc_test {
     name: "biquad_filter_tests",
     host_supported: true,
 
@@ -109,6 +159,26 @@
 }
 
 cc_test {
+    name: "hal_smoothness_tests",
+    host_supported: true,
+
+    srcs: [
+        "hal_smoothness_tests.cpp",
+    ],
+
+    static_libs: [
+        "libaudioutils",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+}
+
+cc_test {
     name: "intrinsic_tests",
     host_supported: true,
 
@@ -483,6 +553,22 @@
 }
 
 cc_test {
+    name: "spatializer_utils_tests",
+    host_supported: true,
+
+    shared_libs: [
+        "libaudioutils",
+        "libcutils",
+        "liblog",
+    ],
+    srcs: ["spatializer_utils_tests.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_test {
     name: "spdif_tests",
     host_supported: true,
 
diff --git a/audio_utils/tests/build_and_run_all_unit_tests.sh b/audio_utils/tests/build_and_run_all_unit_tests.sh
index 3ca4b6e..68b28ce 100755
--- a/audio_utils/tests/build_and_run_all_unit_tests.sh
+++ b/audio_utils/tests/build_and_run_all_unit_tests.sh
@@ -71,3 +71,11 @@
 echo "benchmarking primitives"
 adb push $OUT/system/bin/primitives_benchmark /system/bin
 adb shell /system/bin/primitives_benchmark
+
+echo "melaggregator tests"
+adb push $OUT/data/nativetest/mel_aggregator_tests/mel_aggregator_tests /system/bin
+adb shell /system/bin/mel_aggregator_tests
+
+echo "melprocessor tests"
+adb push $OUT/data/nativetest/mel_processor_tests/mel_processor_tests /system/bin
+adb shell /system/bin/mel_processor_tests
diff --git a/audio_utils/tests/build_and_run_biquad_filter.sh b/audio_utils/tests/build_and_run_biquad_filter.sh
index 5964eaf..7ab27ec 100755
--- a/audio_utils/tests/build_and_run_biquad_filter.sh
+++ b/audio_utils/tests/build_and_run_biquad_filter.sh
@@ -23,7 +23,7 @@
 echo "========================================"
 echo "testing biquad filter"
 adb shell mkdir -p $testdir
-adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/res/raw/sinesweepraw.raw $testdir
+adb push $ANDROID_BUILD_TOP/cts/tests/tests/media/audio/res/raw/sinesweepraw.raw $testdir
 
 adb push $OUT/system/bin/biquad_filter $testdir
 for ch in {1..8}
diff --git a/audio_utils/tests/channelmix_tests.cpp b/audio_utils/tests/channelmix_tests.cpp
index b52fedb..dfdc5c3 100644
--- a/audio_utils/tests/channelmix_tests.cpp
+++ b/audio_utils/tests/channelmix_tests.cpp
@@ -19,7 +19,14 @@
 #include <gtest/gtest.h>
 #include <log/log.h>
 
-static constexpr audio_channel_mask_t kChannelPositionMasks[] = {
+static constexpr audio_channel_mask_t kOutputChannelMasks[] = {
+    AUDIO_CHANNEL_OUT_STEREO,
+    AUDIO_CHANNEL_OUT_5POINT1, // AUDIO_CHANNEL_OUT_5POINT1_BACK
+    AUDIO_CHANNEL_OUT_7POINT1,
+    AUDIO_CHANNEL_OUT_7POINT1POINT4,
+};
+
+static constexpr audio_channel_mask_t kInputChannelMasks[] = {
     AUDIO_CHANNEL_OUT_FRONT_LEFT, // Legacy: the ChannelMix effect treats MONO as FRONT_LEFT only.
                                   // The AudioMixer interprets MONO as a special case requiring
                                   // channel replication, bypassing the ChannelMix effect.
@@ -132,23 +139,53 @@
     return result;
 }
 
-using ChannelMixParam = std::tuple<int /* channel mask */, int /* 0 = replace, 1 = accumulate */>;
+using ChannelMixParam = std::tuple<int /* output channel mask */,
+        int /* input channel mask */,
+        bool /* accumulate */>;
+
+// For ChannelMixParam tuple get.
+constexpr size_t OUTPUT_CHANNEL_MASK_POSITION = 0;
+constexpr size_t INPUT_CHANNEL_MASK_POSITION = 1;
+constexpr size_t ACCUMULATE_POSITION = 2;
+
 class ChannelMixTest : public ::testing::TestWithParam<ChannelMixParam> {
 public:
 
-    void testBalance(audio_channel_mask_t channelMask, bool accumulate) {
+    void testBalance(audio_channel_mask_t outputChannelMask,
+            audio_channel_mask_t inputChannelMask, bool accumulate) {
         using namespace ::android::audio_utils::channels;
 
         size_t frames = 100; // set to an even number (2, 4, 6 ... ) stream alternates +1, -1.
-        constexpr unsigned outChannels = FCC_2;
-        unsigned inChannels = audio_channel_count_from_out_mask(channelMask);
+        const unsigned outChannels = audio_channel_count_from_out_mask(outputChannelMask);
+        const unsigned inChannels = audio_channel_count_from_out_mask(inputChannelMask);
         std::vector<float> input(frames * inChannels);
         std::vector<float> output(frames * outChannels);
 
-        double savedPower[32][FCC_2]{};
-        for (unsigned i = 0, channel = channelMask; channel != 0; ++i) {
+        double savedPower[32 /* inChannels */][32 /* outChannels */]{};
+
+        // Precompute output channel geometry.
+        AUDIO_GEOMETRY_SIDE outSide[outChannels];  // what side that channel index is on
+        int outIndexToOffset[32] = {[0 ... 31] = -1};
+        int outPair[outChannels];  // is there a matching pair channel?
+        for (unsigned i = 0, channel = outputChannelMask; channel != 0; ++i) {
             const int index = __builtin_ctz(channel);
-            ASSERT_LT((size_t)index, ChannelMix::MAX_INPUT_CHANNELS_SUPPORTED);
+            outIndexToOffset[index] = i;
+            outSide[i] = sideFromChannelIdx(index);
+            outPair[i] = pairIdxFromChannelIdx(index);
+
+            const int channelBit = 1 << index;
+            channel &= ~channelBit;
+        }
+        for (unsigned i = 0; i < outChannels; ++i) {
+            if (outPair[i] >= 0 && outPair[i] < (signed)std::size(outIndexToOffset)) {
+                outPair[i] = outIndexToOffset[outPair[i]];
+            }
+        }
+
+        auto remix = IChannelMix::create(outputChannelMask);
+
+        for (unsigned i = 0, channel = inputChannelMask; channel != 0; ++i) {
+            const int index = __builtin_ctz(channel);
             const int pairIndex = pairIdxFromChannelIdx(index);
             const AUDIO_GEOMETRY_SIDE side = sideFromChannelIdx(index);
             const int channelBit = 1 << index;
@@ -172,7 +209,7 @@
             }
 
             // Do the channel mix
-            ChannelMix(channelMask).process(input.data(), output.data(), frames, accumulate);
+            remix->process(input.data(), output.data(), frames, accumulate, inputChannelMask);
 
             // if we accumulate, we need to subtract the initial data offset.
             if (accumulate) {
@@ -192,86 +229,131 @@
                 }
             }
 
-            auto stats = channelStatistics(output, FCC_2);
+            auto stats = channelStatistics(output, outChannels);
             // printf("power: %s %s\n", stats[0].toString().c_str(), stats[1].toString().c_str());
-            double power[FCC_2] = { stats[0].getPopVariance(), stats[1].getPopVariance() };
+            double power[outChannels];
+            for (size_t j = 0; j < outChannels; ++j) {
+                power[j] = stats[j].getPopVariance();
+            }
 
-            // Check symmetric power for pair channels on exchange of left/right position.
+            // Check symmetric power for pair channels on exchange of front left/right position.
             // to do this, we save previous power measurements.
             if (pairIndex >= 0 && pairIndex < index) {
-                EXPECT_NEAR_EPSILON(power[0], savedPower[pairIndex][1]);
-                EXPECT_NEAR_EPSILON(power[1], savedPower[pairIndex][0]);
+
+                for (unsigned j = 0; j < outChannels; ++j) {
+                    if (outPair[j] >= 0) {
+                        EXPECT_NEAR_EPSILON(power[j], savedPower[pairIndex][outPair[j]]);
+                        EXPECT_NEAR_EPSILON(power[outPair[j]], savedPower[pairIndex][j]);
+                    }
+                }
             }
-            savedPower[index][0] = power[0];
-            savedPower[index][1] = power[1];
+            for (unsigned j = 0; j < outChannels; ++j) {
+                savedPower[index][j] = power[j];
+            }
+
+            // For downmix to stereo, we compare exact values to a predefined matrix.
+            const bool checkExpectedPower = outputChannelMask == AUDIO_CHANNEL_OUT_STEREO;
+            constexpr size_t FL = 0;
+            constexpr size_t FR = 1;
 
             // Confirm exactly the mix amount prescribed by the existing ChannelMix effect.
             // For future changes to the ChannelMix effect, the nearness needs to be relaxed
             // to compare behavior S or earlier.
 
             constexpr float POWER_TOLERANCE = 0.001;
-            const float expectedPower =
+            const float expectedPower = checkExpectedPower ?
                     kScaleFromChannelIdxLeft[index] * kScaleFromChannelIdxLeft[index]
-                    + kScaleFromChannelIdxRight[index] * kScaleFromChannelIdxRight[index];
-            EXPECT_NEAR(expectedPower, power[0] + power[1], POWER_TOLERANCE);
+                    + kScaleFromChannelIdxRight[index] * kScaleFromChannelIdxRight[index] : 0;
+
+            if (checkExpectedPower) {
+                EXPECT_NEAR(expectedPower, power[FL] + power[FR], POWER_TOLERANCE);
+            }
             switch (side) {
             case AUDIO_GEOMETRY_SIDE_LEFT:
                 if (channelBit == AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER) {
                     break;
                 }
-                EXPECT_EQ(0.f, power[1]);
+                for (unsigned j = 0; j < outChannels; ++j) {
+                    if (outSide[j] == AUDIO_GEOMETRY_SIDE_RIGHT) {
+                        EXPECT_EQ(0.f, power[j]);
+                    }
+                }
                 break;
             case AUDIO_GEOMETRY_SIDE_RIGHT:
                 if (channelBit == AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER) {
                     break;
                 }
-                EXPECT_EQ(0.f, power[0]);
+                for (unsigned j = 0; j < outChannels; ++j) {
+                    if (outSide[j] == AUDIO_GEOMETRY_SIDE_LEFT) {
+                        EXPECT_EQ(0.f, power[j]);
+                    }
+                }
                 break;
             case AUDIO_GEOMETRY_SIDE_CENTER:
                 if (channelBit == AUDIO_CHANNEL_OUT_LOW_FREQUENCY) {
-                    if (channelMask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) {
-                        EXPECT_EQ(0.f, power[1]);
+                    if (inputChannelMask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) {
+                        EXPECT_EQ(0.f, power[FR]);
                         break;
                     } else {
-                        EXPECT_NEAR_EPSILON(power[0], power[1]); // always true
-                        EXPECT_NEAR(expectedPower, power[0] + power[1], POWER_TOLERANCE);
+                        for (unsigned j = 0; j < outChannels; ++j) {
+                            if (outPair[j] >= 0) {
+                                EXPECT_NEAR_EPSILON(power[j], power[outPair[j]]);
+                            }
+                        }
+                        if (checkExpectedPower) {
+                            EXPECT_NEAR(expectedPower, power[FL] + power[FR], POWER_TOLERANCE);
+                        }
                         break;
                     }
                 } else if (channelBit == AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) {
-                    EXPECT_EQ(0.f, power[0]);
-                    EXPECT_NEAR(expectedPower, power[1], POWER_TOLERANCE);
+                    EXPECT_EQ(0.f, power[FL]);
+                    if (checkExpectedPower) {
+                        EXPECT_NEAR(expectedPower, power[FR], POWER_TOLERANCE);
+                    }
                     break;
                 }
-                EXPECT_NEAR_EPSILON(power[0], power[1]);
+                for (unsigned j = 0; j < outChannels; ++j) {
+                    if (outPair[j] >= 0) {
+                        EXPECT_NEAR_EPSILON(power[j], power[outPair[j]]);
+                    }
+                }
                 break;
             }
         }
     }
 };
 
-TEST_P(ChannelMixTest, basic) {
-    testBalance(kChannelPositionMasks[std::get<0>(GetParam())], (bool)std::get<1>(GetParam()));
-}
+static constexpr const char *kName1[] = {"_replace_", "_accumulate_"};
 
-static const char *kName1[] = {"_replace_", "_accumulate_"};
+TEST_P(ChannelMixTest, balance) {
+    testBalance(kOutputChannelMasks[std::get<OUTPUT_CHANNEL_MASK_POSITION>(GetParam())],
+            kInputChannelMasks[std::get<INPUT_CHANNEL_MASK_POSITION>(GetParam())],
+            std::get<ACCUMULATE_POSITION>(GetParam()));
+}
 
 INSTANTIATE_TEST_SUITE_P(
         ChannelMixTestAll, ChannelMixTest,
         ::testing::Combine(
-                ::testing::Range(0, (int)std::size(kChannelPositionMasks)),
-                ::testing::Range(0, 2)
+                ::testing::Range(0, (int)std::size(kOutputChannelMasks)),
+                ::testing::Range(0, (int)std::size(kInputChannelMasks)),
+                ::testing::Bool() // accumulate off, on
                 ),
         [](const testing::TestParamInfo<ChannelMixTest::ParamType>& info) {
-            const int index = std::get<0>(info.param);
-            const audio_channel_mask_t channelMask = kChannelPositionMasks[index];
-            const std::string name = std::string(audio_channel_out_mask_to_string(channelMask)) +
-                    kName1[std::get<1>(info.param)] + std::to_string(index);
+            const int out_index = std::get<OUTPUT_CHANNEL_MASK_POSITION>(info.param);
+            const audio_channel_mask_t outputChannelMask = kOutputChannelMasks[out_index];
+            const int in_index = std::get<INPUT_CHANNEL_MASK_POSITION>(info.param);
+            const audio_channel_mask_t inputChannelMask = kInputChannelMasks[in_index];
+            const std::string name =
+                    std::string(audio_channel_out_mask_to_string(outputChannelMask)) +
+                    "_" + std::string(audio_channel_out_mask_to_string(inputChannelMask)) +
+                    kName1[std::get<ACCUMULATE_POSITION>(info.param)] + std::to_string(in_index);
             return name;
         });
 
+using StereoDownMix = android::audio_utils::channels::ChannelMix<AUDIO_CHANNEL_OUT_STEREO>;
 TEST(channelmix, input_channel_mask) {
     using namespace ::android::audio_utils::channels;
-    ChannelMix channelMix(AUDIO_CHANNEL_NONE);
+    StereoDownMix channelMix(AUDIO_CHANNEL_NONE);
 
     ASSERT_EQ(AUDIO_CHANNEL_NONE, channelMix.getInputChannelMask());
     ASSERT_TRUE(channelMix.setInputChannelMask(AUDIO_CHANNEL_OUT_STEREO));
diff --git a/audio_utils/tests/fifo_tests.cpp b/audio_utils/tests/fifo_tests.cpp
index b015810..8822959 100644
--- a/audio_utils/tests/fifo_tests.cpp
+++ b/audio_utils/tests/fifo_tests.cpp
@@ -19,6 +19,7 @@
 
 #include <errno.h>
 #include <limits.h>
+#include <memory>
 #include <stdlib.h>
 #include <string.h>
 #include <audio_utils/fifo.h>
@@ -96,8 +97,8 @@
         return EXIT_FAILURE;
     }
     size_t frameSize = sizeof(int16_t) * sfinfoin.channels;
-    int16_t *inputBuffer = new int16_t[sfinfoin.frames * sfinfoin.channels];
-    sf_count_t actualRead = sf_readf_short(sfin, inputBuffer, sfinfoin.frames);
+    std::unique_ptr<int16_t[]> inputBuffer(new int16_t[sfinfoin.frames * sfinfoin.channels]);
+    sf_count_t actualRead = sf_readf_short(sfin, inputBuffer.get(), sfinfoin.frames);
     if (actualRead != sfinfoin.frames) {
         fprintf(stderr, "%s: unexpected EOF or error\n", inputFile);
         sf_close(sfin);
@@ -105,11 +106,11 @@
     }
     sf_close(sfin);
 
-    int16_t *outputBuffer = new int16_t[sfinfoin.frames * sfinfoin.channels];
+    std::unique_ptr<int16_t[]> outputBuffer(new int16_t[sfinfoin.frames * sfinfoin.channels]);
     size_t framesWritten = 0;
     size_t framesRead = 0;
-    int16_t *fifoBuffer = new int16_t[frameCount * sfinfoin.channels];
-    audio_utils_fifo fifo(frameCount, frameSize, fifoBuffer, readerThrottlesWriter);
+    std::unique_ptr<int16_t[]> fifoBuffer(new int16_t[frameCount * sfinfoin.channels]);
+    audio_utils_fifo fifo(frameCount, frameSize, fifoBuffer.get(), readerThrottlesWriter);
     audio_utils_fifo_writer fifoWriter(fifo);
     audio_utils_fifo_reader fifoReader(fifo, readerThrottlesWriter);
     int fifoWriteCount = 0, fifoReadCount = 0;
@@ -210,10 +211,6 @@
             }
         }
     }
-    delete[] inputBuffer;
-    inputBuffer = NULL;
-    delete[] fifoBuffer;
-    fifoBuffer = NULL;
 
     printf("FIFO non-empty writes: %d, non-empty reads: %d\n", fifoWriteCount, fifoReadCount);
     printf("fill=%d, min=%d, max=%d\n", fifoFillLevel, minFillLevel, maxFillLevel);
@@ -229,9 +226,7 @@
         perror(outputFile);
         return EXIT_FAILURE;
     }
-    sf_count_t actualWritten = sf_writef_short(sfout, outputBuffer, framesRead);
-    delete[] outputBuffer;
-    outputBuffer = NULL;
+    sf_count_t actualWritten = sf_writef_short(sfout, outputBuffer.get(), framesRead);
 
     if (actualWritten != (sf_count_t) framesRead) {
         fprintf(stderr, "%s: unexpected error\n", outputFile);
diff --git a/audio_utils/tests/hal_smoothness_tests.cpp b/audio_utils/tests/hal_smoothness_tests.cpp
new file mode 100644
index 0000000..0038df6
--- /dev/null
+++ b/audio_utils/tests/hal_smoothness_tests.cpp
@@ -0,0 +1,350 @@
+#include <audio_utils/hal_smoothness.h>
+#include <errno.h>
+#include <float.h>
+#include <gtest/gtest.h>
+#include <limits.h>
+
+#include <memory>
+
+struct TestDeleter {
+  void operator()(hal_smoothness *p) { hal_smoothness_free(&p); }
+};
+
+struct custom_private_data {
+  bool ran_callback;
+
+  hal_smoothness_metrics metrics;
+};
+
+void custom_flush(hal_smoothness_metrics *metrics, void *private_data) {
+  custom_private_data *data = (custom_private_data *)private_data;
+
+  data->ran_callback = true;
+
+  memcpy(&data->metrics, metrics, sizeof(hal_smoothness_metrics));
+}
+
+class HalSmoothnessTest : public ::testing::Test {
+ protected:
+  void CommonSmoothnessInit(unsigned int num_writes_to_log) {
+    hal_smoothness *smoothness_init;
+    data = {};
+    data.ran_callback = false;
+    int result =
+        hal_smoothness_initialize(&smoothness_init, HAL_SMOOTHNESS_VERSION_1,
+                                  num_writes_to_log, custom_flush, &data);
+    ASSERT_EQ(result, 0);
+    smoothness = std::unique_ptr<hal_smoothness, TestDeleter>{smoothness_init};
+  }
+
+  std::unique_ptr<hal_smoothness, TestDeleter> smoothness;
+  custom_private_data data;
+};
+
+// Test that the callback runs after the total write count is equal to
+// "num_writes_to_log".
+TEST_F(HalSmoothnessTest, callback_should_run) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 1));
+  // Since "num_writes_to_log" is set to 1, after this write, the callback
+  // should run.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100,
+                                     /* timestamp */ 200);
+
+  EXPECT_EQ(data.ran_callback, true);
+}
+
+// Test that the callback should not run if the total write count is less than
+// "num_writes_to_log".
+TEST_F(HalSmoothnessTest, callback_should_not_run) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 2));
+
+  // Since "num_writes_to_log" is set to 2, after this write, the callback
+  // should NOT run.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100,
+                                     /* timestamp */ 200);
+  EXPECT_EQ(data.ran_callback, false);
+
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100,
+                                     /* timestamp */ 200);
+  EXPECT_EQ(data.ran_callback, true);
+}
+
+// Test that metric values in "struct hal_smoothness_metrics" that is passed
+// into the callback are correct.
+TEST_F(HalSmoothnessTest, verify_metrics) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 6));
+
+  unsigned int timestamp = 200;
+
+  // Simulate how these increment methods would be called during a real runtime.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000, timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost= */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100, timestamp++);
+  smoothness->increment_overrun(smoothness.get(), /* frames_lost */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100, timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000, timestamp);
+
+  EXPECT_EQ(data.metrics.underrun_count, 2U);
+  EXPECT_EQ(data.metrics.overrun_count, 1U);
+  EXPECT_EQ(data.metrics.total_writes, 6U);
+  EXPECT_EQ(data.metrics.total_frames_written, 3300U);
+  EXPECT_EQ(data.metrics.total_frames_lost, 2700U);
+  EXPECT_EQ(data.metrics.timestamp, timestamp);
+}
+
+// Test that metric values in "struct hal_smoothness_metrics" are reset after it
+// has met "num_writes_to_log".
+TEST_F(HalSmoothnessTest, verify_metrics_reset) {
+  const unsigned int num_write_to_log = 6;
+  ASSERT_NO_FATAL_FAILURE(HalSmoothnessTest::CommonSmoothnessInit(
+      /* num_writes_to_log= */ num_write_to_log));
+
+  int timestamp = 200;
+  // Simulate how these increment methods would be called during a real runtime.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000,
+                                     /* timestamp */ timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000,
+                                     /* timestamp */ timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost= */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100,
+                                     /* timestamp */ timestamp++);
+  smoothness->increment_overrun(smoothness.get(), /* frames_lost */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100,
+                                     /* timestamp */ timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100,
+                                     /* timestamp */ timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000,
+                                     /* timestamp */ timestamp++);
+
+  const unsigned int frames_written_on_write = 1000;
+  // At this point, metrics values should be reset. We will write 6 more times
+  // to trigger the callback again.
+  for (unsigned int i = 0; i < num_write_to_log; i++) {
+    // last timestamp will be 211 because 206 + 5.
+    smoothness->increment_total_writes(smoothness.get(),
+                                       /* frames_written= */
+                                       frames_written_on_write,
+                                       /* timestamp */ timestamp + i);
+  }
+
+  EXPECT_EQ(data.metrics.underrun_count, 0U);
+  EXPECT_EQ(data.metrics.overrun_count, 0U);
+  EXPECT_EQ(data.metrics.total_writes, 6U);
+  EXPECT_EQ(data.metrics.total_frames_written,
+            frames_written_on_write * num_write_to_log);
+  EXPECT_EQ(data.metrics.total_frames_lost, 0U);
+  EXPECT_EQ(data.metrics.timestamp, 211U);
+}
+
+// Test that metric values in "struct hal_smoothness_metrics" that is passed
+// into the callback are correct.
+TEST_F(HalSmoothnessTest, smoothness_value_10ish) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 5));
+
+  unsigned int timestamp = 200;
+  // Simulate how these increment methods would be called during a real runtime.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost */ 1);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 7999, timestamp++);
+
+  // -ln(1/40000)
+  EXPECT_FLOAT_EQ(data.metrics.smoothness_value, 10.596635);
+}
+
+TEST_F(HalSmoothnessTest, smoothness_value_6ish) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 5));
+
+  unsigned int timestamp = 200;
+  // Simulate how these increment methods would be called during a real runtime.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000, timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost */ 100);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 7900, timestamp++);
+
+  // -ln(1/400)
+  EXPECT_FLOAT_EQ(data.metrics.smoothness_value, 5.9914646);
+}
+
+TEST_F(HalSmoothnessTest, log_zero_smoothness_value) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 1));
+
+  // Simulate how these increment methods would be called during a real runtime.
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 8000,
+                                     /* timestamp */ 200);
+
+  // -ln(0). This should return DBL_MAX
+  EXPECT_FLOAT_EQ(data.metrics.smoothness_value, DBL_MAX);
+}
+
+TEST(hal_smoothness, init_fail_with_zero_num_writes_to_log) {
+  hal_smoothness *smoothness;
+  custom_private_data data;
+  int result = hal_smoothness_initialize(&smoothness, HAL_SMOOTHNESS_VERSION_1,
+                                         /* num_writes_to_log= */ 0,
+                                         custom_flush, &data);
+  EXPECT_EQ(result, -EINVAL);
+}
+
+TEST(hal_smoothness, init_pass_with_null_private_data) {
+  hal_smoothness *smoothness_init;
+  int result =
+      hal_smoothness_initialize(&smoothness_init, HAL_SMOOTHNESS_VERSION_1,
+                                /* num_writes_to_log= */ 6, custom_flush, NULL);
+  ASSERT_EQ(result, 0);
+  auto smoothness =
+      std::unique_ptr<hal_smoothness, TestDeleter>{smoothness_init};
+}
+
+TEST(hal_smoothness, hal_smoothness_free) {
+  hal_smoothness *smoothness;
+  custom_private_data data;
+  int result = hal_smoothness_initialize(&smoothness, HAL_SMOOTHNESS_VERSION_1,
+                                         /* num_writes_to_log= */ 6,
+                                         custom_flush, &data);
+  ASSERT_EQ(result, 0);
+
+  hal_smoothness_free(&smoothness);
+  EXPECT_EQ(smoothness, nullptr);
+}
+
+TEST(hal_smoothness, hal_smoothness_free_pass_in_null) {
+  hal_smoothness *smoothness;
+
+  hal_smoothness_free(&smoothness);
+  EXPECT_EQ(smoothness, nullptr);
+}
+
+// Excluded testing overflow for values that only increment by 1 (ie.
+// underrun_count, overrun_count, total_writes).
+TEST_F(HalSmoothnessTest, underrun_overflow) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 1));
+
+  ASSERT_EQ(smoothness->increment_underrun(smoothness.get(),
+                                           /* frames_lost= */ UINT_MAX),
+            0);
+  ASSERT_EQ(
+      smoothness->increment_underrun(smoothness.get(), /* frames_lost= */ 1),
+      -EOVERFLOW);
+}
+
+TEST_F(HalSmoothnessTest, overrun_overflow) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 1));
+
+  ASSERT_EQ(smoothness->increment_overrun(smoothness.get(),
+                                          /* frames_lost= */ UINT_MAX),
+            0);
+  ASSERT_EQ(
+      smoothness->increment_overrun(smoothness.get(), /* frames_lost= */ 1),
+      -EOVERFLOW);
+}
+
+TEST_F(HalSmoothnessTest, overflow_total_writes) {
+  ASSERT_NO_FATAL_FAILURE(
+      HalSmoothnessTest::CommonSmoothnessInit(/* num_writes_to_log= */ 2));
+
+  unsigned int timestamp = 200;
+  ASSERT_EQ(smoothness->increment_total_writes(smoothness.get(),
+                                               /* frames_written= */ UINT_MAX,
+                                               timestamp++),
+            0);
+  ASSERT_EQ(
+      smoothness->increment_total_writes(smoothness.get(),
+                                         /* frames_written= */ 1, timestamp++),
+      -EOVERFLOW);
+}
+
+TEST_F(HalSmoothnessTest, flush) {
+  const unsigned int num_write_to_log = 5;
+  ASSERT_NO_FATAL_FAILURE(HalSmoothnessTest::CommonSmoothnessInit(
+      /* num_writes_to_log= */ num_write_to_log));
+
+  unsigned int timestamp = 201;
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 1000, timestamp++);
+  smoothness->increment_underrun(smoothness.get(), /* frames_lost= */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100, timestamp++);
+  smoothness->increment_overrun(smoothness.get(), /* frames_lost */ 900);
+  smoothness->increment_total_writes(smoothness.get(),
+                                     /* frames_written= */ 100, timestamp);
+
+  // Verify metrics have not been flushed yet
+  ASSERT_EQ(data.metrics.underrun_count, 0U);
+  ASSERT_EQ(data.metrics.overrun_count, 0U);
+  ASSERT_EQ(data.metrics.total_writes, 0U);
+  ASSERT_EQ(data.metrics.total_frames_written, 0U);
+  ASSERT_EQ(data.metrics.total_frames_lost, 0U);
+  ASSERT_EQ(data.metrics.timestamp, 0U);
+
+  smoothness->flush(smoothness.get());
+
+  // Verify metrics have been flushed.
+  EXPECT_EQ(data.metrics.underrun_count, 1U);
+  EXPECT_EQ(data.metrics.overrun_count, 1U);
+  EXPECT_EQ(data.metrics.total_writes, 3U);
+  EXPECT_EQ(data.metrics.total_frames_written, 1200U);
+  EXPECT_EQ(data.metrics.total_frames_lost, 1800U);
+  EXPECT_EQ(data.metrics.timestamp, timestamp++);
+
+  const unsigned int frames_written_on_write = 1000;
+  // At this point, metrics values should be reset. We will write 5 more times
+  // to trigger the callback again.
+  for (unsigned int i = 0; i < num_write_to_log; i++) {
+    // last timestamp will be 208 because 204 + 4.
+    smoothness->increment_total_writes(smoothness.get(),
+                                       /* frames_written= */
+                                       frames_written_on_write,
+                                       /* timestamp */ timestamp + i);
+  }
+
+  EXPECT_EQ(data.metrics.underrun_count, 0U);
+  EXPECT_EQ(data.metrics.overrun_count, 0U);
+  EXPECT_EQ(data.metrics.total_writes, 5U);
+  EXPECT_EQ(data.metrics.total_frames_written,
+            frames_written_on_write * num_write_to_log);
+  EXPECT_EQ(data.metrics.total_frames_lost, 0U);
+  EXPECT_EQ(data.metrics.timestamp, 208U);
+}
diff --git a/audio_utils/tests/mel_aggregator_tests.cpp b/audio_utils/tests/mel_aggregator_tests.cpp
new file mode 100644
index 0000000..1d29837
--- /dev/null
+++ b/audio_utils/tests/mel_aggregator_tests.cpp
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_mel_aggregator_tests"
+
+#include <audio_utils/MelAggregator.h>
+
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+
+namespace android::audio_utils {
+namespace {
+
+constexpr int32_t kTestPortId = 1;
+constexpr float kFloatError = 0.1f;
+constexpr float kMelFloatError = 0.0001f;
+
+/** Value used for CSD calculation. 3 MELs with this value will cause a change of 1% in CSD. */
+constexpr float kCustomMelDbA = 107.f;
+
+using ::testing::ElementsAre;
+using ::testing::Pointwise;
+using ::testing::FloatNear;
+
+TEST(MelAggregatorTest, ResetAggregator) {
+    MelAggregator aggregator{100};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(1, {10.f, 10.f}, 0));
+    aggregator.reset(1.f, {CsdRecord(1, 1, 1.f, 1.f)});
+
+    EXPECT_EQ(aggregator.getCachedMelRecordsSize(), size_t{0});
+    EXPECT_EQ(aggregator.getCsd(), 1.f);
+    EXPECT_EQ(aggregator.getCsdRecordsSize(), size_t{1});
+}
+
+TEST(MelAggregatorTest, AggregateValuesFromDifferentStreams) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 100};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {10.f, 10.f},
+                                                     /* timestamp */0));
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {10.f, 10.f},
+                                                     /* timestamp */0));
+
+    ASSERT_EQ(aggregator.getCachedMelRecordsSize(), size_t{1});
+    aggregator.foreachCachedMel([](const MelRecord &record) {
+        EXPECT_EQ(record.portId, kTestPortId);
+        EXPECT_THAT(record.mels, Pointwise(FloatNear(kFloatError), {13.f, 13.f}));
+    });
+}
+
+TEST(MelAggregatorTest, AggregateWithOlderValues) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 100};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {1.f, 1.f},
+                                                     /* timestamp */1));
+    // second mel array contains values that are older than the first entry
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {2.f, 2.f, 2.f},
+                                                     /* timestamp */0));
+
+    ASSERT_EQ(aggregator.getCachedMelRecordsSize(), size_t{1});
+    aggregator.foreachCachedMel([](const MelRecord &record) {
+        EXPECT_EQ(record.portId, kTestPortId);
+        EXPECT_THAT(record.mels, Pointwise(FloatNear(kFloatError), {2.f, 4.5f, 4.5f}));
+    });
+}
+
+TEST(MelAggregatorTest, AggregateWithNewerValues) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 100};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {1.f, 1.f},
+                                                     /* timestamp */1));
+    // second mel array contains values that are newer than the first entry
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {2.f, 2.f},
+                                                     /* timestamp */2));
+
+    ASSERT_EQ(aggregator.getCachedMelRecordsSize(), size_t{1});
+    aggregator.foreachCachedMel([](const MelRecord &record) {
+        EXPECT_EQ(record.portId, kTestPortId);
+        EXPECT_THAT(record.mels, Pointwise(FloatNear(kFloatError), {1.f, 4.5f, 2.f}));
+    });
+}
+
+TEST(MelAggregatorTest, AggregateWithNonOverlappingValues) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 100};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {1.f, 1.f},
+                                                     /* timestamp */0));
+    // second mel array contains values that do not overlap with the first entry
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {1.f, 1.f},
+                                                     /* timestamp */2));
+
+    ASSERT_EQ(aggregator.getCachedMelRecordsSize(), size_t{2});
+    aggregator.foreachCachedMel([](const MelRecord &record) {
+        EXPECT_EQ(record.portId, kTestPortId);
+        EXPECT_THAT(record.mels, Pointwise(FloatNear(kFloatError), {1.f, 1.f}));
+    });
+}
+
+TEST(MelAggregatorTest, CheckMelIntervalSplit) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 100};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {3.f, 3.f}, /* timestamp */1));
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId, {3.f, 3.f, 3.f, 3.f},
+                                                     /* timestamp */0));
+
+    ASSERT_EQ(aggregator.getCachedMelRecordsSize(), size_t{1});
+
+    aggregator.foreachCachedMel([](const MelRecord &record) {
+        EXPECT_EQ(record.portId, kTestPortId);
+        EXPECT_THAT(record.mels, Pointwise(FloatNear(kFloatError), {3.f, 6.f, 6.f, 3.f}));
+    });
+}
+
+TEST(MelAggregatorTest, CsdRollingWindowDiscardsOldElements) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 3};
+
+    aggregator.aggregateAndAddNewMelRecord(MelRecord(kTestPortId,
+                                                     std::vector<float>(3, kCustomMelDbA),
+                                                     /* timestamp */0));
+    float csdValue = aggregator.getCsd();
+    auto records = aggregator.aggregateAndAddNewMelRecord(
+        MelRecord(kTestPortId, std::vector<float>(3, kCustomMelDbA), /* timestamp */3));
+
+    EXPECT_EQ(records.size(), size_t{2});  // new record and record to remove
+    EXPECT_TRUE(records[0].value * records[1].value < 0.f);
+    EXPECT_EQ(csdValue, aggregator.getCsd());
+    EXPECT_EQ(aggregator.getCsdRecordsSize(), size_t{1});
+}
+
+TEST(MelAggregatorTest, CsdReaches100PercWith107dB) {
+    MelAggregator aggregator{/* csdWindowSeconds */ 300};
+
+    // 287s of 107dB should produce at least 100% CSD
+    auto records = aggregator.aggregateAndAddNewMelRecord(
+        MelRecord(kTestPortId, std::vector<float>(288, kCustomMelDbA), /* timestamp */0));
+
+    // each record should have a CSD value between 1% and 2%
+    EXPECT_GE(records.size(), size_t{50});
+    EXPECT_GE(aggregator.getCsd(), 1.f);
+}
+
+TEST(MelAggregatorTest, CsdReaches100PercWith80dB) {
+    constexpr int64_t seconds40h = 40*3600;
+    MelAggregator aggregator{seconds40h};
+
+    // 40h of 80dB should produce (near) exactly 100% CSD
+    auto records = aggregator.aggregateAndAddNewMelRecord(
+        MelRecord(kTestPortId,
+                  std::vector<float>(seconds40h, 80.0f),
+            /* timestamp */0));
+
+    // each record should have a CSD value between 1% and 2%
+    EXPECT_GE(records.size(), size_t{50});
+    EXPECT_NEAR(aggregator.getCsd(), 1.f, kMelFloatError);
+}
+
+}  // namespace
+}  // namespace android::audio_utils
diff --git a/audio_utils/tests/mel_processor_tests.cpp b/audio_utils/tests/mel_processor_tests.cpp
new file mode 100644
index 0000000..ced43b2
--- /dev/null
+++ b/audio_utils/tests/mel_processor_tests.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_mel_processor_tests"
+
+#include <audio_utils/MelProcessor.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <chrono>
+#include <cmath>
+#include <tuple>
+#include <unordered_map>
+#include <log/log.h>
+
+namespace android::audio_utils {
+namespace {
+
+using ::testing::_;
+using ::testing::AtMost;
+using ::testing::Eq;
+using ::testing::Le;
+using ::testing::Gt;
+using ::testing::TestWithParam;
+using ::testing::Values;
+using ::testing::Combine;
+
+// Contains the sample rate and frequency for sine wave
+using AudioParam = std::tuple<int32_t, int32_t>;
+
+const std::unordered_map<int32_t, int32_t> kAWeightDelta1000 =
+    {{80, 23}, {100, 19}, {500, 3}, {1000, 0}, {2000, 1}, {3000, 1},
+     {8000, 1}};
+
+// TODO(b/276849537): should replace this with proper synchronization
+constexpr size_t kCallbackTimeoutInMs = 20;
+
+class MelCallbackMock : public MelProcessor::MelCallback {
+public:
+    MOCK_METHOD(void, onNewMelValues, (const std::vector<float>&, size_t, size_t,
+              audio_port_handle_t), (const override));
+    MOCK_METHOD(void, onMomentaryExposure, (float, audio_port_handle_t), (const override));
+};
+
+void appendSineWaveBuffer(std::vector<float>& buffer,
+                          float frequency,
+                          size_t samples,
+                          int32_t sampleRate,
+                          float attenuation = 1.0f) {
+    float rad = 2.0f * (float) M_PI * frequency / (float) sampleRate;
+    for (size_t j = 0; j < samples; ++j) {
+        buffer.push_back(sinf(j * rad) * attenuation);
+    }
+}
+
+class MelProcessorFixtureTest : public TestWithParam<AudioParam> {
+protected:
+    MelProcessorFixtureTest()
+        : mSampleRate(std::get<0>(GetParam())),
+          mFrequency(std::get<1>(GetParam())),
+          mMelCallback(sp<MelCallbackMock>::make()),
+          mProcessor(sp<MelProcessor>::make(mSampleRate,
+                                            1,
+                                            AUDIO_FORMAT_PCM_FLOAT,
+                                            mMelCallback,
+                                            mDeviceId,
+                                            mDefaultRs2,
+                                            mMaxMelsCallback)) {}
+
+
+    int32_t mSampleRate;
+    int32_t mFrequency;
+    size_t mMaxMelsCallback = 2;
+    audio_port_handle_t mDeviceId = 1;
+    int32_t mDefaultRs2 = 100;
+
+    sp<MelCallbackMock> mMelCallback;
+    sp<MelProcessor> mProcessor;
+
+    std::vector<float> mBuffer;
+};
+
+TEST(MelProcessorTest, UnsupportedSamplerateCheck) {
+    sp<MelCallbackMock> callback = sp<MelCallbackMock>::make();
+    auto processor = sp<MelProcessor>::make(1000, 1, AUDIO_FORMAT_PCM_FLOAT, callback, 1, 100);
+    std::vector<float> buffer(1000);
+
+    EXPECT_EQ(processor->process(buffer.data(), 1000), 0);
+}
+
+TEST_P(MelProcessorFixtureTest, CheckNumberOfCallbacks) {
+    if (mFrequency != 1000.0f) {
+        ALOGV("NOTE: CheckNumberOfCallbacks disabled for frequency %d", mFrequency);
+        return;
+    }
+
+    appendSineWaveBuffer(mBuffer, 1000.0f, mSampleRate * mMaxMelsCallback, mSampleRate);
+    appendSineWaveBuffer(mBuffer, 1000.0f, mSampleRate * mMaxMelsCallback, mSampleRate, 0.01f);
+
+    EXPECT_CALL(*mMelCallback.get(), onMomentaryExposure(Gt(mDefaultRs2), Eq(mDeviceId)))
+        .Times(AtMost(2));
+    EXPECT_CALL(*mMelCallback.get(), onNewMelValues(_, _, Le(size_t{2}), Eq(mDeviceId))).Times(1);
+
+    EXPECT_GT(mProcessor->process(mBuffer.data(), mBuffer.size() * sizeof(float)), 0);
+    std::this_thread::sleep_for(std::chrono::milliseconds(kCallbackTimeoutInMs));
+}
+
+TEST_P(MelProcessorFixtureTest, CheckAWeightingFrequency) {
+    appendSineWaveBuffer(mBuffer, mFrequency, mSampleRate, mSampleRate);
+    appendSineWaveBuffer(mBuffer, 1000.0f, mSampleRate, mSampleRate);
+
+    EXPECT_CALL(*mMelCallback.get(), onMomentaryExposure(Gt(mDefaultRs2), Eq(mDeviceId)))
+        .Times(AtMost(2));
+    EXPECT_CALL(*mMelCallback.get(), onNewMelValues(_, _, _, Eq(mDeviceId)))
+        .Times(1)
+        .WillRepeatedly([&] (const std::vector<float>& mel, size_t offset, size_t length,
+                                audio_port_handle_t deviceId) {
+            EXPECT_EQ(offset, size_t{0});
+            EXPECT_EQ(length, mMaxMelsCallback);
+            EXPECT_EQ(deviceId, mDeviceId);
+            int32_t deltaValue = abs(mel[0] - mel[1]);
+            ALOGV("MEL[%d] = %.2f,  MEL[1000] = %.2f\n", mFrequency, mel[0], mel[1]);
+            EXPECT_TRUE(abs(deltaValue - kAWeightDelta1000.at(mFrequency)) <= 1.f);
+        });
+
+    EXPECT_GT(mProcessor->process(mBuffer.data(), mBuffer.size() * sizeof(float)), 0);
+    std::this_thread::sleep_for(std::chrono::milliseconds(kCallbackTimeoutInMs));
+}
+
+TEST_P(MelProcessorFixtureTest, AttenuationCheck) {
+    auto processorAttenuation =
+        sp<MelProcessor>::make(mSampleRate, 1, AUDIO_FORMAT_PCM_FLOAT, mMelCallback, mDeviceId+1,
+                     mDefaultRs2, mMaxMelsCallback);
+    float attenuationDB = -10.f;
+    std::vector<float> bufferAttenuation;
+    float melAttenuation = 0.f;
+    float melNoAttenuation = 0.f;
+
+    processorAttenuation->setAttenuation(attenuationDB);
+    appendSineWaveBuffer(bufferAttenuation, mFrequency, mSampleRate * mMaxMelsCallback,
+                         mSampleRate);
+    appendSineWaveBuffer(mBuffer, mFrequency, mSampleRate * mMaxMelsCallback, mSampleRate);
+
+    EXPECT_CALL(*mMelCallback.get(), onMomentaryExposure(Gt(mDefaultRs2), _))
+        .Times(AtMost(2 * mMaxMelsCallback));
+    EXPECT_CALL(*mMelCallback.get(), onNewMelValues(_, _, _, _))
+        .Times(AtMost(2))
+        .WillRepeatedly([&] (const std::vector<float>& mel, size_t offset, size_t length,
+                                audio_port_handle_t deviceId) {
+            EXPECT_EQ(offset, size_t{0});
+            EXPECT_EQ(length, mMaxMelsCallback);
+
+            if (deviceId == mDeviceId) {
+                melNoAttenuation = mel[0];
+            } else {
+                melAttenuation = mel[0];
+            }
+        });
+    EXPECT_GT(mProcessor->process(mBuffer.data(),
+                                  mSampleRate * mMaxMelsCallback * sizeof(float)), 0);
+    EXPECT_GT(processorAttenuation->process(bufferAttenuation.data(),
+                                            mSampleRate * mMaxMelsCallback * sizeof(float)), 0);
+    std::this_thread::sleep_for(std::chrono::milliseconds(kCallbackTimeoutInMs));
+    // with attenuation for some frequencies the MEL callback does not exceed the RS1 threshold
+    if (melAttenuation > 0.f) {
+        EXPECT_EQ(fabsf(melAttenuation - melNoAttenuation), fabsf(attenuationDB));
+    }
+}
+
+INSTANTIATE_TEST_SUITE_P(MelProcessorTestSuite,
+    MelProcessorFixtureTest,
+    Combine(Values(44100, 48000), Values(80, 100, 500, 1000, 2000, 3000, 8000))
+);
+
+}  // namespace
+}  // namespace android::audio_utils
diff --git a/audio_utils/tests/spatializer_utils_tests.cpp b/audio_utils/tests/spatializer_utils_tests.cpp
new file mode 100644
index 0000000..7e8cc31
--- /dev/null
+++ b/audio_utils/tests/spatializer_utils_tests.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+#include <system/audio.h>
+
+TEST(spatializer_utils_tests, basic)
+{
+    // We don't spatialize mono at this time.
+    ASSERT_FALSE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_MONO));
+
+    // These common multichannel formats we spatialize.
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_QUAD));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_QUAD_SIDE));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_5POINT1));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_5POINT1_SIDE));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_5POINT1POINT4));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_7POINT1));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_7POINT1POINT2));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_7POINT1POINT4));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_9POINT1POINT4));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_9POINT1POINT6));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_13POINT_360RA));
+    ASSERT_TRUE(audio_is_channel_mask_spatialized(AUDIO_CHANNEL_OUT_22POINT2));
+}
diff --git a/camera/Android.bp b/camera/Android.bp
index 28a2a11..019e892 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -67,7 +67,7 @@
     local_include_dirs: ["include"],
 
     static_libs: [
-        "android.hardware.camera.metadata-V1-ndk",
+        "android.hardware.camera.metadata-V2-ndk",
     ],
 
     cflags: [
diff --git a/camera/docs/ACameraMetadata.mako b/camera/docs/ACameraMetadata.mako
index effa5ea..d62fcc3 100644
--- a/camera/docs/ACameraMetadata.mako
+++ b/camera/docs/ACameraMetadata.mako
@@ -67,7 +67,7 @@
 // System tags that should be hidden from users
 std::unordered_set<uint32_t> ACameraMetadata::sSystemTags ({
     % for sec in find_all_sections(metadata):
-      % for entry in remove_synthetic_or_fwk_only(find_unique_entries(sec)):
+      % for entry in remove_hal_non_visible(find_unique_entries(sec)):
         % if entry.applied_visibility == "system":
     ${entry.name | csym},
         % endif
diff --git a/camera/docs/CameraDeviceInfo.mako b/camera/docs/CameraDeviceInfo.mako
index 1f89865..21c6539 100644
--- a/camera/docs/CameraDeviceInfo.mako
+++ b/camera/docs/CameraDeviceInfo.mako
@@ -24,7 +24,8 @@
         HashSet<String> charsKeyNames = new HashSet<String>();
 % for sec in find_all_sections(metadata):
   % for entry in find_unique_entries(sec):
-    % if entry.kind == 'static' and entry.visibility in ("public", "java_public"):
+    % if entry.kind == 'static' and entry.visibility in \
+            ("public", "java_public", "fwk_java_public"):
         charsKeyNames.add(CameraCharacteristics.${jkey_identifier(entry.name)}.getName());
     % endif
   % endfor
diff --git a/camera/docs/CameraMetadataEnums.mako b/camera/docs/CameraMetadataEnums.mako
index 0541804..ee2431e 100644
--- a/camera/docs/CameraMetadataEnums.mako
+++ b/camera/docs/CameraMetadataEnums.mako
@@ -61,11 +61,13 @@
 % for outer_namespace in metadata.outer_namespaces: ## assumes single 'android' namespace
   % for section in outer_namespace.sections:
     % if section.find_first(lambda x: isinstance(x, metadata_model.Entry) and x.kind == xml_name) and \
-         any_visible(section, xml_name, ('public','hidden', 'ndk_public', 'java_public') ):
+         any_visible(section, xml_name, ('public','hidden', 'ndk_public', 'java_public', \
+         'fwk_java_public') ):
       % for inner_namespace in get_children_by_filtering_kind(section, xml_name, 'namespaces'):
 ## We only support 1 level of inner namespace, i.e. android.a.b and android.a.b.c works, but not android.a.b.c.d
 ## If we need to support more, we should use a recursive function here instead.. but the indentation gets trickier.
-        % for entry in filter_visibility(inner_namespace.entries, ('hidden','public', 'ndk_public', 'java_public')):
+        % for entry in filter_visibility(inner_namespace.entries, ('hidden','public', 'ndk_public', \
+        'java_public', 'fwk_java_public')):
           % if entry.enum \
               and not (entry.typedef and entry.typedef.languages.get('java')) \
               and not entry.is_clone():
@@ -75,7 +77,7 @@
       % endfor
       % for entry in filter_visibility( \
           get_children_by_filtering_kind(section, xml_name, 'entries'), \
-                                         ('hidden', 'public', 'ndk_public', 'java_public')):
+              ('hidden', 'public', 'ndk_public', 'java_public', 'fwk_java_public')):
         % if entry.enum \
              and not (entry.typedef and entry.typedef.languages.get('java')) \
              and not entry.is_clone():
diff --git a/camera/docs/CameraMetadataKeys.mako b/camera/docs/CameraMetadataKeys.mako
index ac3a920..6af0cdf 100644
--- a/camera/docs/CameraMetadataKeys.mako
+++ b/camera/docs/CameraMetadataKeys.mako
@@ -61,7 +61,7 @@
   % if entry.deprecated:
     @Deprecated
   % endif
-  % if entry.applied_visibility in ('public', 'java_public'):
+  % if entry.applied_visibility in ('public', 'java_public', 'fwk_java_public'):
     @PublicKey
     @NonNull
   % endif
@@ -77,17 +77,17 @@
 % for outer_namespace in metadata.outer_namespaces: ## assumes single 'android' namespace
   % for section in outer_namespace.sections:
     % if section.find_first(lambda x: isinstance(x, metadata_model.Entry) and x.kind == xml_name) and \
-         any_visible(section, xml_name, ('public','hidden','ndk_public','java_public','fwk_only') ):
+         any_visible(section, xml_name, ('public','hidden','ndk_public','java_public','fwk_only','fwk_java_public') ):
       % for inner_namespace in get_children_by_filtering_kind(section, xml_name, 'namespaces'):
 ## We only support 1 level of inner namespace, i.e. android.a.b and android.a.b.c works, but not android.a.b.c.d
 ## If we need to support more, we should use a recursive function here instead.. but the indentation gets trickier.
-        % for entry in filter_visibility(inner_namespace.merged_entries, ('hidden','public', 'ndk_public', 'java_public ', 'fwk_only')):
+        % for entry in filter_visibility(inner_namespace.merged_entries, ('hidden','public','ndk_public','java_public','fwk_only','fwk_java_public')):
 ${generate_key(entry)}
        % endfor
     % endfor
     % for entry in filter_visibility( \
         get_children_by_filtering_kind(section, xml_name, 'merged_entries'), \
-                                         ('hidden', 'public', 'ndk_public', 'java_public', 'fwk_only')):
+               ('hidden', 'public', 'ndk_public', 'java_public', 'fwk_only', 'fwk_java_public')):
 ${generate_key(entry)}
     % endfor
     % endif
diff --git a/camera/docs/HidlMetadata.mako b/camera/docs/HidlMetadata.mako
index f9323d9..e3e4e88 100644
--- a/camera/docs/HidlMetadata.mako
+++ b/camera/docs/HidlMetadata.mako
@@ -105,7 +105,7 @@
 enum CameraMetadataTag : ${'uint32_t' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else '@%d.%d::CameraMetadataTag' % (hal_major_version(), hal_minor_version()-1)} {
     % for sec in find_all_sections(metadata):
 <%    gotEntries = False %>\
-      % for idx,entry in enumerate(filter_added_in_hal_version(remove_synthetic_or_fwk_only(find_unique_entries(sec)), hal_major_version(), hal_minor_version())):
+      % for idx,entry in enumerate(filter_added_in_hal_version(remove_hal_non_visible(find_unique_entries(sec)), hal_major_version(), hal_minor_version())):
 <%      gotEntries = True %>\
     /** ${entry.name} [${entry.kind}, ${annotated_type(entry)}, ${entry.applied_visibility}]
         % if entry.description:
@@ -136,7 +136,7 @@
  * Enumeration definitions for the various entries that need them
  */
 % for sec in find_all_sections(metadata):
-  % for entry in filter_has_enum_values_added_in_hal_version(remove_synthetic_or_fwk_only(find_unique_entries(sec)), hal_major_version(), hal_minor_version()):
+  % for entry in filter_has_enum_values_added_in_hal_version(remove_hal_non_visible(find_unique_entries(sec)), hal_major_version(), hal_minor_version()):
     % if entry.enum:
 
 <%    isFirstValue = True %>\
diff --git a/camera/docs/aidl/CameraMetadataEnum.mako b/camera/docs/aidl/CameraMetadataEnum.mako
index 02822b3..7c6b364 100644
--- a/camera/docs/aidl/CameraMetadataEnum.mako
+++ b/camera/docs/aidl/CameraMetadataEnum.mako
@@ -51,7 +51,7 @@
   _entry = None
   _enum_name = None
   for sec in find_all_sections(metadata):
-    for entry in remove_synthetic_or_fwk_only(find_unique_entries(sec)):
+    for entry in remove_hal_non_visible(find_unique_entries(sec)):
       if entry.name == enum():
         _entry = entry
         _enum_name = entry.name.removeprefix("android.")
diff --git a/camera/docs/aidl/CameraMetadataTag.mako b/camera/docs/aidl/CameraMetadataTag.mako
index c4a3be5..e79fd64 100644
--- a/camera/docs/aidl/CameraMetadataTag.mako
+++ b/camera/docs/aidl/CameraMetadataTag.mako
@@ -57,7 +57,7 @@
 <% gap = False %>\
 % for sec_idx,sec in enumerate(find_all_sections(metadata)):
   % for idx,entry in enumerate(remove_synthetic(find_unique_entries(sec))):
-    % if entry.visibility == 'fwk_only':
+    % if entry.visibility in ('fwk_only', 'fwk_java_public'):
 <% gap = True %>\
 <% curIdx += 1 %>\
 <% continue %>\
diff --git a/camera/docs/camera_device_info.mako b/camera/docs/camera_device_info.mako
index f213f54..dadf128 100644
--- a/camera/docs/camera_device_info.mako
+++ b/camera/docs/camera_device_info.mako
@@ -90,6 +90,34 @@
 
     optional string cameraId = 1;
 
+    message Capability {
+        optional int32 mode = 1;
+        optional int32 maxStreamingWidth = 2;
+        optional int32 maxStreamingHeight = 3;
+        optional float minZoomRatio = 4;
+        optional float maxZoomRatio = 5;
+    }
+
+    message DynamicRangeProfiles {
+        repeated int64 dynamic_range_profiles = 1 [packed = true];
+    }
+
+    message DeviceStateSensorOrientationMap {
+        repeated int64 elements = 1 [packed = true];
+    }
+
+    message ColorSpaceProfiles {
+        message ColorSpaceProfile {
+            message FormatAndDynamicRangeProfiles {
+                optional int32 image_format = 1;
+                repeated int64 dynamic_range_profiles = 2 [packed = true];
+            }
+            optional int32 color_space = 1;
+            repeated FormatAndDynamicRangeProfiles image_formats = 2;
+        }
+        repeated ColorSpaceProfile color_space_profiles = 1;
+    }
+
     // Start of codegen fields
 <%
   section_idx = 1
@@ -100,7 +128,7 @@
   idx = section_idx * pow(2,16)
 %>\
 % for entry in find_unique_entries(sec):
-% if entry.kind == 'static' and entry.visibility in ("public", "java_public"):
+% if entry.kind == 'static' and entry.visibility in ("public", "java_public", "fwk_java_public"):
     ${protobuf_type(entry)} ${protobuf_name(entry)} = ${idx};
 <%
     idx += 1
diff --git a/camera/docs/camera_device_info.proto b/camera/docs/camera_device_info.proto
index 24ca312..2f922cb 100644
--- a/camera/docs/camera_device_info.proto
+++ b/camera/docs/camera_device_info.proto
@@ -90,6 +90,34 @@
 
     optional string cameraId = 1;
 
+    message Capability {
+        optional int32 mode = 1;
+        optional int32 maxStreamingWidth = 2;
+        optional int32 maxStreamingHeight = 3;
+        optional float minZoomRatio = 4;
+        optional float maxZoomRatio = 5;
+    }
+
+    message DynamicRangeProfiles {
+        repeated int64 dynamic_range_profiles = 1 [packed = true];
+    }
+
+    message DeviceStateSensorOrientationMap {
+        repeated int64 elements = 1 [packed = true];
+    }
+
+    message ColorSpaceProfiles {
+        message ColorSpaceProfile {
+            message FormatAndDynamicRangeProfiles {
+                optional int32 image_format = 1;
+                repeated int64 dynamic_range_profiles = 2 [packed = true];
+            }
+            optional int32 color_space = 1;
+            repeated FormatAndDynamicRangeProfiles image_formats = 2;
+        }
+        repeated ColorSpaceProfile color_space_profiles = 1;
+    }
+
     // Start of codegen fields
     repeated int32 android_colorCorrection_availableAberrationModes = 65536;
     repeated int32 android_control_aeAvailableAntibandingModes = 131072;
@@ -111,6 +139,8 @@
     optional RangeInt android_control_postRawSensitivityBoostRange = 131088;
     repeated Capability android_control_availableExtendedSceneModeCapabilities = 131089;
     optional RangeFloat android_control_zoomRatioRange = 131090;
+    repeated int32 android_control_availableSettingsOverrides = 131091;
+    optional bool android_control_autoframingAvailable = 131092;
     repeated int32 android_edge_availableEdgeModes = 262144;
     optional bool android_flash_info_available = 393216;
     optional int32 android_flash_info_strengthMaximumLevel = 393217;
@@ -138,6 +168,7 @@
     repeated int32 android_request_availableCapabilities = 851974;
     optional DynamicRangeProfiles android_request_availableDynamicRangeProfiles = 851975;
     optional int64 android_request_recommendedTenBitDynamicRangeProfile = 851976;
+    optional ColorSpaceProfiles android_request_availableColorSpaceProfiles = 851977;
     optional float android_scaler_availableMaxDigitalZoom = 917504;
     optional StreamConfigurations android_scaler_streamConfigurationMap = 917505;
     optional int32 android_scaler_croppingType = 917506;
@@ -165,6 +196,7 @@
     optional int32 android_sensor_orientation = 983050;
     repeated int32 android_sensor_availableTestPatternModes = 983051;
     repeated Rect android_sensor_opticalBlackRegions = 983052;
+    optional int32 android_sensor_readoutTimestamp = 983053;
     optional Rect android_sensor_info_activeArraySize = 1048576;
     optional RangeInt android_sensor_info_sensitivityRange = 1048577;
     optional int32 android_sensor_info_colorFilterArrangement = 1048578;
diff --git a/camera/docs/camera_metadata_asserts.mako b/camera/docs/camera_metadata_asserts.mako
index 94da986..f92c790 100644
--- a/camera/docs/camera_metadata_asserts.mako
+++ b/camera/docs/camera_metadata_asserts.mako
@@ -50,7 +50,7 @@
 #include <aidl/android/hardware/camera/metadata/CameraMetadataSectionStart.h>
 #include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
 % for sec in find_all_sections(metadata):
-  % for entry in remove_synthetic_or_fwk_only(find_unique_entries(sec)):
+  % for entry in remove_hal_non_visible(find_unique_entries(sec)):
     % if entry.enum:
 #include <aidl/android/hardware/camera/metadata/${aidl_enum_name(entry)}.h>
     % endif
@@ -74,13 +74,13 @@
         == static_cast<int>(${aidl_camera_metadata_section_start("VENDOR_SECTION_START")}));
 
 % for sec in find_all_sections(metadata):
-  % for idx,entry in enumerate(remove_synthetic_or_fwk_only(find_unique_entries(sec))):
+  % for idx,entry in enumerate(remove_hal_non_visible(find_unique_entries(sec))):
 static_assert(static_cast<int>(${csym(entry.name)})
         == static_cast<int>(${aidl_camera_metadata_tag(csym(entry.name))}));
   % endfor
 % endfor
 % for sec in find_all_sections(metadata):
-  % for entry in remove_synthetic_or_fwk_only(find_unique_entries(sec)):
+  % for entry in remove_hal_non_visible(find_unique_entries(sec)):
     % if entry.enum:
 
       % for val in aidl_enum_values(entry):
diff --git a/camera/docs/docs.html b/camera/docs/docs.html
index 44bef14..1a495ac 100644
--- a/camera/docs/docs.html
+++ b/camera/docs/docs.html
@@ -203,6 +203,10 @@
             ><a href="#controls_android.control.aeRegionsSet">android.control.aeRegionsSet</a></li>
             <li
             ><a href="#controls_android.control.awbRegionsSet">android.control.awbRegionsSet</a></li>
+            <li
+            ><a href="#controls_android.control.settingsOverride">android.control.settingsOverride</a></li>
+            <li
+            ><a href="#controls_android.control.autoframing">android.control.autoframing</a></li>
           </ul>
         </li>
         <li>
@@ -258,6 +262,10 @@
             ><a href="#static_android.control.zoomRatioRange">android.control.zoomRatioRange</a></li>
             <li
             ><a href="#static_android.control.availableHighSpeedVideoConfigurationsMaximumResolution">android.control.availableHighSpeedVideoConfigurationsMaximumResolution</a></li>
+            <li
+            ><a href="#static_android.control.availableSettingsOverrides">android.control.availableSettingsOverrides</a></li>
+            <li
+            ><a href="#static_android.control.autoframingAvailable">android.control.autoframingAvailable</a></li>
           </ul>
         </li>
         <li>
@@ -321,6 +329,14 @@
             ><a href="#dynamic_android.control.extendedSceneMode">android.control.extendedSceneMode</a></li>
             <li
             ><a href="#dynamic_android.control.zoomRatio">android.control.zoomRatio</a></li>
+            <li
+            ><a href="#dynamic_android.control.settingsOverride">android.control.settingsOverride</a></li>
+            <li
+            ><a href="#dynamic_android.control.settingsOverridingFrameNumber">android.control.settingsOverridingFrameNumber</a></li>
+            <li
+            ><a href="#dynamic_android.control.autoframing">android.control.autoframing</a></li>
+            <li
+            ><a href="#dynamic_android.control.autoframingState">android.control.autoframingState</a></li>
           </ul>
         </li>
       </ul> <!-- toc_section -->
@@ -711,6 +727,10 @@
             ><a href="#static_android.request.availableDynamicRangeProfilesMap">android.request.availableDynamicRangeProfilesMap</a></li>
             <li
             ><a href="#static_android.request.recommendedTenBitDynamicRangeProfile">android.request.recommendedTenBitDynamicRangeProfile</a></li>
+            <li
+            ><a href="#static_android.request.availableColorSpaceProfiles">android.request.availableColorSpaceProfiles</a></li>
+            <li
+            ><a href="#static_android.request.availableColorSpaceProfilesMap">android.request.availableColorSpaceProfilesMap</a></li>
           </ul>
         </li>
         <li>
@@ -831,6 +851,8 @@
             ><a href="#dynamic_android.scaler.cropRegion">android.scaler.cropRegion</a></li>
             <li
             ><a href="#dynamic_android.scaler.rotateAndCrop">android.scaler.rotateAndCrop</a></li>
+            <li
+            ><a href="#dynamic_android.scaler.rawCropRegion">android.scaler.rawCropRegion</a></li>
           </ul>
         </li>
       </ul> <!-- toc_section -->
@@ -924,6 +946,8 @@
             ><a href="#static_android.sensor.opaqueRawSize">android.sensor.opaqueRawSize</a></li>
             <li
             ><a href="#static_android.sensor.opaqueRawSizeMaximumResolution">android.sensor.opaqueRawSizeMaximumResolution</a></li>
+            <li
+            ><a href="#static_android.sensor.readoutTimestamp">android.sensor.readoutTimestamp</a></li>
           </ul>
         </li>
         <li>
@@ -1388,6 +1412,49 @@
         </li>
       </ul> <!-- toc_section -->
     </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_extension">extension</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.extension.strength">android.extension.strength</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.extension.currentType">android.extension.currentType</a></li>
+            <li
+            ><a href="#dynamic_android.extension.strength">android.extension.strength</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_jpegr">jpegr</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.jpegr.availableJpegRStreamConfigurations">android.jpegr.availableJpegRStreamConfigurations</a></li>
+            <li
+            ><a href="#static_android.jpegr.availableJpegRMinFrameDurations">android.jpegr.availableJpegRMinFrameDurations</a></li>
+            <li
+            ><a href="#static_android.jpegr.availableJpegRStallDurations">android.jpegr.availableJpegRStallDurations</a></li>
+            <li
+            ><a href="#static_android.jpegr.availableJpegRStreamConfigurationsMaximumResolution">android.jpegr.availableJpegRStreamConfigurationsMaximumResolution</a></li>
+            <li
+            ><a href="#static_android.jpegr.availableJpegRMinFrameDurationsMaximumResolution">android.jpegr.availableJpegRMinFrameDurationsMaximumResolution</a></li>
+            <li
+            ><a href="#static_android.jpegr.availableJpegRStallDurationsMaximumResolution">android.jpegr.availableJpegRStallDurationsMaximumResolution</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
   </ul>
 
 
@@ -2691,7 +2758,7 @@
                     <span class="entry_type_enum_name">ON_EXTERNAL_FLASH (v3.3)</span>
                     <span class="entry_type_enum_notes"><p>An external flash has been turned on.<wbr/></p>
 <p>It informs the camera device that an external flash has been turned on,<wbr/> and that
-metering (and continuous focus if active) should be quickly recaculated to account
+metering (and continuous focus if active) should be quickly recalculated to account
 for the external flash.<wbr/> Otherwise,<wbr/> this mode acts like ON.<wbr/></p>
 <p>When the external flash is turned off,<wbr/> AE mode should be changed to one of the
 other available AE modes.<wbr/></p>
@@ -2869,7 +2936,9 @@
 mode.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/>
+capability or devices where
+<a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>
 <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
@@ -3376,7 +3445,10 @@
 mode.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/> <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
+capability or devices where
+<a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>,<wbr/>
+<a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
@@ -3880,7 +3952,10 @@
 mode.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/> <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
+capability or devices where
+<a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>,<wbr/>
+<a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
@@ -5318,6 +5393,248 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="controls_android.control.settingsOverride">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>settings<wbr/>Override
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>No keys are applied sooner than the other keys when applying CaptureRequest
+settings to the camera device.<wbr/> This is the default value.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZOOM (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Zoom related keys are applied sooner than the other keys in the CaptureRequest.<wbr/> The
+zoom related keys are:</p>
+<ul>
+<li><a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a></li>
+<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
+<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
+</ul>
+<p>Even though <a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>,<wbr/> <a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>,<wbr/>
+and <a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a> are not directly zoom related,<wbr/> applications
+typically scale these regions together with <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> to have a
+consistent mapping within the current field of view.<wbr/> In this aspect,<wbr/> they are
+related to <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> and <a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">VENDOR_START (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                    <span class="entry_type_enum_value">0x4000</span>
+                    <span class="entry_type_enum_notes"><p>Vendor defined settingsOverride.<wbr/> These depend on vendor implementation.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired CaptureRequest settings override with which certain keys are
+applied earlier so that they can take effect sooner.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableSettingsOverrides">android.<wbr/>control.<wbr/>available<wbr/>Settings<wbr/>Overrides</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>There are some CaptureRequest keys which can be applied earlier than others
+when controls within a CaptureRequest aren't required to take effect at the same time.<wbr/>
+One such example is zoom.<wbr/> Zoom can be applied at a later stage of the camera pipeline.<wbr/>
+As soon as the camera device receives the CaptureRequest,<wbr/> it can apply the requested
+zoom value onto an earlier request that's already in the pipeline,<wbr/> thus improving zoom
+latency.<wbr/></p>
+<p>This key's value in the capture result reflects whether the controls for this capture
+are overridden "by" a newer request.<wbr/> This means that if a capture request turns on
+settings override,<wbr/> the capture result of an earlier request will contain the key value
+of ZOOM.<wbr/> On the other hand,<wbr/> if a capture request has settings override turned on,<wbr/>
+but all newer requests have it turned off,<wbr/> the key's value in the capture result will
+be OFF because this capture isn't overridden by a newer capture.<wbr/> In the two examples
+below,<wbr/> the capture results columns illustrate the settingsOverride values in different
+scenarios.<wbr/></p>
+<p>Assuming the zoom settings override can speed up by 1 frame,<wbr/> below example illustrates
+the speed-up at the start of capture session:</p>
+<pre><code>Camera session created
+Request 1 (zoom=1.<wbr/>0x,<wbr/> override=ZOOM) -&gt;
+Request 2 (zoom=1.<wbr/>2x,<wbr/> override=ZOOM) -&gt;
+Request 3 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM) -&gt;  Result 1 (zoom=1.<wbr/>2x,<wbr/> override=ZOOM)
+Request 4 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM) -&gt;  Result 2 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM)
+Request 5 (zoom=1.<wbr/>8x,<wbr/> override=ZOOM) -&gt;  Result 3 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM)
+                                     -&gt;  Result 4 (zoom=1.<wbr/>8x,<wbr/> override=ZOOM)
+                                     -&gt;  Result 5 (zoom=1.<wbr/>8x,<wbr/> override=OFF)
+</code></pre>
+<p>The application can turn on settings override and use zoom as normal.<wbr/> The example
+shows that the later zoom values (1.<wbr/>2x,<wbr/> 1.<wbr/>4x,<wbr/> 1.<wbr/>6x,<wbr/> and 1.<wbr/>8x) overwrite the zoom
+values (1.<wbr/>0x,<wbr/> 1.<wbr/>2x,<wbr/> 1.<wbr/>4x,<wbr/> and 1.<wbr/>6x) of earlier requests (#1,<wbr/> #2,<wbr/> #3,<wbr/> and #4).<wbr/></p>
+<p>The application must make sure the settings override doesn't interfere with user
+journeys requiring simultaneous application of all controls in CaptureRequest on the
+requested output targets.<wbr/> For example,<wbr/> if the application takes a still capture using
+CameraCaptureSession#capture,<wbr/> and the repeating request immediately sets a different
+zoom value using override,<wbr/> the inflight still capture could have its zoom value
+overwritten unexpectedly.<wbr/></p>
+<p>So the application is strongly recommended to turn off settingsOverride when taking
+still/<wbr/>burst captures,<wbr/> and turn it back on when there is only repeating viewfinder
+request and no inflight still/<wbr/>burst captures.<wbr/></p>
+<p>Below is the example demonstrating the transitions in and out of the
+settings override:</p>
+<pre><code>Request 1 (zoom=1.<wbr/>0x,<wbr/> override=OFF)
+Request 2 (zoom=1.<wbr/>2x,<wbr/> override=OFF)
+Request 3 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM)  -&gt; Result 1 (zoom=1.<wbr/>0x,<wbr/> override=OFF)
+Request 4 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM)  -&gt; Result 2 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM)
+Request 5 (zoom=1.<wbr/>8x,<wbr/> override=OFF)   -&gt; Result 3 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM)
+                                      -&gt; Result 4 (zoom=1.<wbr/>6x,<wbr/> override=OFF)
+                                      -&gt; Result 5 (zoom=1.<wbr/>8x,<wbr/> override=OFF)
+</code></pre>
+<p>This example shows that:</p>
+<ul>
+<li>The application "ramps in" settings override by setting the control to ZOOM.<wbr/>
+In the example,<wbr/> request #3 enables zoom settings override.<wbr/> Because the camera device
+can speed up applying zoom by 1 frame,<wbr/> the output of request #2 has 1.<wbr/>4x zoom,<wbr/> the
+value specified in request #3.<wbr/></li>
+<li>The application "ramps out" of settings override by setting the control to OFF.<wbr/> In
+the example,<wbr/> request #5 changes the override to OFF.<wbr/> Because request #4's zoom
+takes effect in result #3,<wbr/> result #4's zoom remains the same until the new value takes
+effect in result #5.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The HAL must set this value to OFF in all of the Capture templates.<wbr/></p>
+<p>Typically the HAL unblocks processCaptureRequest at the same rate as the sensor capture,<wbr/>
+and the HAL queues the capture settings in its pipeline when processCaptureRequest
+returns.<wbr/> However,<wbr/> when the settings override is enabled,<wbr/> the HAL can optimize the
+overridden controls' latency by applying them as soon as processCaptureRequest is
+called,<wbr/> rather than when it's unblocked.<wbr/></p>
+<p>If zoom settings override is on,<wbr/> when zooming in,<wbr/> the HAL must be able to apply the
+zoom related settings at least 1 frame ahead.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.autoframing">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>autoframing
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Disable autoframing.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Enable autoframing to keep people in the frame's field of view.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                    <span class="entry_type_enum_notes"><p>Automatically select ON or OFF based on the system level preferences.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Automatic crop,<wbr/> pan and zoom to keep objects in the center of the frame.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Auto-framing is a special mode provided by the camera device to dynamically crop,<wbr/> zoom
+or pan the camera feed to try to ensure that the people in a scene occupy a reasonable
+portion of the viewport.<wbr/> It is primarily designed to support video calling in
+situations where the user isn't directly in front of the device,<wbr/> especially for
+wide-angle cameras.<wbr/>
+<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> and <a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a> in CaptureResult will be used
+to denote the coordinates of the auto-framed region.<wbr/>
+Zoom and video stabilization controls are disabled when auto-framing is enabled.<wbr/> The 3A
+regions must map the screen coordinates into the scaler crop returned from the capture
+result instead of using the active array sensor.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>While auto-framing is ON,<wbr/> the aspect ratio of the auto-framed region must match the
+aspect ratio of the configured output stream.<wbr/>
+When reporting CaptureResult,<wbr/> SCALER_<wbr/>CROP_<wbr/>REGION might not adequately describe the
+actual sensor pixels.<wbr/> In this case,<wbr/> it is acceptable for the returned parameters to only
+be an approximation of the image sensor region that is actually used.<wbr/>
+When auto-framing is turned off,<wbr/> the transition should be immediate,<wbr/> with no panning or
+zooming to transition to the settings requested by the app.<wbr/> When it is turned on,<wbr/> the
+transition can be immediate or smooth.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -6690,7 +7007,7 @@
 
 
 
-                <div class="entry_type_notes">Range of supported post RAW sensitivitiy boosts</div>
+                <div class="entry_type_notes">Range of supported post RAW sensitivity boosts</div>
 
 
             </td> <!-- entry_type -->
@@ -7108,6 +7425,129 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="static_android.control.availableSettingsOverrides">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>available<wbr/>Settings<wbr/>Overrides
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of available settings overrides supported by the camera device that can
+be used to speed up certain controls.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.settingsOverride">android.<wbr/>control.<wbr/>settings<wbr/>Override</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>When not all controls within a CaptureRequest are required to take effect
+at the same time on the outputs,<wbr/> the camera device may apply certain request keys sooner
+to improve latency.<wbr/> This list contains such supported settings overrides.<wbr/> Each settings
+override corresponds to a set of CaptureRequest keys that can be sped up when applying.<wbr/></p>
+<p>A supported settings override can be passed in via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_SETTINGS_OVERRIDE">Capture<wbr/>Request#CONTROL_<wbr/>SETTINGS_<wbr/>OVERRIDE</a>,<wbr/> and the
+CaptureRequest keys corresponding to the override are applied as soon as possible,<wbr/> not
+bound by per-frame synchronization.<wbr/> See <a href="#controls_android.control.settingsOverride">android.<wbr/>control.<wbr/>settings<wbr/>Override</a> for the
+CaptureRequest keys for each override.<wbr/></p>
+<p>OFF is always included in this list.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.autoframingAvailable">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>autoframing<wbr/>Available
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FALSE (v3.9)</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TRUE (v3.9)</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device supports <a href="#controls_android.control.autoframing">android.<wbr/>control.<wbr/>autoframing</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Will be <code>false</code> if auto-framing is not available.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -7570,7 +8010,7 @@
                     <span class="entry_type_enum_name">ON_EXTERNAL_FLASH (v3.3)</span>
                     <span class="entry_type_enum_notes"><p>An external flash has been turned on.<wbr/></p>
 <p>It informs the camera device that an external flash has been turned on,<wbr/> and that
-metering (and continuous focus if active) should be quickly recaculated to account
+metering (and continuous focus if active) should be quickly recalculated to account
 for the external flash.<wbr/> Otherwise,<wbr/> this mode acts like ON.<wbr/></p>
 <p>When the external flash is turned off,<wbr/> AE mode should be changed to one of the
 other available AE modes.<wbr/></p>
@@ -7748,7 +8188,9 @@
 mode.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/>
+capability or devices where
+<a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>,<wbr/>
 <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
@@ -8562,7 +9004,10 @@
 mode.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/> <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
+capability or devices where
+<a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>,<wbr/>
+<a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
@@ -9612,7 +10057,10 @@
 mode.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/> <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
+capability or devices where
+<a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>,<wbr/>
+<a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
@@ -11119,6 +11567,376 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="dynamic_android.control.settingsOverride">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>settings<wbr/>Override
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>No keys are applied sooner than the other keys when applying CaptureRequest
+settings to the camera device.<wbr/> This is the default value.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZOOM (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Zoom related keys are applied sooner than the other keys in the CaptureRequest.<wbr/> The
+zoom related keys are:</p>
+<ul>
+<li><a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a></li>
+<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
+<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
+</ul>
+<p>Even though <a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>,<wbr/> <a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>,<wbr/>
+and <a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a> are not directly zoom related,<wbr/> applications
+typically scale these regions together with <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> to have a
+consistent mapping within the current field of view.<wbr/> In this aspect,<wbr/> they are
+related to <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> and <a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">VENDOR_START (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                    <span class="entry_type_enum_value">0x4000</span>
+                    <span class="entry_type_enum_notes"><p>Vendor defined settingsOverride.<wbr/> These depend on vendor implementation.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired CaptureRequest settings override with which certain keys are
+applied earlier so that they can take effect sooner.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableSettingsOverrides">android.<wbr/>control.<wbr/>available<wbr/>Settings<wbr/>Overrides</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>There are some CaptureRequest keys which can be applied earlier than others
+when controls within a CaptureRequest aren't required to take effect at the same time.<wbr/>
+One such example is zoom.<wbr/> Zoom can be applied at a later stage of the camera pipeline.<wbr/>
+As soon as the camera device receives the CaptureRequest,<wbr/> it can apply the requested
+zoom value onto an earlier request that's already in the pipeline,<wbr/> thus improves zoom
+latency.<wbr/></p>
+<p>This key's value in the capture result reflects whether the controls for this capture
+are overridden "by" a newer request.<wbr/> This means that if a capture request turns on
+settings override,<wbr/> the capture result of an earlier request will contain the key value
+of ZOOM.<wbr/> On the other hand,<wbr/> if a capture request has settings override turned on,<wbr/>
+but all newer requests have it turned off,<wbr/> the key's value in the capture result will
+be OFF because this capture isn't overridden by a newer capture.<wbr/> In the two examples
+below,<wbr/> the capture results columns illustrate the settingsOverride values in different
+scenarios.<wbr/></p>
+<p>Assuming the zoom settings override can speed up by 1 frame,<wbr/> below example illustrates
+the speed-up at the start of capture session:</p>
+<pre><code>Camera session created
+Request 1 (zoom=1.<wbr/>0x,<wbr/> override=ZOOM) -&gt;
+Request 2 (zoom=1.<wbr/>2x,<wbr/> override=ZOOM) -&gt;
+Request 3 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM) -&gt;  Result 1 (zoom=1.<wbr/>2x,<wbr/> override=ZOOM)
+Request 4 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM) -&gt;  Result 2 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM)
+Request 5 (zoom=1.<wbr/>8x,<wbr/> override=ZOOM) -&gt;  Result 3 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM)
+                                     -&gt;  Result 4 (zoom=1.<wbr/>8x,<wbr/> override=ZOOM)
+                                     -&gt;  Result 5 (zoom=1.<wbr/>8x,<wbr/> override=OFF)
+</code></pre>
+<p>The application can turn on settings override and use zoom as normal.<wbr/> The example
+shows that the later zoom values (1.<wbr/>2x,<wbr/> 1.<wbr/>4x,<wbr/> 1.<wbr/>6x,<wbr/> and 1.<wbr/>8x) overwrite the zoom
+values (1.<wbr/>0x,<wbr/> 1.<wbr/>2x,<wbr/> 1.<wbr/>4x,<wbr/> and 1.<wbr/>6x) of earlier requests (#1,<wbr/> #2,<wbr/> #3,<wbr/> and #4).<wbr/></p>
+<p>The application must make sure the settings override doesn't interfere with user
+journeys requiring simultaneous application of all controls in CaptureRequest on the
+requested output targets.<wbr/> For example,<wbr/> if the application takes a still capture using
+CameraCaptureSession#capture,<wbr/> and the repeating request immediately sets a different
+zoom value using override,<wbr/> the inflight still capture could have its zoom value
+overwritten unexpectedly.<wbr/></p>
+<p>So the application is strongly recommended to turn off settingsOverride when taking
+still/<wbr/>burst captures,<wbr/> and turn it back on when there is only repeating viewfinder
+request and no inflight still/<wbr/>burst captures.<wbr/></p>
+<p>Below is the example demonstrating the transitions in and out of the
+settings override:</p>
+<pre><code>Request 1 (zoom=1.<wbr/>0x,<wbr/> override=OFF)
+Request 2 (zoom=1.<wbr/>2x,<wbr/> override=OFF)
+Request 3 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM)  -&gt; Result 1 (zoom=1.<wbr/>0x,<wbr/> override=OFF)
+Request 4 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM)  -&gt; Result 2 (zoom=1.<wbr/>4x,<wbr/> override=ZOOM)
+Request 5 (zoom=1.<wbr/>8x,<wbr/> override=OFF)   -&gt; Result 3 (zoom=1.<wbr/>6x,<wbr/> override=ZOOM)
+                                      -&gt; Result 4 (zoom=1.<wbr/>6x,<wbr/> override=OFF)
+                                      -&gt; Result 5 (zoom=1.<wbr/>8x,<wbr/> override=OFF)
+</code></pre>
+<p>This example shows that:</p>
+<ul>
+<li>The application "ramps in" settings override by setting the control to ZOOM.<wbr/>
+In the example,<wbr/> request #3 enables zoom settings override.<wbr/> Because the camera device
+can speed up applying zoom by 1 frame,<wbr/> the outputs of request #2 has 1.<wbr/>4x zoom,<wbr/> the
+value specified in request #3.<wbr/></li>
+<li>The application "ramps out" of settings override by setting the control to OFF.<wbr/> In
+the example,<wbr/> request #5 changes the override to OFF.<wbr/> Because request #4's zoom
+takes effect in result #3,<wbr/> result #4's zoom remains the same until new value takes
+effect in result #5.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The HAL must set this value to OFF in all of the Capture templates.<wbr/></p>
+<p>Typically the HAL unblocks processCaptureRequest at the same rate as the sensor capture,<wbr/>
+and the HAL queues the capture settings in its pipeline when processCaptureRequest
+returns.<wbr/> However,<wbr/> when the settings override is enabled,<wbr/> the HAL can optimize the
+overridden controls' latency by applying them as soon as processCaptureRequest is
+called,<wbr/> rather than when it's unblocked.<wbr/></p>
+<p>If zoom settings override is on,<wbr/> when zooming in,<wbr/> the HAL must be able to apply the
+zoom related settings at least 1 frame ahead.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.settingsOverridingFrameNumber">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>settings<wbr/>Overriding<wbr/>Frame<wbr/>Number
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The frame number of the newer request overriding this capture.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Must be equal to the frame number of the result if this capture isn't
+overridden by a newer request,<wbr/> i.<wbr/>e.<wbr/> if <a href="#controls_android.control.settingsOverride">android.<wbr/>control.<wbr/>settings<wbr/>Override</a> is OFF
+in the capture result.<wbr/> On the other hand,<wbr/> if the capture is overridden by a newer
+request,<wbr/> the value of this tag (unsigned) must be larger than the frame number of
+the capture result.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.autoframing">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>autoframing
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Disable autoframing.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Enable autoframing to keep people in the frame's field of view.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                    <span class="entry_type_enum_notes"><p>Automatically select ON or OFF based on the system level preferences.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Automatic crop,<wbr/> pan and zoom to keep objects in the center of the frame.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Auto-framing is a special mode provided by the camera device to dynamically crop,<wbr/> zoom
+or pan the camera feed to try to ensure that the people in a scene occupy a reasonable
+portion of the viewport.<wbr/> It is primarily designed to support video calling in
+situations where the user isn't directly in front of the device,<wbr/> especially for
+wide-angle cameras.<wbr/>
+<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> and <a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a> in CaptureResult will be used
+to denote the coordinates of the auto-framed region.<wbr/>
+Zoom and video stabilization controls are disabled when auto-framing is enabled.<wbr/> The 3A
+regions must map the screen coordinates into the scaler crop returned from the capture
+result instead of using the active array sensor.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>While auto-framing is ON,<wbr/> the aspect ratio of the auto-framed region must match the
+aspect ratio of the configured output stream.<wbr/>
+When reporting CaptureResult,<wbr/> SCALER_<wbr/>CROP_<wbr/>REGION might not adequately describe the
+actual sensor pixels.<wbr/> In this case,<wbr/> it is acceptable for the returned parameters to only
+be an approximation of the image sensor region that is actually used.<wbr/>
+When auto-framing is turned off,<wbr/> the transition should be immediate,<wbr/> with no panning or
+zooming to transition to the settings requested by the app.<wbr/> When it is turned on,<wbr/> the
+transition can be immediate or smooth.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.autoframingState">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>autoframing<wbr/>State
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">INACTIVE (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Auto-framing is inactive.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FRAMING (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Auto-framing is in process - either zooming in,<wbr/> zooming out or pan is taking place.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONVERGED (v3.9)</span>
+                    <span class="entry_type_enum_notes"><p>Auto-framing has reached a stable state (frame/<wbr/>fov is not being adjusted).<wbr/> The state
+may transition back to FRAMING if the scene changes.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Current state of auto-framing.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>When the camera doesn't have auto-framing available (i.<wbr/>e.<wbr/>
+<code><a href="#static_android.control.autoframingAvailable">android.<wbr/>control.<wbr/>autoframing<wbr/>Available</a></code> == false) or it is not enabled (i.<wbr/>e.<wbr/>
+<code><a href="#controls_android.control.autoframing">android.<wbr/>control.<wbr/>autoframing</a></code> == OFF),<wbr/> the state will always be INACTIVE.<wbr/>
+Other states indicate the current auto-framing state:</p>
+<ul>
+<li>When <code><a href="#controls_android.control.autoframing">android.<wbr/>control.<wbr/>autoframing</a></code> is set to ON,<wbr/> auto-framing will take
+place.<wbr/> While the frame is aligning itself to center the object (doing things like
+zooming in,<wbr/> zooming out or pan),<wbr/> the state will be FRAMING.<wbr/></li>
+<li>When field of view is not being adjusted anymore and has reached a stable state,<wbr/> the
+state will be CONVERGED.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -12085,6 +12903,10 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#turnOnTorchWithStrengthLevel">CameraManager#turnOnTorchWithStrengthLevel</a>.<wbr/>
 If this value is equal to 1,<wbr/> flashlight brightness control is not supported.<wbr/>
 The value for this key will be null for devices with no flash unit.<wbr/></p>
+<p>The maximum value is guaranteed to be safe to use for an indefinite duration in
+terms of device flashlight lifespan,<wbr/> but may be too bright for comfort for many
+use cases.<wbr/> Use the default torch brightness value to avoid problems with an
+over-bright flashlight.<wbr/></p>
             </td>
           </tr>
 
@@ -19392,7 +20214,8 @@
 <ul>
 <li>Profile <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#HLG10">DynamicRangeProfiles#HLG10</a></li>
 <li>All mandatory stream combinations for this specific capability as per
-  documentation <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a></li>
+  documentation
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#10-bit-output-additional-guaranteed-configurations">CameraDevice#10-bit-output-additional-guaranteed-configurations</a></li>
 <li>In case the device is not able to capture some combination of supported
   standard 8-bit and/<wbr/>or 10-bit dynamic range profiles within the same capture request,<wbr/>
   then those constraints must be listed in
@@ -19430,9 +20253,28 @@
 </ul>
 <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">Camera<wbr/>Characteristics#SCALER_<wbr/>AVAILABLE_<wbr/>STREAM_<wbr/>USE_<wbr/>CASES</a>
 lists all of the supported stream use cases.<wbr/></p>
-<p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
-mandatory stream combinations involving stream use cases,<wbr/> which can also be queried
-via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.<wbr/></p></span>
+<p>Refer to
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">CameraDevice#stream-use-case-capability-additional-guaranteed-configurations</a>
+for the mandatory stream combinations involving stream use cases,<wbr/> which can also be
+queried via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COLOR_SPACE_PROFILES (v3.9)</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_hidden">[java_public]</span>
+                    <span class="entry_type_enum_notes"><p>The device supports querying the possible combinations of color spaces,<wbr/> image
+formats,<wbr/> and dynamic range profiles supported by the camera and requesting a
+particular color space for a session via
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a>.<wbr/></p>
+<p>Cameras that enable this capability may or may not also implement dynamic range
+profiles.<wbr/> If they don't,<wbr/>
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/ColorSpaceProfiles.html#getSupportedDynamicRangeProfiles">ColorSpaceProfiles#getSupportedDynamicRangeProfiles</a>
+will return only
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a> and
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/ColorSpaceProfiles.html#getSupportedColorSpacesForDynamicRange">ColorSpaceProfiles#getSupportedColorSpacesForDynamicRange</a>
+will assume support of the
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>
+profile in all combinations of color spaces and image formats.<wbr/></p></span>
                   </li>
                 </ul>
 
@@ -20394,6 +21236,181 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="static_android.request.availableColorSpaceProfiles">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>available<wbr/>Color<wbr/>Space<wbr/>Profiles
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [java_public as colorSpaceProfiles]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>An interface for querying the color space profiles supported by a camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>A color space profile is a combination of a color space,<wbr/> an image format,<wbr/> and a dynamic
+range profile.<wbr/> Camera clients can retrieve the list of supported color spaces by calling
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/ColorSpaceProfiles.html#getSupportedColorSpaces">ColorSpaceProfiles#getSupportedColorSpaces</a> or
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/ColorSpaceProfiles.html#getSupportedColorSpacesForDynamicRange">ColorSpaceProfiles#getSupportedColorSpacesForDynamicRange</a>.<wbr/>
+If a camera does not support the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>DYNAMIC_<wbr/>RANGE_<wbr/>TEN_<wbr/>BIT</a>
+capability,<wbr/> the dynamic range profile will always be
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>.<wbr/> Color space
+capabilities are queried in combination with an <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html">ImageFormat</a>.<wbr/>
+If a camera client wants to know the general color space capabilities of a camera device
+regardless of image format,<wbr/> it can specify <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#UNKNOWN">ImageFormat#UNKNOWN</a>.<wbr/>
+The color space for a session can be configured by setting the SessionConfiguration
+color space via <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableColorSpaceProfilesMap">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Color<wbr/>Space<wbr/>Profiles<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 3
+                </span>
+              <span class="entry_type_visibility"> [ndk_public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">UNSPECIFIED (v3.9)</span>
+                    <span class="entry_type_enum_value">-1</span>
+                    <span class="entry_type_enum_notes"><p>Default value,<wbr/> when not explicitly specified.<wbr/> The Camera device will choose the color
+space to employ.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SRGB (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[system]</span>
+                    <span class="entry_type_enum_value">0</span>
+                    <span class="entry_type_enum_notes"><p>RGB color space sRGB standardized as IEC 61966-2.<wbr/>1:1999.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DISPLAY_P3 (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[system]</span>
+                    <span class="entry_type_enum_value">7</span>
+                    <span class="entry_type_enum_notes"><p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.<wbr/>1:1999.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BT2020_HLG (v3.9)</span>
+                    <span class="entry_type_enum_hidden">[system]</span>
+                    <span class="entry_type_enum_value">16</span>
+                    <span class="entry_type_enum_notes"><p>RGB color space BT.<wbr/>2100 standardized as Hybrid Log Gamma encoding.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all possible color space profiles supported by a camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>A color space profile is a combination of a color space,<wbr/> an image format,<wbr/> and a dynamic range
+profile.<wbr/> If a camera does not support the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>DYNAMIC_<wbr/>RANGE_<wbr/>TEN_<wbr/>BIT</a>
+capability,<wbr/> the dynamic range profile will always be
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>.<wbr/> Camera clients can
+use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+a color space.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The array contains three entries per supported profile:</p>
+<p>1) The supported color space.<wbr/>
+2) An image format which can be used with this color space.<wbr/>
+3) A bitmap of all compatible dynamic range profiles,<wbr/> if the device is HDR-capable.<wbr/></p>
+<p>The possible values for #1 are the positive values of the
+ANDROID_<wbr/>REQUEST_<wbr/>AVAILABLE_<wbr/>COLOR_<wbr/>SPACE_<wbr/>PROFILES_<wbr/>MAP_<wbr/>* enum,<wbr/> which is equivalent to
+<a href="https://developer.android.com/reference/android/graphics/ColorSpace/Named.html">Named</a> and its ordinals.<wbr/> UNSPECIFIED should not be
+used here.<wbr/> It should be noted that not all <a href="https://developer.android.com/reference/android/graphics/ColorSpace/Named.html">Named</a>
+values are supported,<wbr/> only those in the
+ANDROID_<wbr/>REQUEST_<wbr/>AVAILABLE_<wbr/>COLOR_<wbr/>SPACE_<wbr/>PROFILES_<wbr/>MAP_<wbr/>* enum.<wbr/></p>
+<p>The possible values for #2 consist of the public-facing image/<wbr/>pixel formats,<wbr/> found at
+<a href="https://developer.android.com/reference/android/graphics/ImageFormat.html">ImageFormat</a> and <a href="https://developer.android.com/reference/android/graphics/PixelFormat.html">PixelFormat</a>.<wbr/> Each map
+to a HAL pixel format except for <a href="https://developer.android.com/reference/android/graphics/ImageFormat/JPEG.html">JPEG</a>,<wbr/>
+<a href="https://developer.android.com/reference/android/graphics/ImageFormat/HEIC.html">HEIC</a>,<wbr/> and
+<a href="https://developer.android.com/reference/android/graphics/ImageFormat/DEPTH_JPEG.html">DEPTH_<wbr/>JPEG</a>.<wbr/> Depth formats besides DEPTH_<wbr/>JPEG are
+not applicable and should not be specified.<wbr/> If there are no constraints on the type of
+image format a color space is compatible with,<wbr/> this can be
+<a href="https://developer.android.com/reference/android/graphics/ImageFormat/UNKNOWN.html">UNKNOWN</a>.<wbr/></p>
+<p>If the device is not HDR-capable,<wbr/> #3 should always be
+ANDROID_<wbr/>REQUEST_<wbr/>AVAILABLE_<wbr/>DYNAMIC_<wbr/>RANGE_<wbr/>PROFILES_<wbr/>STANDARD.<wbr/> Otherwise,<wbr/> #3 should be a
+bitmap of the compatible ANDROID_<wbr/>REQUEST_<wbr/>AVAILABLE_<wbr/>DYNAMIC_<wbr/>RANGE_<wbr/>PROFILES_<wbr/>* values.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -20821,9 +21838,9 @@
 <p>Output streams use this rectangle to produce their output,<wbr/> cropping to a smaller region
 if necessary to maintain the stream's aspect ratio,<wbr/> then scaling the sensor input to
 match the output's configured resolution.<wbr/></p>
-<p>The crop region is applied after the RAW to other color space (e.<wbr/>g.<wbr/> YUV)
-conversion.<wbr/> Since raw streams (e.<wbr/>g.<wbr/> RAW16) don't have the conversion stage,<wbr/> they are not
-croppable.<wbr/> The crop region will be ignored by raw streams.<wbr/></p>
+<p>The crop region is usually applied after the RAW to other color space (e.<wbr/>g.<wbr/> YUV)
+conversion.<wbr/> As a result RAW streams are not croppable unless supported by the
+camera device.<wbr/> See <a href="#static_android.scaler.availableStreamUseCases">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Use<wbr/>Cases</a>#CROPPED_<wbr/>RAW for details.<wbr/></p>
 <p>For non-raw streams,<wbr/> any additional per-stream cropping will be done to maximize the
 final pixel area of the stream.<wbr/></p>
 <p>For example,<wbr/> if the crop region is set to a 4:3 aspect ratio,<wbr/> then 4:3 streams will use
@@ -20904,7 +21921,9 @@
 <a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a> for details.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/> <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
+capability or devices where <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></p>
+<p><a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
@@ -22692,8 +23711,9 @@
 or if the camera device isn't a primary rear/<wbr/>front camera,<wbr/> the minimum required output
 stream configurations are the same as for applications targeting SDK version older than
 31.<wbr/></p>
-<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for additional mandatory
-stream configurations on a per-capability basis.<wbr/></p>
+<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> and
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">CameraDevice#legacy-level-guaranteed-configurations</a>
+for additional mandatory stream configurations on a per-capability basis.<wbr/></p>
 <p>*1: For JPEG format,<wbr/> the sizes may be restricted by below conditions:</p>
 <ul>
 <li>The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
@@ -22751,7 +23771,7 @@
 <p>Note that for Performance Class 12 or higher primary cameras (first rear/<wbr/>front facing
 camera in the camera ID list),<wbr/> camera framework filters out JPEG sizes smaller than
 1080p depending on applications' targetSdkLevel.<wbr/> The camera HAL must still support the
-smaller JPEG sizes to maintain backward comopatibility.<wbr/></p>
+smaller JPEG sizes to maintain backward compatibility.<wbr/></p>
 <p>For LIMITED capability devices
 (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
 the HAL only has to list up to the maximum video size
@@ -23130,7 +24150,7 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL">Camera<wbr/>Characteristics#INFO_<wbr/>SUPPORTED_<wbr/>HARDWARE_<wbr/>LEVEL</a>
 and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES</a>.<wbr/>
 This is an app-readable conversion of the mandatory stream combination
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">tables</a>.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">tables</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -23155,8 +24175,7 @@
               <p>The array of
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">combinations</a> is
 generated according to the documented
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a> based on
-specific device level and capabilities.<wbr/>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">guideline</a> based on specific device level and capabilities.<wbr/>
 Clients can use the array as a quick reference to find an appropriate camera stream
 combination.<wbr/>
 As per documentation,<wbr/> the stream combinations with given PREVIEW,<wbr/> RECORD and
@@ -23211,7 +24230,7 @@
             <td class="entry_description">
               <p>An array of mandatory concurrent stream combinations.<wbr/>
 This is an app-readable conversion of the concurrent mandatory stream combination
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">tables</a>.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#concurrent-stream-guaranteed-configurations">tables</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -23236,8 +24255,7 @@
               <p>The array of
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">combinations</a> is
 generated according to the documented
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a> for each
-device which has its Id present in the set returned by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#concurrent-stream-guaranteed-configurations">guideline</a> for each device which has its Id present in the set returned by
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#getConcurrentCameraIds">CameraManager#getConcurrentCameraIds</a>.<wbr/>
 Clients can use the array as a quick reference to find an appropriate camera stream
 combination.<wbr/>
@@ -23536,7 +24554,8 @@
 <p>If a camera device supports multi-resolution output streams for a particular format,<wbr/> for
 each of its mandatory stream combinations,<wbr/> the camera device will support using a
 MultiResolutionImageReader for the MAXIMUM stream of supported formats.<wbr/> Refer to
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for additional details.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs">CameraDevice#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs</a>
+for additional details.<wbr/></p>
 <p>To use multi-resolution input streams,<wbr/> the supported formats can be queried by <a href="https://developer.android.com/reference/android/hardware/camera2/params/MultiResolutionStreamConfigurationMap.html#getInputFormats">MultiResolutionStreamConfigurationMap#getInputFormats</a>.<wbr/>
 A reprocessable CameraCaptureSession can then be created using an <a href="https://developer.android.com/reference/android/hardware/camera2/params/InputConfiguration.html">InputConfiguration</a> constructed with
 the input MultiResolutionStreamInfo group,<wbr/> queried by <a href="https://developer.android.com/reference/android/hardware/camera2/params/MultiResolutionStreamConfigurationMap.html#getInputInfo">MultiResolutionStreamConfigurationMap#getInputInfo</a>.<wbr/></p>
@@ -23544,8 +24563,8 @@
 {@code YUV} output,<wbr/> or multi-resolution {@code PRIVATE} input and multi-resolution
 {@code PRIVATE} output,<wbr/> {@code JPEG} and {@code YUV} are guaranteed to be supported
 multi-resolution output stream formats.<wbr/> Refer to
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for
-details about the additional mandatory stream combinations in this case.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs">CameraDevice#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs</a>
+for details about the additional mandatory stream combinations in this case.<wbr/></p>
             </td>
           </tr>
 
@@ -23937,7 +24956,7 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html">CaptureRequest</a> has <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> set
 to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/>
 This is an app-readable conversion of the maximum resolution mandatory stream combination
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">tables</a>.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#additional-guaranteed-combinations-for-ultra-high-resolution-sensors">tables</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -23962,8 +24981,7 @@
               <p>The array of
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">combinations</a> is
 generated according to the documented
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a> for each
-device which has the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#additional-guaranteed-combinations-for-ultra-high-resolution-sensors">guideline</a> for each device which has the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
 capability.<wbr/>
 Clients can use the array as a quick reference to find an appropriate camera stream
@@ -24015,7 +25033,7 @@
 10-bit output capability
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>DYNAMIC_<wbr/>RANGE_<wbr/>TEN_<wbr/>BIT</a>
 This is an app-readable conversion of the 10 bit output mandatory stream combination
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">tables</a>.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#10-bit-output-additional-guaranteed-configurations">tables</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -24040,8 +25058,7 @@
               <p>The array of
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">combinations</a> is
 generated according to the documented
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a> for each
-device which has the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#10-bit-output-additional-guaranteed-configurations">guideline</a> for each device which has the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>DYNAMIC_<wbr/>RANGE_<wbr/>TEN_<wbr/>BIT</a>
 capability.<wbr/>
 Clients can use the array as a quick reference to find an appropriate camera stream
@@ -24092,7 +25109,8 @@
               <p>An array of mandatory stream combinations which are applicable when device lists
 {@code PREVIEW_<wbr/>STABILIZATION} in <a href="#static_android.control.availableVideoStabilizationModes">android.<wbr/>control.<wbr/>available<wbr/>Video<wbr/>Stabilization<wbr/>Modes</a>.<wbr/>
 This is an app-readable conversion of the preview stabilization mandatory stream
-combination <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">tables</a>.<wbr/></p>
+combination
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#preview-stabilization-guaranteed-stream-configurations">tables</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -24117,8 +25135,7 @@
               <p>The array of
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">combinations</a> is
 generated according to the documented
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a> for each
-device which supports {@code PREVIEW_<wbr/>STABILIZATION}
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#preview-stabilization-guaranteed-stream-configurations">guideline</a> for each device which supports {@code PREVIEW_<wbr/>STABILIZATION}.<wbr/>
 Clients can use the array as a quick reference to find an appropriate camera stream
 combination.<wbr/>
 The mandatory stream combination array will be {@code null} in case the device does not
@@ -24329,6 +25346,29 @@
 variable frame rate settings to allow sufficient exposure time in low light.<wbr/></p></span>
                   </li>
                   <li>
+                    <span class="entry_type_enum_name">CROPPED_RAW (v3.9)</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_value">0x6</span>
+                    <span class="entry_type_enum_notes"><p>Cropped RAW stream when the client chooses to crop the field of view.<wbr/></p>
+<p>Certain types of image sensors can run in binned modes in order to improve signal to
+noise ratio while capturing frames.<wbr/> However,<wbr/> at certain zoom levels and /<wbr/> or when
+other scene conditions are deemed fit,<wbr/> the camera sub-system may choose to un-bin and
+remosaic the sensor's output.<wbr/> This results in a RAW frame which is cropped in field
+of view and yet has the same number of pixels as full field of view RAW,<wbr/> thereby
+improving image detail.<wbr/></p>
+<p>The resultant field of view of the RAW stream will be greater than or equal to
+croppable non-RAW streams.<wbr/> The effective crop region for this RAW stream will be
+reflected in the CaptureResult key <a href="#dynamic_android.scaler.rawCropRegion">android.<wbr/>scaler.<wbr/>raw<wbr/>Crop<wbr/>Region</a>.<wbr/></p>
+<p>If this stream use case is set on a non-RAW stream,<wbr/> i.<wbr/>e.<wbr/> not one of :</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a></li>
+</ul>
+<p>session configuration is not guaranteed to succeed.<wbr/></p>
+<p>This stream use case may not be supported on some devices.<wbr/></p></span>
+                  </li>
+                  <li>
                     <span class="entry_type_enum_name">VENDOR_START (v3.8)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
@@ -24389,8 +25429,8 @@
 <p>The guaranteed stream combinations related to stream use case for a camera device with
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>STREAM_<wbr/>USE_<wbr/>CASE</a>
 capability is documented in the camera device
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>.<wbr/> The
-application is strongly recommended to use one of the guaranteed stream combinations.<wbr/>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>.<wbr/> The application is strongly recommended to use one of the guaranteed stream
+combinations.<wbr/>
 If the application creates a session with a stream combination not in the guaranteed
 list,<wbr/> or with mixed DEFAULT and non-DEFAULT use cases within the same session,<wbr/>
 the camera device may ignore some stream use cases due to hardware constraints
@@ -24441,8 +25481,7 @@
             <td class="entry_description">
               <p>An array of mandatory stream combinations with stream use cases.<wbr/>
 This is an app-readable conversion of the mandatory stream combination
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">tables</a> with
-each stream's use case being set.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">tables</a> with each stream's use case being set.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -24467,8 +25506,7 @@
               <p>The array of
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">combinations</a> is
 generated according to the documented
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a> for a
-camera device with
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">guideline</a> for a camera device with
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">Camera<wbr/>Characteristics#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>STREAM_<wbr/>USE_<wbr/>CASE</a>
 capability.<wbr/>
 The mandatory stream combination array will be {@code null} in case the device doesn't
@@ -24585,9 +25623,9 @@
 <p>Output streams use this rectangle to produce their output,<wbr/> cropping to a smaller region
 if necessary to maintain the stream's aspect ratio,<wbr/> then scaling the sensor input to
 match the output's configured resolution.<wbr/></p>
-<p>The crop region is applied after the RAW to other color space (e.<wbr/>g.<wbr/> YUV)
-conversion.<wbr/> Since raw streams (e.<wbr/>g.<wbr/> RAW16) don't have the conversion stage,<wbr/> they are not
-croppable.<wbr/> The crop region will be ignored by raw streams.<wbr/></p>
+<p>The crop region is usually applied after the RAW to other color space (e.<wbr/>g.<wbr/> YUV)
+conversion.<wbr/> As a result RAW streams are not croppable unless supported by the
+camera device.<wbr/> See <a href="#static_android.scaler.availableStreamUseCases">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Use<wbr/>Cases</a>#CROPPED_<wbr/>RAW for details.<wbr/></p>
 <p>For non-raw streams,<wbr/> any additional per-stream cropping will be done to maximize the
 final pixel area of the stream.<wbr/></p>
 <p>For example,<wbr/> if the crop region is set to a 4:3 aspect ratio,<wbr/> then 4:3 streams will use
@@ -24668,7 +25706,9 @@
 <a href="#controls_android.control.zoomRatio">android.<wbr/>control.<wbr/>zoom<wbr/>Ratio</a> for details.<wbr/></p>
 <p>For camera devices with the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability,<wbr/> <a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
+capability or devices where <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></p>
+<p><a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> /<wbr/>
 <a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a> must be used as the
 coordinate system for requests where <a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
@@ -24863,6 +25903,94 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="dynamic_android.scaler.rawCropRegion">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>raw<wbr/>Crop<wbr/>Region
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The region of the sensor that corresponds to the RAW read out for this
+capture when the stream use case of a RAW stream is set to CROPPED_<wbr/>RAW.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates relative to
+          android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size or
+          android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size depending on distortion correction
+          capability and mode
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The coordinate system follows that of <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>This CaptureResult key will be set when the corresponding CaptureRequest has a RAW target
+with stream use case set to
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW">Camera<wbr/>Metadata#SCALER_<wbr/>AVAILABLE_<wbr/>STREAM_<wbr/>USE_<wbr/>CASES_<wbr/>CROPPED_<wbr/>RAW</a>,<wbr/>
+otherwise it will be {@code null}.<wbr/>
+The value of this key specifies the region of the sensor used for the RAW capture and can
+be used to calculate the corresponding field of view of RAW streams.<wbr/>
+This field of view will always be &gt;= field of view for (processed) non-RAW streams for the
+capture.<wbr/> Note: The region specified may not necessarily be centered.<wbr/></p>
+<p>For example: Assume a camera device has a pre correction active array size of
+{@code {0,<wbr/> 0,<wbr/> 1500,<wbr/> 2000}}.<wbr/> If the RAW_<wbr/>CROP_<wbr/>REGION is {@code {500,<wbr/> 375,<wbr/> 1500,<wbr/> 1125}},<wbr/> that
+corresponds to a centered crop of 1/<wbr/>4th of the full field of view RAW stream.<wbr/></p>
+<p>The metadata keys which describe properties of RAW frames:</p>
+<ul>
+<li><a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a></li>
+<li><a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a></li>
+<li><a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a></li>
+<li><a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a></li>
+<li><a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a></li>
+<li><a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a></li>
+<li><a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a></li>
+</ul>
+<p>should be interpreted in the effective after raw crop field-of-view coordinate system.<wbr/>
+In this coordinate system,<wbr/>
+{preCorrectionActiveArraySize.<wbr/>left,<wbr/> preCorrectionActiveArraySize.<wbr/>top} corresponds to
+the top left corner of the cropped RAW frame and
+{preCorrectionActiveArraySize.<wbr/>right,<wbr/> preCorrectionActiveArraySize.<wbr/>bottom} corresponds to
+the bottom right corner.<wbr/> Client applications must use the values of the keys
+in the CaptureResult metadata if present.<wbr/></p>
+<p>Crop regions (android.<wbr/>scaler.<wbr/>crop<wbr/>Region),<wbr/> AE/<wbr/>AWB/<wbr/>AF regions and face coordinates still
+use the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate system as usual.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -25423,15 +26551,11 @@
                 <ul class="entry_type_enum">
                   <li>
                     <span class="entry_type_enum_name">DEFAULT (v3.6)</span>
-                    <span class="entry_type_enum_notes"><p>This is the default sensor pixel mode.<wbr/> This is the only sensor pixel mode
-supported unless a camera device advertises
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>.<wbr/></p></span>
+                    <span class="entry_type_enum_notes"><p>This is the default sensor pixel mode.<wbr/></p></span>
                   </li>
                   <li>
                     <span class="entry_type_enum_name">MAXIMUM_RESOLUTION (v3.6)</span>
-                    <span class="entry_type_enum_notes"><p>This sensor pixel mode is offered by devices with capability
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>.<wbr/>
-In this mode,<wbr/> sensors typically do not bin pixels,<wbr/> as a result can offer larger
+                    <span class="entry_type_enum_notes"><p>In this mode,<wbr/> sensors typically do not bin pixels,<wbr/> as a result can offer larger
 image sizes.<wbr/></p></span>
                   </li>
                 </ul>
@@ -25467,13 +26591,10 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>DEFAULT</a> mode.<wbr/>
 When operating in
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>DEFAULT</a> mode,<wbr/> sensors
-with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability would typically perform pixel binning in order to improve low light
+would typically perform pixel binning in order to improve low light
 performance,<wbr/> noise reduction etc.<wbr/> However,<wbr/> in
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>
-mode (supported only
-by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-sensors),<wbr/> sensors typically operate in unbinned mode allowing for a larger image size.<wbr/>
+mode,<wbr/> sensors typically operate in unbinned mode allowing for a larger image size.<wbr/>
 The stream configurations supported in
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>
 mode are also different from those of
@@ -25487,7 +26608,32 @@
 <code><a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a></code>
 must not be mixed in the same CaptureRequest.<wbr/> In other words,<wbr/> these outputs are
 exclusive to each other.<wbr/>
-This key does not need to be set for reprocess requests.<wbr/></p>
+This key does not need to be set for reprocess requests.<wbr/>
+This key will be present on devices supporting the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
+capability.<wbr/> It may also be present on devices which do not support the aforementioned
+capability.<wbr/> In that case:</p>
+<ul>
+<li>
+<p>The mandatory stream combinations listed in
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
+  would not apply.<wbr/></p>
+</li>
+<li>
+<p>The bayer pattern of {@code RAW} streams when
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>
+  is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.<wbr/></p>
+</li>
+<li>
+<p>The following keys will always be present:</p>
+<ul>
+<li><a href="#static_android.scaler.streamConfigurationMapMaximumResolution">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map<wbr/>Maximum<wbr/>Resolution</a></li>
+<li><a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a></li>
+<li><a href="#static_android.sensor.info.pixelArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a></li>
+<li><a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a></li>
+</ul>
+</li>
+</ul>
             </td>
           </tr>
 
@@ -26477,7 +27623,8 @@
 counterparts.<wbr/>
 This key will only be present for devices which advertise the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability.<wbr/></p>
+capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a></p>
             </td>
           </tr>
 
@@ -26555,7 +27702,8 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/>
 This key will only be present for devices which advertise the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability.<wbr/></p>
+capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a></p>
             </td>
           </tr>
 
@@ -26622,7 +27770,8 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/>
 This key will only be present for devices which advertise the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability.<wbr/></p>
+capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a></p>
             </td>
           </tr>
 
@@ -26695,12 +27844,25 @@
 to improve various aspects of imaging such as noise reduction,<wbr/> low light
 performance etc.<wbr/> These groups can be of various sizes such as 2X2 (quad bayer),<wbr/>
 3X3 (nona-bayer).<wbr/> This key specifies the length and width of the pixels grouped under
-the same color filter.<wbr/></p>
-<p>This key will not be present if REMOSAIC_<wbr/>REPROCESSING is not supported,<wbr/> since RAW images
-will have a regular bayer pattern.<wbr/></p>
-<p>This key will not be present for sensors which don't have the
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability.<wbr/></p>
+the same color filter.<wbr/>
+In case the device has the
+<a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
+capability:</p>
+<ul>
+<li>This key will not be present if REMOSAIC_<wbr/>REPROCESSING is not supported,<wbr/> since RAW
+  images will have a regular bayer pattern.<wbr/></li>
+</ul>
+<p>In case the device does not have the
+<a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
+capability:</p>
+<ul>
+<li>This key will be present if
+  <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+  lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a>,<wbr/> since RAW
+  images may not necessarily have a regular bayer pattern when
+  <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE"><a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a></a> is set to
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></li>
+</ul>
             </td>
           </tr>
 
@@ -27971,6 +29133,96 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="static_android.sensor.readoutTimestamp">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>readout<wbr/>Timestamp
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [fwk_java_public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">NOT_SUPPORTED (v3.8)</span>
+                    <span class="entry_type_enum_notes"><p>This camera device doesn't support readout timestamp and onReadoutStarted
+callback.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HARDWARE (v3.8)</span>
+                    <span class="entry_type_enum_notes"><p>This camera device supports the onReadoutStarted callback as well as outputting
+readout timestamps.<wbr/> The readout timestamp is generated by the camera hardware and it
+has the same accuracy and timing characteristics of the start-of-exposure time.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether or not the camera device supports readout timestamp and
+{@code onReadoutStarted} callback.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>8</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>If this tag is {@code HARDWARE},<wbr/> the camera device calls
+<a href="https://developer.android.com/reference/CameraCaptureSession/CaptureCallback.html#onReadoutStarted">CaptureCallback#onReadoutStarted</a> in addition to the
+<a href="https://developer.android.com/reference/CameraCaptureSession/CaptureCallback.html#onCaptureStarted">CaptureCallback#onCaptureStarted</a> callback for each capture.<wbr/>
+The timestamp passed into the callback is the start of camera image readout rather than
+the start of the exposure.<wbr/> The timestamp source of
+<a href="https://developer.android.com/reference/CameraCaptureSession/CaptureCallback.html#onReadoutStarted">CaptureCallback#onReadoutStarted</a> is the same as that of
+<a href="https://developer.android.com/reference/CameraCaptureSession/CaptureCallback.html#onCaptureStarted">CaptureCallback#onCaptureStarted</a>.<wbr/></p>
+<p>In addition,<wbr/> the application can switch an output surface's timestamp from start of
+exposure to start of readout by calling
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setReadoutTimestampEnabled">OutputConfiguration#setReadoutTimestampEnabled</a>.<wbr/></p>
+<p>The readout timestamp is beneficial for video recording,<wbr/> because the encoder favors
+uniform timestamps,<wbr/> and the readout timestamps better reflect the cadence camera sensors
+output data.<wbr/></p>
+<p>Note that the camera device produces the start-of-exposure and start-of-readout callbacks
+together.<wbr/> As a result,<wbr/> the <a href="https://developer.android.com/reference/CameraCaptureSession/CaptureCallback.html#onReadoutStarted">CaptureCallback#onReadoutStarted</a>
+is called right after <a href="https://developer.android.com/reference/CameraCaptureSession/CaptureCallback.html#onCaptureStarted">CaptureCallback#onCaptureStarted</a>.<wbr/> The
+difference in start-of-readout and start-of-exposure is the sensor exposure time,<wbr/> plus
+certain constant offset.<wbr/> The offset is usually due to camera sensor level crop,<wbr/> and it is
+generally constant over time for the same set of output resolutions and capture settings.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>This property is populated by the camera framework and must not be set at the HAL layer.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -29286,15 +30538,11 @@
                 <ul class="entry_type_enum">
                   <li>
                     <span class="entry_type_enum_name">DEFAULT (v3.6)</span>
-                    <span class="entry_type_enum_notes"><p>This is the default sensor pixel mode.<wbr/> This is the only sensor pixel mode
-supported unless a camera device advertises
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>.<wbr/></p></span>
+                    <span class="entry_type_enum_notes"><p>This is the default sensor pixel mode.<wbr/></p></span>
                   </li>
                   <li>
                     <span class="entry_type_enum_name">MAXIMUM_RESOLUTION (v3.6)</span>
-                    <span class="entry_type_enum_notes"><p>This sensor pixel mode is offered by devices with capability
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>.<wbr/>
-In this mode,<wbr/> sensors typically do not bin pixels,<wbr/> as a result can offer larger
+                    <span class="entry_type_enum_notes"><p>In this mode,<wbr/> sensors typically do not bin pixels,<wbr/> as a result can offer larger
 image sizes.<wbr/></p></span>
                   </li>
                 </ul>
@@ -29330,13 +30578,10 @@
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>DEFAULT</a> mode.<wbr/>
 When operating in
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>DEFAULT</a> mode,<wbr/> sensors
-with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-capability would typically perform pixel binning in order to improve low light
+would typically perform pixel binning in order to improve low light
 performance,<wbr/> noise reduction etc.<wbr/> However,<wbr/> in
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>
-mode (supported only
-by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
-sensors),<wbr/> sensors typically operate in unbinned mode allowing for a larger image size.<wbr/>
+mode,<wbr/> sensors typically operate in unbinned mode allowing for a larger image size.<wbr/>
 The stream configurations supported in
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>
 mode are also different from those of
@@ -29350,7 +30595,32 @@
 <code><a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a></code>
 must not be mixed in the same CaptureRequest.<wbr/> In other words,<wbr/> these outputs are
 exclusive to each other.<wbr/>
-This key does not need to be set for reprocess requests.<wbr/></p>
+This key does not need to be set for reprocess requests.<wbr/>
+This key will be present on devices supporting the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">Camera<wbr/>Metadata#REQUEST_<wbr/>AVAILABLE_<wbr/>CAPABILITIES_<wbr/>ULTRA_<wbr/>HIGH_<wbr/>RESOLUTION_<wbr/>SENSOR</a>
+capability.<wbr/> It may also be present on devices which do not support the aforementioned
+capability.<wbr/> In that case:</p>
+<ul>
+<li>
+<p>The mandatory stream combinations listed in
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
+  would not apply.<wbr/></p>
+</li>
+<li>
+<p>The bayer pattern of {@code RAW} streams when
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>
+  is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.<wbr/></p>
+</li>
+<li>
+<p>The following keys will always be present:</p>
+<ul>
+<li><a href="#static_android.scaler.streamConfigurationMapMaximumResolution">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map<wbr/>Maximum<wbr/>Resolution</a></li>
+<li><a href="#static_android.sensor.info.activeArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a></li>
+<li><a href="#static_android.sensor.info.pixelArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a></li>
+<li><a href="#static_android.sensor.info.preCorrectionActiveArraySizeMaximumResolution">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size<wbr/>Maximum<wbr/>Resolution</a></li>
+</ul>
+</li>
+</ul>
             </td>
           </tr>
 
@@ -34100,7 +35370,8 @@
                     <span class="entry_type_enum_notes"><p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
 better.<wbr/></p>
 <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#limited-level-additional-guaranteed-configurations">CameraDevice#limited-level-additional-guaranteed-configurations</a>
+documentation are guaranteed to be supported.<wbr/></p>
 <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_<wbr/>COMPATIBLE</code> capability,<wbr/> indicating basic
 support for color image capture.<wbr/> The only exception is that the device may
 alternatively support only the <code>DEPTH_<wbr/>OUTPUT</code> capability,<wbr/> if it can only output depth
@@ -34119,7 +35390,8 @@
                     <span class="entry_type_enum_name">FULL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This camera device is capable of supporting advanced imaging applications.<wbr/></p>
 <p>The stream configurations listed in the <code>FULL</code>,<wbr/> <code>LEGACY</code> and <code>LIMITED</code> tables in the
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#full-level-additional-guaranteed-configurations">CameraDevice#full-level-additional-guaranteed-configurations</a>
+documentation are guaranteed to be supported.<wbr/></p>
 <p>A <code>FULL</code> device will support below capabilities:</p>
 <ul>
 <li><code>BURST_<wbr/>CAPTURE</code> capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
@@ -34139,7 +35411,9 @@
                   <li>
                     <span class="entry_type_enum_name">LEGACY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This camera device is running in backward compatibility mode.<wbr/></p>
-<p>Only the stream configurations listed in the <code>LEGACY</code> table in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are supported.<wbr/></p>
+<p>Only the stream configurations listed in the <code>LEGACY</code> table in the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">CameraDevice#legacy-level-guaranteed-configurations</a>
+documentation are supported.<wbr/></p>
 <p>A <code>LEGACY</code> device does not support per-frame control,<wbr/> manual sensor control,<wbr/> manual
 post-processing,<wbr/> arbitrary cropping regions,<wbr/> and has relaxed performance constraints.<wbr/>
 No additional capabilities beyond <code>BACKWARD_<wbr/>COMPATIBLE</code> will ever be listed by a
@@ -34157,7 +35431,9 @@
                     <span class="entry_type_enum_notes"><p>This camera device is capable of YUV reprocessing and RAW data capture,<wbr/> in addition to
 FULL-level capabilities.<wbr/></p>
 <p>The stream configurations listed in the <code>LEVEL_<wbr/>3</code>,<wbr/> <code>RAW</code>,<wbr/> <code>FULL</code>,<wbr/> <code>LEGACY</code> and
-<code>LIMITED</code> tables in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
+<code>LIMITED</code> tables in the
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#level-3-additional-guaranteed-configurations">CameraDevice#level-3-additional-guaranteed-configurations</a>
+documentation are guaranteed to be supported.<wbr/></p>
 <p>The following additional capabilities are guaranteed to be supported:</p>
 <ul>
 <li><code>YUV_<wbr/>REPROCESSING</code> capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
@@ -37226,8 +38502,8 @@
             <td class="entry_description">
               <p>Whether this camera device can support identical set of stream combinations
 involving HEIC image format,<wbr/> compared to the
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">table of combinations</a>
-involving JPEG image format required for the device's hardware level and capabilities.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">table of combinations</a> involving JPEG image format required for the device's hardware
+level and capabilities.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -37258,9 +38534,8 @@
 HEIC streams at the same time is not supported.<wbr/></p>
 <p>If a camera device supports HEIC format (ISO/<wbr/>IEC 23008-12),<wbr/> not only does it
 support the existing mandatory stream
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">combinations</a>
-required for the device's hardware level and capabilities,<wbr/> it also supports swapping
-each JPEG stream with HEIC stream in all guaranteed combinations.<wbr/></p>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">combinations</a> required for the device's hardware level and capabilities,<wbr/> it also
+supports swapping each JPEG stream with HEIC stream in all guaranteed combinations.<wbr/></p>
 <p>For every HEIC stream configured by the application,<wbr/> the camera framework sets up 2
 internal streams with camera HAL:</p>
 <ul>
@@ -38131,6 +39406,674 @@
       </tbody>
 
   <!-- end of section -->
+  <tr><td colspan="7" id="section_extension" class="section">extension</td></tr>
+
+
+      <tr><td colspan="7" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.extension.strength">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>extension.<wbr/>strength
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [fwk_java_public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Strength of the extension post-processing effect</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0 - 100</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>This control allows Camera extension clients to configure the strength of the applied
+extension effect.<wbr/> Strength equal to 0 means that the extension must not apply any
+post-processing and return a regular captured frame.<wbr/> Strength equal to 100 is the
+default level of post-processing applied when the control is not supported or not set
+by the client.<wbr/> Values between 0 and 100 will have different effect depending on the
+extension type as described below:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_BOKEH">BOKEH</a> -
+the strength is expected to control the amount of blur.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_HDR">HDR</a> and
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_NIGHT">NIGHT</a> -
+the strength can control the amount of images fused and the brightness of the final image.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_FACE_RETOUCH">FACE_<wbr/>RETOUCH</a> -
+the strength value will control the amount of cosmetic enhancement and skin
+smoothing.<wbr/></li>
+</ul>
+<p>The control will be supported if the capture request key is part of the list generated by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#getAvailableCaptureRequestKeys">CameraExtensionCharacteristics#getAvailableCaptureRequestKeys</a>.<wbr/>
+The control is only defined and available to clients sending capture requests via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionSession.html">CameraExtensionSession</a>.<wbr/>
+The default value is 100.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.extension.currentType">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>extension.<wbr/>current<wbr/>Type
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [fwk_java_public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Contains the extension type of the currently active extension</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Extension type value listed in
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html">CameraExtensionCharacteristics</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The capture result will only be supported and included by camera extension
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionSession.html">sessions</a>.<wbr/>
+In case the extension session was configured to use
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_AUTOMATIC">AUTO</a>,<wbr/>
+then the extension type value will indicate the currently active extension like
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_HDR">HDR</a>,<wbr/>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_NIGHT">NIGHT</a> etc.<wbr/>
+,<wbr/> and will never return
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_AUTOMATIC">AUTO</a>.<wbr/>
+In case the extension session was configured to use an extension different from
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_AUTOMATIC">AUTO</a>,<wbr/>
+then the result type will always match with the configured extension type.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.extension.strength">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>extension.<wbr/>strength
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [fwk_java_public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Strength of the extension post-processing effect</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0 - 100</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>This control allows Camera extension clients to configure the strength of the applied
+extension effect.<wbr/> Strength equal to 0 means that the extension must not apply any
+post-processing and return a regular captured frame.<wbr/> Strength equal to 100 is the
+default level of post-processing applied when the control is not supported or not set
+by the client.<wbr/> Values between 0 and 100 will have different effect depending on the
+extension type as described below:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_BOKEH">BOKEH</a> -
+the strength is expected to control the amount of blur.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_HDR">HDR</a> and
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_NIGHT">NIGHT</a> -
+the strength can control the amount of images fused and the brightness of the final image.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_FACE_RETOUCH">FACE_<wbr/>RETOUCH</a> -
+the strength value will control the amount of cosmetic enhancement and skin
+smoothing.<wbr/></li>
+</ul>
+<p>The control will be supported if the capture request key is part of the list generated by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#getAvailableCaptureRequestKeys">CameraExtensionCharacteristics#getAvailableCaptureRequestKeys</a>.<wbr/>
+The control is only defined and available to clients sending capture requests via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionSession.html">CameraExtensionSession</a>.<wbr/>
+The default value is 100.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="7" id="section_jpegr" class="section">jpegr</td></tr>
+
+
+      <tr><td colspan="7" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.jpegr.availableJpegRStreamConfigurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RStream<wbr/>Configurations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 4
+                </span>
+              <span class="entry_type_visibility"> [ndk_public as streamConfiguration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OUTPUT (v3.9)</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INPUT (v3.9)</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The available Jpeg/<wbr/>R stream
+configurations that this camera device supports
+(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The configurations are listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code> tuples.<wbr/></p>
+<p>If the camera device supports Jpeg/<wbr/>R,<wbr/> it will support the same stream combinations with
+Jpeg/<wbr/>R as it does with P010.<wbr/> The stream combinations with Jpeg/<wbr/>R (or P010) supported
+by the device is determined by the device's hardware level and capabilities.<wbr/></p>
+<p>All the static,<wbr/> control,<wbr/> and dynamic metadata tags related to JPEG apply to Jpeg/<wbr/>R formats.<wbr/>
+Configuring JPEG and Jpeg/<wbr/>R streams at the same time is not supported.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.jpegr.availableJpegRMinFrameDurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RMin<wbr/>Frame<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the minimum frame duration for each
+format/<wbr/>size combination for Jpeg/<wbr/>R output formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>This should correspond to the frame duration when only that
+stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
+set to either OFF or FAST.<wbr/></p>
+<p>When multiple streams are used in a request,<wbr/> the minimum frame
+duration will be max(individual stream min durations).<wbr/></p>
+<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
+calculating the max frame rate.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.jpegr.availableJpegRStallDurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RStall<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the maximum stall duration for each
+output format/<wbr/>size combination for Jpeg/<wbr/>R streams.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>A stall duration is how much extra time would get added
+to the normal minimum frame duration for a repeating request
+that has streams with non-zero stall.<wbr/></p>
+<p>This functions similarly to
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for Jpeg/<wbr/>R
+streams.<wbr/></p>
+<p>All Jpeg/<wbr/>R output stream formats may have a nonzero stall
+duration.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.jpegr.availableJpegRStreamConfigurationsMaximumResolution">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RStream<wbr/>Configurations<wbr/>Maximum<wbr/>Resolution
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 4
+                </span>
+              <span class="entry_type_visibility"> [ndk_public as streamConfiguration]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OUTPUT (v3.9)</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INPUT (v3.9)</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The available Jpeg/<wbr/>R stream
+configurations that this camera device supports
+(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Refer to <a href="#static_android.jpegr.availableJpegRStreamConfigurations">android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RStream<wbr/>Configurations</a> for details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.jpegr.availableJpegRMinFrameDurationsMaximumResolution">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RMin<wbr/>Frame<wbr/>Durations<wbr/>Maximum<wbr/>Resolution
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the minimum frame duration for each
+format/<wbr/>size combination for Jpeg/<wbr/>R output formats for CaptureRequests where
+<a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Refer to <a href="#static_android.jpegr.availableJpegRMinFrameDurations">android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RMin<wbr/>Frame<wbr/>Durations</a> for details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.jpegr.availableJpegRStallDurationsMaximumResolution">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RStall<wbr/>Durations<wbr/>Maximum<wbr/>Resolution
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [ndk_public as streamConfigurationDuration]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the maximum stall duration for each
+output format/<wbr/>size combination for Jpeg/<wbr/>R streams for CaptureRequests where
+<a href="#controls_android.sensor.pixelMode">android.<wbr/>sensor.<wbr/>pixel<wbr/>Mode</a> is set to
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">Camera<wbr/>Metadata#SENSOR_<wbr/>PIXEL_<wbr/>MODE_<wbr/>MAXIMUM_<wbr/>RESOLUTION</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>9</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Refer to <a href="#static_android.jpegr.availableJpegRStallDurations">android.<wbr/>jpegr.<wbr/>available<wbr/>Jpeg<wbr/>RStall<wbr/>Durations</a> for details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
 <!-- </namespace> -->
   </table>
 
diff --git a/camera/docs/metadata-generate b/camera/docs/metadata-generate
index 3a2a000..38e7b1e 100755
--- a/camera/docs/metadata-generate
+++ b/camera/docs/metadata-generate
@@ -42,7 +42,7 @@
 outdir="$ANDROID_PRODUCT_OUT/obj/ETC/system-media-camera-docs_intermediates"
 ndk_header_dir="$ANDROID_BUILD_TOP/frameworks/av/camera/ndk/include/camera"
 ndk_impl_dir="$ANDROID_BUILD_TOP/frameworks/av/camera/ndk/impl"
-libcameraservice_hidl_dir="$ANDROID_BUILD_TOP/frameworks/av/services/camera/libcameraservice/hidl"
+libcameraservice_aidl_dir="$ANDROID_BUILD_TOP/frameworks/av/services/camera/libcameraservice/aidl"
 device_info_dir="$ANDROID_BUILD_TOP/cts/tools/cts-device-info/"`
         `"src/com/android/cts/deviceinfo"
 out_files=()
@@ -235,7 +235,7 @@
 gen_file camera_metadata_asserts.mako ../src/camera_metadata_asserts.cpp || exit 1
 
 #Generate tags with vndk versions for filtering
-gen_file_abs vndk_camera_metadata_tags.mako "$libcameraservice_hidl_dir/VndkVersionMetadataTags.h" yes || exit 1
+gen_file_abs vndk_camera_metadata_tags.mako "$libcameraservice_aidl_dir/VndkVersionMetadataTags.h" yes || exit 1
 
 #Generate NDK header
 gen_file_abs ndk_camera_metadata_tags.mako "$ndk_header_dir/NdkCameraMetadataTags.h" yes || exit 1
diff --git a/camera/docs/metadata_definitions.xml b/camera/docs/metadata_definitions.xml
index 10f997d..9081ed9 100644
--- a/camera/docs/metadata_definitions.xml
+++ b/camera/docs/metadata_definitions.xml
@@ -152,6 +152,9 @@
     <typedef name="dynamicRangeProfiles">
         <language name="java">android.hardware.camera2.params.DynamicRangeProfiles</language>
     </typedef>
+    <typedef name="colorSpaceProfiles">
+        <language name="java">android.hardware.camera2.params.ColorSpaceProfiles</language>
+    </typedef>
   </types>
 
   <namespace name="android">
@@ -660,7 +663,7 @@
                 An external flash has been turned on.
 
                 It informs the camera device that an external flash has been turned on, and that
-                metering (and continuous focus if active) should be quickly recaculated to account
+                metering (and continuous focus if active) should be quickly recalculated to account
                 for the external flash. Otherwise, this mode acts like ON.
 
                 When the external flash is turned off, AE mode should be changed to one of the
@@ -784,7 +787,9 @@
 
               For camera devices with the
               {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-              capability,
+              capability or devices where
+              {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+              lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode}
               android.sensor.info.activeArraySizeMaximumResolution /
               android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the
               coordinate system for requests where android.sensor.pixelMode is set to
@@ -1118,7 +1123,10 @@
 
               For camera devices with the
               {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-              capability, android.sensor.info.activeArraySizeMaximumResolution /
+              capability or devices where
+              {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+              lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode},
+              android.sensor.info.activeArraySizeMaximumResolution /
               android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the
               coordinate system for requests where android.sensor.pixelMode is set to
               {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
@@ -1475,7 +1483,10 @@
 
               For camera devices with the
               {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-              capability, android.sensor.info.activeArraySizeMaximumResolution /
+              capability or devices where
+              {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+              lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode},
+              android.sensor.info.activeArraySizeMaximumResolution /
               android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the
               coordinate system for requests where android.sensor.pixelMode is set to
               {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
@@ -3018,7 +3029,7 @@
           </details>
         </entry>
         <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
-            type_notes="Range of supported post RAW sensitivitiy boosts"
+            type_notes="Range of supported post RAW sensitivity boosts"
             container="array" typedef="rangeInt">
           <array>
             <size>2</size>
@@ -3541,7 +3552,263 @@
             android.control.awbRegions.
           </details>
         </entry>
+        <entry name="settingsOverride" type="int32" visibility="public" enum="true"
+            hal_version="3.9">
+          <enum>
+            <value>OFF
+              <notes>
+                No keys are applied sooner than the other keys when applying CaptureRequest
+                settings to the camera device. This is the default value.
+              </notes>
+            </value>
+            <value>ZOOM
+              <notes>
+                Zoom related keys are applied sooner than the other keys in the CaptureRequest. The
+                zoom related keys are:
+
+                * android.control.zoomRatio
+                * android.scaler.cropRegion
+                * android.control.aeRegions
+                * android.control.awbRegions
+                * android.control.afRegions
+
+                Even though android.control.aeRegions, android.control.awbRegions,
+                and android.control.afRegions are not directly zoom related, applications
+                typically scale these regions together with android.scaler.cropRegion to have a
+                consistent mapping within the current field of view. In this aspect, they are
+                related to android.scaler.cropRegion and android.control.zoomRatio.
+              </notes>
+            </value>
+            <value visibility="hidden" id="0x4000">VENDOR_START
+              <notes>
+                Vendor defined settingsOverride. These depend on vendor implementation.
+              </notes>
+            </value>
+          </enum>
+          <description>The desired CaptureRequest settings override with which certain keys are
+            applied earlier so that they can take effect sooner.
+          </description>
+          <range>android.control.availableSettingsOverrides</range>
+          <details>
+            There are some CaptureRequest keys which can be applied earlier than others
+            when controls within a CaptureRequest aren't required to take effect at the same time.
+            One such example is zoom. Zoom can be applied at a later stage of the camera pipeline.
+            As soon as the camera device receives the CaptureRequest, it can apply the requested
+            zoom value onto an earlier request that's already in the pipeline, thus improving zoom
+            latency.
+
+            This key's value in the capture result reflects whether the controls for this capture
+            are overridden "by" a newer request. This means that if a capture request turns on
+            settings override, the capture result of an earlier request will contain the key value
+            of ZOOM. On the other hand, if a capture request has settings override turned on,
+            but all newer requests have it turned off, the key's value in the capture result will
+            be OFF because this capture isn't overridden by a newer capture. In the two examples
+            below, the capture results columns illustrate the settingsOverride values in different
+            scenarios.
+
+            Assuming the zoom settings override can speed up by 1 frame, below example illustrates
+            the speed-up at the start of capture session:
+
+                Camera session created
+                Request 1 (zoom=1.0x, override=ZOOM) ->
+                Request 2 (zoom=1.2x, override=ZOOM) ->
+                Request 3 (zoom=1.4x, override=ZOOM) ->  Result 1 (zoom=1.2x, override=ZOOM)
+                Request 4 (zoom=1.6x, override=ZOOM) ->  Result 2 (zoom=1.4x, override=ZOOM)
+                Request 5 (zoom=1.8x, override=ZOOM) ->  Result 3 (zoom=1.6x, override=ZOOM)
+                                                     ->  Result 4 (zoom=1.8x, override=ZOOM)
+                                                     ->  Result 5 (zoom=1.8x, override=OFF)
+
+            The application can turn on settings override and use zoom as normal. The example
+            shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom
+            values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4).
+
+            The application must make sure the settings override doesn't interfere with user
+            journeys requiring simultaneous application of all controls in CaptureRequest on the
+            requested output targets. For example, if the application takes a still capture using
+            CameraCaptureSession#capture, and the repeating request immediately sets a different
+            zoom value using override, the inflight still capture could have its zoom value
+            overwritten unexpectedly.
+
+            So the application is strongly recommended to turn off settingsOverride when taking
+            still/burst captures, and turn it back on when there is only repeating viewfinder
+            request and no inflight still/burst captures.
+
+            Below is the example demonstrating the transitions in and out of the
+            settings override:
+
+                Request 1 (zoom=1.0x, override=OFF)
+                Request 2 (zoom=1.2x, override=OFF)
+                Request 3 (zoom=1.4x, override=ZOOM)  -> Result 1 (zoom=1.0x, override=OFF)
+                Request 4 (zoom=1.6x, override=ZOOM)  -> Result 2 (zoom=1.4x, override=ZOOM)
+                Request 5 (zoom=1.8x, override=OFF)   -> Result 3 (zoom=1.6x, override=ZOOM)
+                                                      -> Result 4 (zoom=1.6x, override=OFF)
+                                                      -> Result 5 (zoom=1.8x, override=OFF)
+
+            This example shows that:
+
+            * The application "ramps in" settings override by setting the control to ZOOM.
+            In the example, request #3 enables zoom settings override. Because the camera device
+            can speed up applying zoom by 1 frame, the outputs of request #2 have 1.4x zoom, the
+            value specified in request #3.
+            * The application "ramps out" of settings override by setting the control to OFF. In
+            the example, request #5 changes the override to OFF. Because request #4's zoom
+            takes effect in result #3, result #4's zoom remains the same until new value takes
+            effect in result #5.
+          </details>
+          <hal_details>
+            The HAL must set this value to OFF in all of the Capture templates.
+
+            Typically the HAL unblocks processCaptureRequest at the same rate as the sensor capture,
+            and the HAL queues the capture settings in its pipeline when processCaptureRequest
+            returns. However, when the settings override is enabled, the HAL can optimize the
+            overridden controls' latency by applying them as soon as processCaptureRequest is
+            called, rather than when it's unblocked.
+
+            If zoom settings override is on, when zooming in, the HAL must be able to apply the
+            zoom related settings at least 1 frame ahead.
+          </hal_details>
+        </entry>
     </controls>
+    <static>
+      <entry name="availableSettingsOverrides" type="int32" visibility="public"
+        optional="true" type_notes="list of enums" container="array" typedef="enumList"
+        hal_version="3.9">
+        <array>
+          <size>n</size>
+        </array>
+        <description>List of available settings overrides supported by the camera device that can
+        be used to speed up certain controls.
+        </description>
+        <range>Any value listed in android.control.settingsOverride</range>
+        <details>When not all controls within a CaptureRequest are required to take effect
+          at the same time on the outputs, the camera device may apply certain request keys sooner
+          to improve latency. This list contains such supported settings overrides. Each settings
+          override corresponds to a set of CaptureRequest keys that can be sped up when applying.
+
+          A supported settings override can be passed in via
+          {@link android.hardware.camera2.CaptureRequest#CONTROL_SETTINGS_OVERRIDE}, and the
+          CaptureRequest keys corresponding to the override are applied as soon as possible, not
+          bound by per-frame synchronization. See android.control.settingsOverride for the
+          CaptureRequest keys for each override.
+
+          OFF is always included in this list.
+        </details>
+      </entry>
+    </static>
+    <dynamic>
+      <clone entry="android.control.settingsOverride" kind="controls">
+      </clone>
+      <entry name="settingsOverridingFrameNumber" type="int32" visibility="system"
+        hal_version="3.9">
+        <description>The frame number of the newer request overriding this capture.
+        </description>
+        <details>Must be equal to the frame number of the result if this capture isn't
+        overridden by a newer request, i.e. if android.control.settingsOverride is OFF
+        in the capture result. On the other hand, if the capture is overridden by a newer
+        request, the value of this tag (unsigned) must be larger than the frame number of
+        the capture result.</details>
+      </entry>
+    </dynamic>
+    <controls>
+      <entry name="autoframing" type="byte" visibility="public"
+          enum="true" hwlevel="limited" hal_version="3.9">
+        <enum>
+          <value>OFF
+            <notes>
+              Disable autoframing.
+            </notes>
+          </value>
+          <value>ON
+            <notes>
+              Enable autoframing to keep people in the frame's field of view.
+            </notes>
+          </value>
+          <value visibility="hidden">AUTO
+            <notes>
+              Automatically select ON or OFF based on the system level preferences.
+            </notes>
+          </value>
+        </enum>
+        <description>
+          Automatic crop, pan and zoom to keep objects in the center of the frame.
+        </description>
+        <details>
+          Auto-framing is a special mode provided by the camera device to dynamically crop, zoom
+          or pan the camera feed to try to ensure that the people in a scene occupy a reasonable
+          portion of the viewport. It is primarily designed to support video calling in
+          situations where the user isn't directly in front of the device, especially for
+          wide-angle cameras.
+          android.scaler.cropRegion and android.control.zoomRatio in CaptureResult will be used
+          to denote the coordinates of the auto-framed region.
+          Zoom and video stabilization controls are disabled when auto-framing is enabled. The 3A
+          regions must map the screen coordinates into the scaler crop returned from the capture
+          result instead of using the active array sensor.
+        </details>
+        <hal_details>
+          While auto-framing is ON, the aspect ratio of the auto-framed region must match the
+          aspect ratio of the configured output stream.
+          When reporting CaptureResult, SCALER_CROP_REGION might not adequately describe the
+          actual sensor pixels. In this case, it is acceptable for the returned parameters to only
+          be an approximation of the image sensor region that is actually used.
+          When auto-framing is turned off, the transition should be immediate, with no panning or
+          zooming to transition to the settings requested by the app. When it is turned on, the
+          transition can be immediate or smooth.
+        </hal_details>
+      </entry>
+    </controls>
+    <static>
+      <entry name="autoframingAvailable" type="byte" visibility="public" enum="true"
+          typedef="boolean" hwlevel="limited" hal_version="3.9">
+        <enum>
+          <value>FALSE</value>
+          <value>TRUE</value>
+        </enum>
+        <description>Whether the camera device supports android.control.autoframing.
+        </description>
+        <details>
+          Will be `false` if auto-framing is not available.
+        </details>
+      </entry>
+    </static>
+    <dynamic>
+      <clone entry="android.control.autoframing" kind="controls">
+      </clone>
+      <entry name="autoframingState" type="byte" visibility="public"
+          enum="true" hwlevel="limited" hal_version="3.9">
+        <enum>
+          <value>INACTIVE
+            <notes>
+              Auto-framing is inactive.
+            </notes>
+          </value>
+          <value>FRAMING
+            <notes>
+              Auto-framing is in process - either zooming in, zooming out or pan is taking place.
+            </notes>
+          </value>
+          <value>CONVERGED
+            <notes>
+              Auto-framing has reached a stable state (frame/fov is not being adjusted). The state
+              may transition back to FRAMING if the scene changes.
+            </notes>
+          </value>
+        </enum>
+        <description>Current state of auto-framing.
+        </description>
+        <details>
+          When the camera doesn't have auto-framing available (i.e.
+          `android.control.autoframingAvailable` == false) or it is not enabled (i.e.
+          `android.control.autoframing` == OFF), the state will always be INACTIVE.
+          Other states indicate the current auto-framing state:
+
+          * When `android.control.autoframing` is set to ON, auto-framing will take
+          place. While the frame is aligning itself to center the object (doing things like
+          zooming in, zooming out or pan), the state will be FRAMING.
+          * When field of view is not being adjusted anymore and has reached a stable state, the
+          state will be CONVERGED.
+        </details>
+      </entry>
+    </dynamic>
     </section>
     <section name="demosaic">
       <controls>
@@ -3769,6 +4036,11 @@
               {@link android.hardware.camera2.CameraManager#turnOnTorchWithStrengthLevel}.
               If this value is equal to 1, flashlight brightness control is not supported.
               The value for this key will be null for devices with no flash unit.
+
+              The maximum value is guaranteed to be safe to use for an indefinite duration in
+              terms of device flashlight lifespan, but may be too bright for comfort for many
+              use cases. Use the default torch brightness value to avoid problems with an
+              over-bright flashlight.
             </details>
           </entry>
           <entry name="strengthDefaultLevel" type="int32" visibility="public" hal_version="3.8">
@@ -6292,7 +6564,8 @@
 
               * Profile {@link android.hardware.camera2.params.DynamicRangeProfiles#HLG10}
               * All mandatory stream combinations for this specific capability as per
-                documentation {@link android.hardware.camera2.CameraDevice#createCaptureSession}
+                documentation
+                {@link android.hardware.camera2.CameraDevice#10-bit-output-additional-guaranteed-configurations}
               * In case the device is not able to capture some combination of supported
                 standard 8-bit and/or 10-bit dynamic range profiles within the same capture request,
                 then those constraints must be listed in
@@ -6330,9 +6603,28 @@
               {@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES}
               lists all of the supported stream use cases.
 
-              Refer to {@link android.hardware.camera2.CameraDevice#createCaptureSession} for the
-              mandatory stream combinations involving stream use cases, which can also be queried
-              via {@link android.hardware.camera2.params.MandatoryStreamCombination}.
+              Refer to
+              {@link android.hardware.camera2.CameraDevice#stream-use-case-capability-additional-guaranteed-configurations}
+              for the mandatory stream combinations involving stream use cases, which can also be
+              queried via {@link android.hardware.camera2.params.MandatoryStreamCombination}.
+              </notes>
+            </value>
+            <value optional="true" visibility="java_public" hal_version="3.9">COLOR_SPACE_PROFILES
+              <notes>
+                The device supports querying the possible combinations of color spaces, image
+                formats, and dynamic range profiles supported by the camera and requesting a
+                particular color space for a session via
+                {@link android.hardware.camera2.params.SessionConfiguration#setColorSpace}.
+
+                Cameras that enable this capability may or may not also implement dynamic range
+                profiles. If they don't,
+                {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedDynamicRangeProfiles}
+                will return only
+                {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD} and
+                {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedColorSpacesForDynamicRange}
+                will assume support of the
+                {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD}
+                profile in all combinations of color spaces and image formats.
               </notes>
             </value>
           </enum>
@@ -6870,6 +7162,96 @@
           {@link android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile}.
           </details>
         </entry>
+        <entry name="availableColorSpaceProfiles" type="int32" visibility="java_public"
+            synthetic="true" optional="true" typedef="colorSpaceProfiles" hal_version="3.9">
+          <description>
+            An interface for querying the color space profiles supported by a camera device.
+          </description>
+          <details>
+            A color space profile is a combination of a color space, an image format, and a dynamic
+            range profile. Camera clients can retrieve the list of supported color spaces by calling
+            {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedColorSpaces} or
+            {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedColorSpacesForDynamicRange}.
+            If a camera does not support the
+            {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT}
+            capability, the dynamic range profile will always be
+            {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD}. Color space
+            capabilities are queried in combination with an {@link android.graphics.ImageFormat}.
+            If a camera client wants to know the general color space capabilities of a camera device
+            regardless of image format, it can specify {@link android.graphics.ImageFormat#UNKNOWN}.
+            The color space for a session can be configured by setting the SessionConfiguration
+            color space via {@link android.hardware.camera2.params.SessionConfiguration#setColorSpace}.
+          </details>
+        </entry>
+        <entry name="availableColorSpaceProfilesMap" type="int64" visibility="ndk_public"
+            optional="true" enum="true" container="array" hal_version="3.9">
+          <array>
+            <size>n</size>
+            <size>3</size>
+          </array>
+          <enum>
+            <value id="-1">UNSPECIFIED
+            <notes>
+              Default value, when not explicitly specified. The Camera device will choose the color
+              space to employ.
+            </notes>
+            </value>
+            <value visibility="system" id="0">SRGB
+            <notes>
+              RGB color space sRGB standardized as IEC 61966-2.1:1999.
+            </notes>
+            </value>
+            <value visibility="system" id="7">DISPLAY_P3
+            <notes>
+              RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.
+            </notes>
+            </value>
+            <value visibility="system" id="16">BT2020_HLG
+            <notes>
+              RGB color space BT.2100 standardized as Hybrid Log Gamma encoding.
+            </notes>
+            </value>
+          </enum>
+          <description>
+            A list of all possible color space profiles supported by a camera device.
+          </description>
+          <details>
+            A color space profile is a combination of a color space, an image format, and a dynamic range
+            profile. If a camera does not support the
+            {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT}
+            capability, the dynamic range profile will always be
+            {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD}. Camera clients can
+            use {@link android.hardware.camera2.params.SessionConfiguration#setColorSpace} to select
+            a color space.
+          </details>
+          <hal_details>
+            The array contains three entries per supported profile:
+
+            1) The supported color space.
+            2) An image format which can be used with this color space.
+            3) A bitmap of all compatible dynamic range profiles, if the device is HDR-capable.
+
+            The possible values for #1 are the positive values of the
+            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_* enum, which is equivalent to
+            {@link android.graphics.ColorSpace.Named} and its ordinals. UNSPECIFIED should not be
+            used here. It should be noted that not all {@link android.graphics.ColorSpace.Named}
+            values are supported, only those in the
+            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_* enum.
+
+            The possible values for #2 consist of the public-facing image/pixel formats, found at
+            {@link android.graphics.ImageFormat} and {@link android.graphics.PixelFormat}. Each maps
+            to a HAL pixel format except for {@link android.graphics.ImageFormat#JPEG},
+            {@link android.graphics.ImageFormat#HEIC}, and
+            {@link android.graphics.ImageFormat#DEPTH_JPEG}. Depth formats besides DEPTH_JPEG are
+            not applicable and should not be specified. If there are no constraints on the type of
+            image format a color space is compatible with, this can be
+            {@link android.graphics.ImageFormat#UNKNOWN}.
+
+            If the device is not HDR-capable, #3 should always be
+            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_STANDARD. Otherwise, #3 should be a
+            bitmap of the compatible ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_* values.
+          </hal_details>
+        </entry>
       </static>
     </section>
     <section name="scaler">
@@ -6903,9 +7285,9 @@
             if necessary to maintain the stream's aspect ratio, then scaling the sensor input to
             match the output's configured resolution.
 
-            The crop region is applied after the RAW to other color space (e.g. YUV)
-            conversion. Since raw streams (e.g. RAW16) don't have the conversion stage, they are not
-            croppable. The crop region will be ignored by raw streams.
+            The crop region is usually applied after the RAW to other color space (e.g. YUV)
+            conversion. As a result RAW streams are not croppable unless supported by the
+            camera device. See android.scaler.availableStreamUseCases#CROPPED_RAW for details.
 
             For non-raw streams, any additional per-stream cropping will be done to maximize the
             final pixel area of the stream.
@@ -6985,7 +7367,10 @@
 
             For camera devices with the
             {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            capability, android.sensor.info.activeArraySizeMaximumResolution /
+            capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+            lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode},
+
+            android.sensor.info.activeArraySizeMaximumResolution /
             android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the
             coordinate system for requests where android.sensor.pixelMode is set to
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
@@ -7718,9 +8103,9 @@
           stream configurations are the same as for applications targeting SDK version older than
           31.
 
-          Refer to android.request.availableCapabilities and {@link
-          android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
-          stream configurations on a per-capability basis.
+          Refer to android.request.availableCapabilities and
+          {@link android.hardware.camera2.CameraDevice#legacy-level-guaranteed-configurations}
+          for additional mandatory stream configurations on a per-capability basis.
 
           *1: For JPEG format, the sizes may be restricted by below conditions:
 
@@ -7780,7 +8165,7 @@
           Note that for Performance Class 12 or higher primary cameras (first rear/front facing
           camera in the camera ID list), camera framework filters out JPEG sizes smaller than
           1080p depending on applications' targetSdkLevel. The camera HAL must still support the
-          smaller JPEG sizes to maintain backward comopatibility.
+          smaller JPEG sizes to maintain backward compatibility.
 
           For LIMITED capability devices
           (`android.info.supportedHardwareLevel == LIMITED`),
@@ -8032,14 +8417,15 @@
           {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
           and {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}.
           This is an app-readable conversion of the mandatory stream combination
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession tables}.
+          {@link android.hardware.camera2.CameraDevice#legacy-level-guaranteed-configurations
+          tables}.
           </description>
           <details>
           The array of
           {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
           generated according to the documented
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} based on
-          specific device level and capabilities.
+          {@link android.hardware.camera2.CameraDevice#legacy-level-guaranteed-configurations
+          guideline} based on specific device level and capabilities.
           Clients can use the array as a quick reference to find an appropriate camera stream
           combination.
           As per documentation, the stream combinations with given PREVIEW, RECORD and
@@ -8066,14 +8452,15 @@
           <description>
           An array of mandatory concurrent stream combinations.
           This is an app-readable conversion of the concurrent mandatory stream combination
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession tables}.
+          {@link android.hardware.camera2.CameraDevice#concurrent-stream-guaranteed-configurations
+          tables}.
           </description>
           <details>
           The array of
           {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
           generated according to the documented
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} for each
-          device which has its Id present in the set returned by
+          {@link android.hardware.camera2.CameraDevice#concurrent-stream-guaranteed-configurations
+          guideline} for each device which has its Id present in the set returned by
           {@link android.hardware.camera2.CameraManager#getConcurrentCameraIds}.
           Clients can use the array as a quick reference to find an appropriate camera stream
           combination.
@@ -8333,7 +8720,8 @@
           If a camera device supports multi-resolution output streams for a particular format, for
           each of its mandatory stream combinations, the camera device will support using a
           MultiResolutionImageReader for the MAXIMUM stream of supported formats. Refer to
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession} for additional details.
+          {@link android.hardware.camera2.CameraDevice#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs}
+          for additional details.
 
           To use multi-resolution input streams, the supported formats can be queried by {@link
           android.hardware.camera2.params.MultiResolutionStreamConfigurationMap#getInputFormats}.
@@ -8346,8 +8734,8 @@
           {@code YUV} output, or multi-resolution {@code PRIVATE} input and multi-resolution
           {@code PRIVATE} output, {@code JPEG} and {@code YUV} are guaranteed to be supported
           multi-resolution output stream formats. Refer to
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession} for
-          details about the additional mandatory stream combinations in this case.
+          {@link android.hardware.camera2.CameraDevice#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs}
+          for details about the additional mandatory stream combinations in this case.
           </details>
           <hal_details>
           Do not set this property directly
@@ -8485,14 +8873,15 @@
           {@link android.hardware.camera2.CaptureRequest} has android.sensor.pixelMode set
           to {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
           This is an app-readable conversion of the maximum resolution mandatory stream combination
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession tables}.
+          {@link android.hardware.camera2.CameraDevice#additional-guaranteed-combinations-for-ultra-high-resolution-sensors
+          tables}.
           </description>
           <details>
           The array of
           {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
           generated according to the documented
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} for each
-          device which has the
+          {@link android.hardware.camera2.CameraDevice#additional-guaranteed-combinations-for-ultra-high-resolution-sensors
+          guideline} for each device which has the
           {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
           capability.
           Clients can use the array as a quick reference to find an appropriate camera stream
@@ -8517,14 +8906,15 @@
           10-bit output capability
           {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT}
           This is an app-readable conversion of the 10 bit output mandatory stream combination
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession tables}.
+          {@link android.hardware.camera2.CameraDevice#10-bit-output-additional-guaranteed-configurations
+          tables}.
           </description>
           <details>
           The array of
           {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
           generated according to the documented
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} for each
-          device which has the
+          {@link android.hardware.camera2.CameraDevice#10-bit-output-additional-guaranteed-configurations
+          guideline} for each device which has the
           {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT}
           capability.
           Clients can use the array as a quick reference to find an appropriate camera stream
@@ -8548,14 +8938,16 @@
           An array of mandatory stream combinations which are applicable when device lists
           {@code PREVIEW_STABILIZATION} in android.control.availableVideoStabilizationModes.
           This is an app-readable conversion of the preview stabilization mandatory stream
-          combination {@link android.hardware.camera2.CameraDevice#createCaptureSession tables}.
+          combination
+          {@link android.hardware.camera2.CameraDevice#preview-stabilization-guaranteed-stream-configurations
+          tables}.
           </description>
           <details>
           The array of
           {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
           generated according to the documented
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} for each
-          device which supports {@code PREVIEW_STABILIZATION}
+          {@link android.hardware.camera2.CameraDevice#preview-stabilization-guaranteed-stream-configurations
+          guideline} for each device which supports {@code PREVIEW_STABILIZATION}
           Clients can use the array as a quick reference to find an appropriate camera stream
           combination.
           The mandatory stream combination array will be {@code null} in case the device does not
@@ -8723,6 +9115,33 @@
               variable frame rate settings to allow sufficient exposure time in low light.
             </notes>
             </value>
+            <value optional="true" id="0x6" hal_version="3.9">CROPPED_RAW
+            <notes>
+              Cropped RAW stream when the client chooses to crop the field of view.
+
+              Certain types of image sensors can run in binned modes in order to improve signal to
+              noise ratio while capturing frames. However, at certain zoom levels and / or when
+              other scene conditions are deemed fit, the camera sub-system may choose to un-bin and
+              remosaic the sensor's output. This results in a RAW frame which is cropped in field
+              of view and yet has the same number of pixels as full field of view RAW, thereby
+              improving image detail.
+
+              The resultant field of view of the RAW stream will be greater than or equal to
+              croppable non-RAW streams. The effective crop region for this RAW stream will be
+              reflected in the CaptureResult key android.scaler.rawCropRegion.
+
+              If this stream use case is set on a non-RAW stream, i.e. not one of :
+
+              * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR}
+              * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}
+              * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}
+
+              session configuration is not guaranteed to succeed.
+
+
+              This stream use case may not be supported on some devices.
+            </notes>
+            </value>
             <value optional="true" visibility="hidden" id="0x10000">VENDOR_START
             <notes>
               Vendor defined use cases. These depend on the vendor implementation.
@@ -8760,8 +9179,9 @@
             The guaranteed stream combinations related to stream use case for a camera device with
             {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE}
             capability is documented in the camera device
-            {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline}. The
-            application is strongly recommended to use one of the guaranteed stream combinations.
+            {@link android.hardware.camera2.CameraDevice#stream-use-case-capability-additional-guaranteed-configurations
+            guideline}. The application is strongly recommended to use one of the guaranteed stream
+            combinations.
             If the application creates a session with a stream combination not in the guaranteed
             list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
             the camera device may ignore some stream use cases due to hardware constraints
@@ -8785,15 +9205,15 @@
           <description>
           An array of mandatory stream combinations with stream use cases.
           This is an app-readable conversion of the mandatory stream combination
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession tables} with
-          each stream's use case being set.
+          {@link android.hardware.camera2.CameraDevice#stream-use-case-capability-additional-guaranteed-configurations
+          tables} with each stream's use case being set.
           </description>
           <details>
           The array of
           {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is
           generated according to the documented
-          {@link android.hardware.camera2.CameraDevice#createCaptureSession guideline} for a
-          camera device with
+          {@link android.hardware.camera2.CameraDevice#stream-use-case-capability-additional-guaranteed-configurations
+          guideline} for a camera device with
           {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE}
           capability.
           The mandatory stream combination array will be {@code null} in case the device doesn't
@@ -8807,6 +9227,59 @@
           </hal_details>
         </entry>
       </static>
+      <dynamic>
+        <entry name="rawCropRegion" type="int32" visibility="public" container="array"
+          typedef="rectangle" hal_version="3.9">
+          <array>
+            <size>4</size>
+          </array>
+          <description>
+          The region of the sensor that corresponds to the RAW read out for this
+          capture when the stream use case of a RAW stream is set to CROPPED_RAW.
+          </description>
+          <units>Pixel coordinates relative to
+          android.sensor.info.activeArraySize or
+          android.sensor.info.preCorrectionActiveArraySize depending on distortion correction
+          capability and mode</units>
+          <details>
+          The coordinate system follows that of android.sensor.info.preCorrectionActiveArraySize.
+
+          This CaptureResult key will be set when the corresponding CaptureRequest has a RAW target
+          with stream use case set to
+          {@link android.hardware.camera2.CameraMetadata#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW},
+          otherwise it will be {@code null}.
+          The value of this key specifies the region of the sensor used for the RAW capture and can
+          be used to calculate the corresponding field of view of RAW streams.
+          This field of view will always be >= field of view for (processed) non-RAW streams for the
+          capture. Note: The region specified may not necessarily be centered.
+
+          For example: Assume a camera device has a pre-correction active array size of
+          {@code {0, 0, 2000, 1500}}. If the RAW_CROP_REGION is {@code {500, 375, 1500, 1125}}, that
+          corresponds to a centered crop of 1/4th of the full field of view RAW stream.
+
+          The metadata keys which describe properties of RAW frames:
+
+          * android.statistics.hotPixelMap
+          * android.statistics.lensShadingCorrectionMap
+          * android.lens.distortion
+          * android.lens.poseTranslation
+          * android.lens.poseRotation
+          * android.lens.distortion
+          * android.lens.intrinsicCalibration
+
+          should be interpreted in the effective after raw crop field-of-view coordinate system.
+          In this coordinate system,
+          {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to
+          the top left corner of the cropped RAW frame and
+          {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+          the bottom right corner. Client applications must use the values of the keys
+          in the CaptureResult metadata if present.
+
+          Crop regions (android.scaler.cropRegion), AE/AWB/AF regions and face coordinates still
+          use the android.sensor.info.activeArraySize coordinate system as usual.
+          </details>
+        </entry>
+      </dynamic>
     </section>
     <section name="sensor">
       <controls>
@@ -9351,7 +9824,8 @@
             counterparts.
             This key will only be present for devices which advertise the
             {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            capability.
+            capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+            lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode}
             </details>
             <ndk_details>
             The data representation is `int[4]`, which maps to `(left, top, width, height)`.
@@ -9383,7 +9857,8 @@
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
             This key will only be present for devices which advertise the
             {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            capability.
+            capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+            lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode}
             </details>
             <tag id="RAW" />
           </entry>
@@ -9407,7 +9882,8 @@
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
             This key will only be present for devices which advertise the
             {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            capability.
+            capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+            lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode}
             </details>
             <ndk_details>
             The data representation is `int[4]`, which maps to `(left, top, width, height)`.
@@ -9438,13 +9914,23 @@
             performance etc. These groups can be of various sizes such as 2X2 (quad bayer),
             3X3 (nona-bayer). This key specifies the length and width of the pixels grouped under
             the same color filter.
+            In case the device has the
+            {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
+            capability:
 
-            This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
-            will have a regular bayer pattern.
+            * This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW
+              images will have a regular bayer pattern.
 
-            This key will not be present for sensors which don't have the
-            {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            capability.
+            In case the device does not have the
+            {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
+            capability:
+
+            * This key will be present if
+              {@link CameraCharacteristics#getAvailableCaptureRequestKeys}
+              lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode}, since RAW
+              images may not necessarily have a regular bayer pattern when
+              {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode} is set to
+              {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
           </details>
         </entry>
         </namespace>
@@ -10500,14 +10986,11 @@
                hal_version="3.6">
           <enum>
             <value>DEFAULT
-            <notes> This is the default sensor pixel mode. This is the only sensor pixel mode
-              supported unless a camera device advertises
-              {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}.
+            <notes> This is the default sensor pixel mode.
               </notes>
             </value>
             <value>MAXIMUM_RESOLUTION
-            <notes> This sensor pixel mode is offered by devices with capability
-              {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}.
+            <notes>
               In this mode, sensors typically do not bin pixels, as a result can offer larger
               image sizes.
             </notes>
@@ -10523,13 +11006,10 @@
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} mode.
             When operating in
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} mode, sensors
-            with {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            capability would typically perform pixel binning in order to improve low light
+            would typically perform pixel binning in order to improve low light
             performance, noise reduction etc. However, in
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}
-            mode (supported only
-            by {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
-            sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+            mode, sensors typically operate in unbinned mode allowing for a larger image size.
             The stream configurations supported in
             {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}
             mode are also different from those of
@@ -10544,6 +11024,25 @@
             must not be mixed in the same CaptureRequest. In other words, these outputs are
             exclusive to each other.
             This key does not need to be set for reprocess requests.
+            This key will be present on devices supporting the
+            {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR}
+            capability. It may also be present on devices which do not support the aforementioned
+            capability. In that case:
+
+            * The mandatory stream combinations listed in
+              {@link android.hardware.camera2.CameraCharacteristics#mandatoryMaximumResolutionStreamCombinations}
+              would not apply.
+
+            * The bayer pattern of {@code RAW} streams when
+              {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}
+              is selected will be the one listed in {@link android.sensor.info.binningFactor}.
+
+            * The following keys will always be present:
+
+                * android.scaler.streamConfigurationMapMaximumResolution
+                * android.sensor.info.activeArraySizeMaximumResolution
+                * android.sensor.info.pixelArraySizeMaximumResolution
+                * android.sensor.info.preCorrectionActiveArraySizeMaximumResolution
           </details>
         </entry>
       </controls>
@@ -10575,6 +11074,53 @@
           </details>
         </entry>
       </dynamic>
+      <static>
+        <entry name="readoutTimestamp" type="byte" visibility="fwk_java_public"
+          enum="true" hwlevel="legacy" hal_version="3.8">
+        <enum>
+          <value>NOT_SUPPORTED
+            <notes>This camera device doesn't support readout timestamp and onReadoutStarted
+              callback.
+            </notes>
+          </value>
+          <value>HARDWARE
+            <notes>This camera device supports the onReadoutStarted callback as well as outputting
+              readout timestamps. The readout timestamp is generated by the camera hardware and it
+              has the same accuracy and timing characteristics of the start-of-exposure time.
+            </notes>
+          </value>
+        </enum>
+        <description>Whether or not the camera device supports readout timestamp and
+          {@code onReadoutStarted} callback.</description>
+        <details>
+          If this tag is {@code HARDWARE}, the camera device calls
+          {@link CameraCaptureSession.CaptureCallback#onReadoutStarted} in addition to the
+          {@link CameraCaptureSession.CaptureCallback#onCaptureStarted} callback for each capture.
+          The timestamp passed into the callback is the start of camera image readout rather than
+          the start of the exposure. The timestamp source of
+          {@link CameraCaptureSession.CaptureCallback#onReadoutStarted} is the same as that of
+          {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}.
+
+          In addition, the application can switch an output surface's timestamp from start of
+          exposure to start of readout by calling
+          {@link android.hardware.camera2.params.OutputConfiguration#setReadoutTimestampEnabled}.
+
+          The readout timestamp is beneficial for video recording, because the encoder favors
+          uniform timestamps, and the readout timestamps better reflect the cadence at which
+          camera sensors output data.
+
+          Note that the camera device produces the start-of-exposure and start-of-readout callbacks
+          together. As a result, the {@link CameraCaptureSession.CaptureCallback#onReadoutStarted}
+          is called right after {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}. The
+          difference in start-of-readout and start-of-exposure is the sensor exposure time, plus
+          certain constant offset. The offset is usually due to camera sensor level crop, and it is
+          generally constant over time for the same set of output resolutions and capture settings.
+        </details>
+        <hal_details>
+          This property is populated by the camera framework and must not be set at the HAL layer.
+        </hal_details>
+      </entry>
+    </static>
     </section>
     <section name="shading">
       <controls>
@@ -11887,8 +12433,8 @@
               better.
 
               Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
-              {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
-              createCaptureSession} documentation are guaranteed to be supported.
+              {@link android.hardware.camera2.CameraDevice#limited-level-additional-guaranteed-configurations|ACameraDevice_createCaptureSession}
+              documentation are guaranteed to be supported.
 
               All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic
               support for color image capture. The only exception is that the device may
@@ -11914,8 +12460,8 @@
               This camera device is capable of supporting advanced imaging applications.
 
               The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
-              {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
-              createCaptureSession} documentation are guaranteed to be supported.
+              {@link android.hardware.camera2.CameraDevice#full-level-additional-guaranteed-configurations|ACameraDevice_createCaptureSession}
+              documentation are guaranteed to be supported.
 
               A `FULL` device will support below capabilities:
 
@@ -11939,9 +12485,9 @@
               <notes>
               This camera device is running in backward compatibility mode.
 
-              Only the stream configurations listed in the `LEGACY` table in the {@link
-              android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
-              createCaptureSession} documentation are supported.
+              Only the stream configurations listed in the `LEGACY` table in the
+              {@link android.hardware.camera2.CameraDevice#legacy-level-guaranteed-configurations|ACameraDevice_createCaptureSession}
+              documentation are supported.
 
               A `LEGACY` device does not support per-frame control, manual sensor control, manual
               post-processing, arbitrary cropping regions, and has relaxed performance constraints.
@@ -11966,9 +12512,9 @@
               FULL-level capabilities.
 
               The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
-              `LIMITED` tables in the {@link
-              android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
-              createCaptureSession} documentation are guaranteed to be supported.
+              `LIMITED` tables in the
+              {@link android.hardware.camera2.CameraDevice#level-3-additional-guaranteed-configurations|ACameraDevice_createCaptureSession}
+              documentation are guaranteed to be supported.
 
               The following additional capabilities are guaranteed to be supported:
 
@@ -13075,8 +13621,9 @@
             </enum>
             <description>Whether this camera device can support identical set of stream combinations
             involving HEIC image format, compared to the
-            {@link android.hardware.camera2.CameraDevice#createCaptureSession table of combinations}
-            involving JPEG image format required for the device's hardware level and capabilities.
+            {@link android.hardware.camera2.CameraDevice#legacy-level-guaranteed-configurations
+            table of combinations} involving JPEG image format required for the device's hardware
+            level and capabilities.
             </description>
             <details>
             All the static, control and dynamic metadata tags related to JPEG apply to HEIC formats
@@ -13086,9 +13633,9 @@
 
             If a camera device supports HEIC format (ISO/IEC 23008-12), not only does it
             support the existing mandatory stream
-            {@link android.hardware.camera2.CameraDevice#createCaptureSession combinations}
-            required for the device's hardware level and capabilities, it also supports swapping
-            each JPEG stream with HEIC stream in all guaranteed combinations.
+            {@link android.hardware.camera2.CameraDevice#legacy-level-guaranteed-configurations
+            combinations} required for the device's hardware level and capabilities, it also
+            supports swapping each JPEG stream with HEIC stream in all guaranteed combinations.
 
             For every HEIC stream configured by the application, the camera framework sets up 2
             internal streams with camera HAL:
@@ -13548,5 +14095,200 @@
         </entry>
       </static>
     </section>
+    <section name="extension">
+      <controls>
+        <entry name="strength" type="int32" visibility="fwk_java_public" hal_version="3.9">
+          <description>Strength of the extension post-processing effect
+          </description>
+          <range>0 - 100</range>
+          <details>
+          This control allows Camera extension clients to configure the strength of the applied
+          extension effect. Strength equal to 0 means that the extension must not apply any
+          post-processing and return a regular captured frame. Strength equal to 100 is the
+          default level of post-processing applied when the control is not supported or not set
+          by the client. Values between 0 and 100 will have different effect depending on the
+          extension type as described below:
+
+          * {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_BOKEH BOKEH} -
+          the strength is expected to control the amount of blur.
+          * {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_HDR HDR} and
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_NIGHT NIGHT} -
+          the strength can control the amount of images fused and the brightness of the final image.
+          * {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_FACE_RETOUCH FACE_RETOUCH} -
+          the strength value will control the amount of cosmetic enhancement and skin
+          smoothing.
+
+          The control will be supported if the capture request key is part of the list generated by
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#getAvailableCaptureRequestKeys}.
+          The control is only defined and available to clients sending capture requests via
+          {@link android.hardware.camera2.CameraExtensionSession}.
+          The default value is 100.
+          </details>
+        </entry>
+      </controls>
+      <dynamic>
+        <entry name="currentType" type="int32" visibility="fwk_java_public" hal_version="3.9">
+          <description>Contains the extension type of the currently active extension
+          </description>
+          <range>Extension type value listed in
+          {@link android.hardware.camera2.CameraExtensionCharacteristics}</range>
+          <details>
+          The capture result will only be supported and included by camera extension
+          {@link android.hardware.camera2.CameraExtensionSession sessions}.
+          In case the extension session was configured to use
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_AUTOMATIC AUTO},
+          then the extension type value will indicate the currently active extension like
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_HDR HDR},
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_NIGHT NIGHT},
+          etc., and will never return
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_AUTOMATIC AUTO}.
+          In case the extension session was configured to use an extension different from
+          {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_AUTOMATIC AUTO},
+          then the result type will always match with the configured extension type.
+          </details>
+        </entry>
+        <clone entry="android.extension.strength" kind="controls">
+        </clone>
+      </dynamic>
+    </section>
+    <section name="jpegr">
+      <static>
+        <entry name="availableJpegRStreamConfigurations" type="int32" visibility="ndk_public"
+            enum="true" container="array" typedef="streamConfiguration"
+            hwlevel="limited" hal_version="3.9">
+          <array>
+            <size>n</size>
+            <size>4</size>
+          </array>
+          <enum>
+            <value>OUTPUT</value>
+            <value>INPUT</value>
+          </enum>
+          <description>The available Jpeg/R stream
+          configurations that this camera device supports
+          (i.e. format, width, height, output/input stream).
+          </description>
+          <details>
+          The configurations are listed as `(format, width, height, input?)` tuples.
+
+          If the camera device supports Jpeg/R, it will support the same stream combinations with
+          Jpeg/R as it does with P010. The stream combinations with Jpeg/R (or P010) supported
+          by the device is determined by the device's hardware level and capabilities.
+
+          All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats.
+          Configuring JPEG and Jpeg/R streams at the same time is not supported.
+          </details>
+          <ndk_details>
+          All the configuration tuples `(format, width, height, input?)` will contain
+          AIMAGE_FORMAT_JPEGR format as OUTPUT only.
+          </ndk_details>
+        </entry>
+        <entry name="availableJpegRMinFrameDurations" type="int64" visibility="ndk_public"
+               container="array" typedef="streamConfigurationDuration" hwlevel="limited"
+               hal_version="3.9">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the minimum frame duration for each
+          format/size combination for Jpeg/R output formats.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          This should correspond to the frame duration when only that
+          stream is active, with all processing (typically in android.*.mode)
+          set to either OFF or FAST.
+
+          When multiple streams are used in a request, the minimum frame
+          duration will be max(individual stream min durations).
+
+          See android.sensor.frameDuration and
+          android.scaler.availableStallDurations for more details about
+          calculating the max frame rate.
+          </details>
+        </entry>
+        <entry name="availableJpegRStallDurations" type="int64" visibility="ndk_public"
+               container="array" typedef="streamConfigurationDuration" hwlevel="limited"
+               hal_version="3.9">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the maximum stall duration for each
+          output format/size combination for Jpeg/R streams.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          A stall duration is how much extra time would get added
+          to the normal minimum frame duration for a repeating request
+          that has streams with non-zero stall.
+
+          This functions similarly to
+          android.scaler.availableStallDurations for Jpeg/R
+          streams.
+
+          All Jpeg/R output stream formats may have a nonzero stall
+          duration.
+          </details>
+        </entry>
+        <entry name="availableJpegRStreamConfigurationsMaximumResolution" type="int32"
+          visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration"
+          hal_version="3.9">
+          <array>
+            <size>n</size>
+            <size>4</size>
+          </array>
+          <enum>
+            <value>OUTPUT</value>
+            <value>INPUT</value>
+          </enum>
+          <description>The available Jpeg/R stream
+          configurations that this camera device supports
+          (i.e. format, width, height, output/input stream).
+          </description>
+          <details>
+          Refer to android.jpegr.availableJpegRStreamConfigurations for details.
+          </details>
+          <ndk_details>
+          All the configuration tuples `(format, width, height, input?)` will contain
+          AIMAGE_FORMAT_JPEG_R format as OUTPUT only.
+          </ndk_details>
+        </entry>
+        <entry name="availableJpegRMinFrameDurationsMaximumResolution" type="int64"
+          visibility="ndk_public" container="array" typedef="streamConfigurationDuration"
+          hal_version="3.9">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the minimum frame duration for each
+          format/size combination for Jpeg/R output formats for CaptureRequests where
+          android.sensor.pixelMode is set to
+          {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          Refer to android.jpegr.availableJpegRMinFrameDurations for details.
+          </details>
+        </entry>
+        <entry name="availableJpegRStallDurationsMaximumResolution" type="int64"
+          visibility="ndk_public" container="array" typedef="streamConfigurationDuration"
+          hal_version="3.9">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the maximum stall duration for each
+          output format/size combination for Jpeg/R streams for CaptureRequests where
+          android.sensor.pixelMode is set to
+          {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          Refer to android.jpegr.availableJpegRStallDurations for details.
+          </details>
+        </entry>
+      </static>
+    </section>
   </namespace>
 </metadata>
diff --git a/camera/docs/metadata_definitions.xsd b/camera/docs/metadata_definitions.xsd
index 6df5813..2a16726 100644
--- a/camera/docs/metadata_definitions.xsd
+++ b/camera/docs/metadata_definitions.xsd
@@ -206,6 +206,7 @@
                     <enumeration value="hidden" /> <!-- java as @hide. Not included in NDK -->
                     <enumeration value="public" /> <!-- public to both java and NDK -->
                     <enumeration value="fwk_only" /> <!-- java as @hide. Not included in NDK. Not included in hal interfaces. -->
+                    <enumeration value="fwk_java_public" /> <!-- public to java. Not included in NDK. Not included in hal interfaces. -->
                 </restriction>
             </simpleType>
         </attribute>
diff --git a/camera/docs/metadata_enums.py b/camera/docs/metadata_enums.py
index 2949213..62b6330 100644
--- a/camera/docs/metadata_enums.py
+++ b/camera/docs/metadata_enums.py
@@ -39,7 +39,7 @@
   metadata = parser.metadata
 
   for sec in find_all_sections(metadata):
-    for entry in remove_synthetic_or_fwk_only(find_unique_entries(sec)):
+    for entry in remove_hal_non_visible(find_unique_entries(sec)):
       if entry.enum:
         enum_name = entry.name.removeprefix("android.")
         s = enum_name.split(".")
diff --git a/camera/docs/metadata_helpers.py b/camera/docs/metadata_helpers.py
index bd586df..f3d52a6 100644
--- a/camera/docs/metadata_helpers.py
+++ b/camera/docs/metadata_helpers.py
@@ -188,6 +188,7 @@
     "multiResolutionStreamConfigurationMap" : "MultiResolutionStreamConfigurations",
     "deviceStateSensorOrientationMap"  : "DeviceStateSensorOrientationMap",
     "dynamicRangeProfiles"   : "DynamicRangeProfiles",
+    "colorSpaceProfiles"     : "ColorSpaceProfiles",
   }
 
   if typeName not in typename_to_protobuftype:
@@ -852,7 +853,7 @@
     # Convert metadata entry "android.x.y.z" to form
     # "{@link CaptureRequest#X_Y_Z android.x.y.z}"
     def javadoc_crossref_filter(node):
-      if node.applied_visibility in ('public', 'java_public'):
+      if node.applied_visibility in ('public', 'java_public', 'fwk_java_public'):
         return '{@link %s#%s %s}' % (kind_mapping[node.kind],
                                      jkey_identifier(node.name),
                                      node.name)
@@ -862,7 +863,8 @@
     # For each public tag "android.x.y.z" referenced, add a
     # "@see CaptureRequest#X_Y_Z"
     def javadoc_crossref_see_filter(node_set):
-      node_set = (x for x in node_set if x.applied_visibility in ('public', 'java_public'))
+      node_set = (x for x in node_set if x.applied_visibility in \
+                  ('public', 'java_public', 'fwk_java_public'))
 
       text = '\n'
       for node in node_set:
@@ -1357,9 +1359,10 @@
   """
   return (e for e in entries if e.applied_visibility in visibilities)
 
-def remove_synthetic_or_fwk_only(entries):
+def remove_hal_non_visible(entries):
   """
-  Filter the given entries by removing those that are synthetic or fwk_only.
+  Filter the given entries by removing those that are not HAL visible:
+  synthetic, fwk_only, or fwk_java_public.
 
   Args:
     entries: An iterable of Entry nodes
@@ -1367,7 +1370,8 @@
   Yields:
     An iterable of Entry nodes
   """
-  return (e for e in entries if not (e.synthetic or e.visibility == 'fwk_only'))
+  return (e for e in entries if not (e.synthetic or e.visibility == 'fwk_only'
+                                     or e.visibility == 'fwk_java_public'))
 
 """
   Return the vndk version for a given hal minor version. The major version is assumed to be 3
@@ -1410,7 +1414,7 @@
   # sort keys)
   api_level_to_keys_ordered = OrderedDict()
   for api_level_ordered in sorted(api_level_to_keys.keys()):
-    api_level_to_keys_ordered[api_level_ordered] = api_level_to_keys[api_level_ordered]
+    api_level_to_keys_ordered[api_level_ordered] = sorted(api_level_to_keys[api_level_ordered])
   return api_level_to_keys_ordered
 
 def remove_synthetic(entries):
@@ -1466,7 +1470,7 @@
   """
   ret = 0
   for sec in find_all_sections(root):
-      ret += len(list(filter_has_permission_needed(remove_synthetic_or_fwk_only(find_unique_entries(sec)))))
+      ret += len(list(filter_has_permission_needed(remove_hal_non_visible(find_unique_entries(sec)))))
 
   return ret
 
@@ -1585,7 +1589,7 @@
   for section in all_sections:
     min_major_version = None
     min_minor_version = None
-    for entry in remove_synthetic_or_fwk_only(find_unique_entries(section)):
+    for entry in remove_hal_non_visible(find_unique_entries(section)):
       min_major_version = (min_major_version or entry.hal_major_version)
       min_minor_version = (min_minor_version or entry.hal_minor_version)
       if entry.hal_major_version < min_major_version or \
diff --git a/camera/docs/metadata_model.py b/camera/docs/metadata_model.py
index 31a8c74..4428c90 100644
--- a/camera/docs/metadata_model.py
+++ b/camera/docs/metadata_model.py
@@ -977,7 +977,8 @@
     id: An optional numeric string, e.g. '0' or '0xFF'
     deprecated: A boolean, True if the enum should be deprecated.
     optional: A boolean
-    visibility: A string, one of "system", "java_public", "ndk_public", "hidden", "public"
+    visibility: A string, one of "system", "java_public", "ndk_public", "hidden", "public",
+                "fwk_java_public"
     notes: A string describing the notes, or None.
     sdk_notes: A string describing extra notes for public SDK only
     ndk_notes: A string describing extra notes for public NDK only
@@ -1032,7 +1033,8 @@
     parent_enum = None
     if (self.parent is not None and self.parent.parent is not None):
       parent_enum = self.parent.parent
-    if parent_enum is not None and parent_enum.visibility == 'fwk_only' or self._visibility == 'fwk_only':
+    if parent_enum is not None and parent_enum.visibility in ('fwk_only', 'fwk_java_public') \
+        or self._visibility in ('fwk_only', 'fwk_java_public'):
       return ','
     return ', // HIDL v' + str(self._hal_major_version) + '.' + str(self.hal_minor_version)
 
@@ -1249,8 +1251,8 @@
 
   @property
   def hidl_comment_string(self):
-    if self._visibility == 'fwk_only':
-      return 'fwk_only'
+    if self._visibility in ('fwk_only', 'fwk_java_public'):
+      return self._visibility
     visibility_lj = str(self.applied_visibility).ljust(12)
     return visibility_lj + ' | HIDL v' + str(self._hal_major_version) + '.' + str(self._hal_minor_version)
 
diff --git a/camera/docs/ndk_camera_metadata_tags.mako b/camera/docs/ndk_camera_metadata_tags.mako
index c079820..c2bb859 100644
--- a/camera/docs/ndk_camera_metadata_tags.mako
+++ b/camera/docs/ndk_camera_metadata_tags.mako
@@ -79,12 +79,12 @@
 typedef enum acamera_metadata_tag {
     % for sec in find_all_sections(metadata):
 <%
-      entries = remove_synthetic_or_fwk_only(find_unique_entries(sec))
+      entries = remove_hal_non_visible(find_unique_entries(sec))
       skip_sec = all(e.applied_ndk_visible == "false" for e in entries)
       if skip_sec:
         continue
 %>\
-      % for idx,entry in enumerate(remove_synthetic_or_fwk_only(find_unique_entries(sec))):
+      % for idx,entry in enumerate(remove_hal_non_visible(find_unique_entries(sec))):
         % if entry.applied_ndk_visible == "true":
           % if entry.deprecated:
     ${ndk(entry.name) + " = " | csym,ljust(60)}// Deprecated! DO NOT USE
@@ -134,7 +134,7 @@
  */
 
 % for sec in find_all_sections(metadata):
-  % for entry in filter_ndk_visible(remove_synthetic_or_fwk_only(find_unique_entries(sec))):
+  % for entry in filter_ndk_visible(remove_hal_non_visible(find_unique_entries(sec))):
     % if entry.enum:
 // ${ndk(entry.name) | csym}
 typedef enum acamera_metadata_enum_${csym(ndk(entry.name)).lower()} {
diff --git a/camera/docs/vndk_camera_metadata_tags.mako b/camera/docs/vndk_camera_metadata_tags.mako
index 625e9b0..00247ec 100644
--- a/camera/docs/vndk_camera_metadata_tags.mako
+++ b/camera/docs/vndk_camera_metadata_tags.mako
@@ -22,7 +22,7 @@
  * ! Do not edit this file directly !
  *
  * Generated automatically from vndk_camera_metadata_tags.mako. To be included in libcameraservice
- * only by hidl/Utils.cpp.
+ * only by aidl/AidlUtils.cpp.
  */
 
 /**
diff --git a/camera/include/system/camera_metadata_tags.h b/camera/include/system/camera_metadata_tags.h
index 7bea4a8..edf5fe6 100644
--- a/camera/include/system/camera_metadata_tags.h
+++ b/camera/include/system/camera_metadata_tags.h
@@ -67,6 +67,8 @@
     ANDROID_HEIC_INFO,
     ANDROID_AUTOMOTIVE,
     ANDROID_AUTOMOTIVE_LENS,
+    ANDROID_EXTENSION,
+    ANDROID_JPEGR,
     ANDROID_SECTION_COUNT,
 
     VENDOR_SECTION = 0x8000
@@ -113,6 +115,8 @@
     ANDROID_HEIC_INFO_START        = ANDROID_HEIC_INFO         << 16,
     ANDROID_AUTOMOTIVE_START       = ANDROID_AUTOMOTIVE        << 16,
     ANDROID_AUTOMOTIVE_LENS_START  = ANDROID_AUTOMOTIVE_LENS   << 16,
+    ANDROID_EXTENSION_START        = ANDROID_EXTENSION         << 16,
+    ANDROID_JPEGR_START            = ANDROID_JPEGR             << 16,
     VENDOR_SECTION_START           = VENDOR_SECTION            << 16
 } camera_metadata_section_start_t;
 
@@ -190,6 +194,12 @@
     ANDROID_CONTROL_AF_REGIONS_SET,                   // enum         | fwk_only
     ANDROID_CONTROL_AE_REGIONS_SET,                   // enum         | fwk_only
     ANDROID_CONTROL_AWB_REGIONS_SET,                  // enum         | fwk_only
+    ANDROID_CONTROL_SETTINGS_OVERRIDE,                // enum         | public       | HIDL v3.9
+    ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,     // int32[]      | public       | HIDL v3.9
+    ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER, // int32        | system       | HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING,                      // enum         | public       | HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,            // enum         | public       | HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_STATE,                // enum         | public       | HIDL v3.9
     ANDROID_CONTROL_END,
 
     ANDROID_DEMOSAIC_MODE =                           // enum         | system       | HIDL v3.2
@@ -308,6 +318,8 @@
                                                       // enum[]       | ndk_public   | HIDL v3.8
     ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
                                                       // int64        | java_public  | HIDL v3.8
+    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+                                                      // enum[]       | ndk_public   | HIDL v3.9
     ANDROID_REQUEST_END,
 
     ANDROID_SCALER_CROP_REGION =                      // int32[]      | public       | HIDL v3.2
@@ -345,6 +357,7 @@
     ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, // enum         | ndk_public   | HIDL v3.6
     ANDROID_SCALER_CROP_REGION_SET,                   // enum         | fwk_only
     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,        // enum[]       | public       | HIDL v3.8
+    ANDROID_SCALER_RAW_CROP_REGION,                   // int32[]      | public       | HIDL v3.9
     ANDROID_SCALER_END,
 
     ANDROID_SENSOR_EXPOSURE_TIME =                    // int64        | public       | HIDL v3.2
@@ -382,6 +395,7 @@
     ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,// int32[]      | system       | HIDL v3.6
     ANDROID_SENSOR_PIXEL_MODE,                        // enum         | public       | HIDL v3.6
     ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,           // enum         | public       | HIDL v3.6
+    ANDROID_SENSOR_READOUT_TIMESTAMP,                 // enum         | fwk_java_public
     ANDROID_SENSOR_END,
 
     ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE =           // int32[]      | public       | HIDL v3.2
@@ -554,6 +568,25 @@
             ANDROID_AUTOMOTIVE_LENS_START,
     ANDROID_AUTOMOTIVE_LENS_END,
 
+    ANDROID_EXTENSION_STRENGTH =                      // int32        | fwk_java_public
+            ANDROID_EXTENSION_START,
+    ANDROID_EXTENSION_CURRENT_TYPE,                   // int32        | fwk_java_public
+    ANDROID_EXTENSION_END,
+
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS =
+                                                      // enum[]       | ndk_public   | HIDL v3.9
+            ANDROID_JPEGR_START,
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
+                                                      // int64[]      | ndk_public   | HIDL v3.9
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,   // int64[]      | ndk_public   | HIDL v3.9
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+                                                      // enum[]       | ndk_public   | HIDL v3.9
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+                                                      // int64[]      | ndk_public   | HIDL v3.9
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+                                                      // int64[]      | ndk_public   | HIDL v3.9
+    ANDROID_JPEGR_END,
+
 } camera_metadata_tag_t;
 
 /**
@@ -788,6 +821,33 @@
     ANDROID_CONTROL_AWB_REGIONS_SET_TRUE                            ,
 } camera_metadata_enum_android_control_awb_regions_set_t;
 
+// ANDROID_CONTROL_SETTINGS_OVERRIDE
+typedef enum camera_metadata_enum_android_control_settings_override {
+    ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF                           , // HIDL v3.9
+    ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM                          , // HIDL v3.9
+    ANDROID_CONTROL_SETTINGS_OVERRIDE_VENDOR_START                   = 0x4000, // HIDL v3.9
+} camera_metadata_enum_android_control_settings_override_t;
+
+// ANDROID_CONTROL_AUTOFRAMING
+typedef enum camera_metadata_enum_android_control_autoframing {
+    ANDROID_CONTROL_AUTOFRAMING_OFF                                 , // HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_ON                                  , // HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_AUTO                                , // HIDL v3.9
+} camera_metadata_enum_android_control_autoframing_t;
+
+// ANDROID_CONTROL_AUTOFRAMING_AVAILABLE
+typedef enum camera_metadata_enum_android_control_autoframing_available {
+    ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE                     , // HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_TRUE                      , // HIDL v3.9
+} camera_metadata_enum_android_control_autoframing_available_t;
+
+// ANDROID_CONTROL_AUTOFRAMING_STATE
+typedef enum camera_metadata_enum_android_control_autoframing_state {
+    ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE                      , // HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_STATE_FRAMING                       , // HIDL v3.9
+    ANDROID_CONTROL_AUTOFRAMING_STATE_CONVERGED                     , // HIDL v3.9
+} camera_metadata_enum_android_control_autoframing_state_t;
+
 
 // ANDROID_DEMOSAIC_MODE
 typedef enum camera_metadata_enum_android_demosaic_mode {
@@ -927,6 +987,7 @@
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING    , // HIDL v3.6
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT    , // HIDL v3.8
     ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE          , // HIDL v3.8
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES     , // HIDL v3.9
 } camera_metadata_enum_android_request_available_capabilities_t;
 
 // ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP
@@ -954,6 +1015,14 @@
     ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX         = 0x1000, // HIDL v3.8
 } camera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
 
+// ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum camera_metadata_enum_android_request_available_color_space_profiles_map {
+    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED   = -1, // HIDL v3.9
+    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB          = 0, // HIDL v3.9
+    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3    = 7, // HIDL v3.9
+    ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG    = 16, // HIDL v3.9
+} camera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
 
 // ANDROID_SCALER_AVAILABLE_FORMATS
 typedef enum camera_metadata_enum_android_scaler_available_formats {
@@ -1050,6 +1119,7 @@
     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD           = 0x3, // HIDL v3.8
     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL    = 0x4, // HIDL v3.8
     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL             = 0x5, // HIDL v3.8
+    ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW            = 0x6, // HIDL v3.9
     ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START           = 0x10000, // HIDL v3.8
 } camera_metadata_enum_android_scaler_available_stream_use_cases_t;
 
@@ -1100,6 +1170,12 @@
     ANDROID_SENSOR_RAW_BINNING_FACTOR_USED_FALSE                    , // HIDL v3.6
 } camera_metadata_enum_android_sensor_raw_binning_factor_used_t;
 
+// ANDROID_SENSOR_READOUT_TIMESTAMP
+typedef enum camera_metadata_enum_android_sensor_readout_timestamp {
+    ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED                  ,
+    ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE                       ,
+} camera_metadata_enum_android_sensor_readout_timestamp_t;
+
 
 // ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
 typedef enum camera_metadata_enum_android_sensor_info_color_filter_arrangement {
@@ -1353,3 +1429,19 @@
 } camera_metadata_enum_android_automotive_lens_facing_t;
 
 
+
+// ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
+typedef enum camera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations {
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT     , // HIDL v3.9
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT      , // HIDL v3.9
+} camera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_t;
+
+// ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum camera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution {
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+                                                                     , // HIDL v3.9
+    ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+                                                                     , // HIDL v3.9
+} camera_metadata_enum_android_jpegr_available_jpeg_r_stream_configurations_maximum_resolution_t;
+
+
diff --git a/camera/src/camera_metadata.c b/camera/src/camera_metadata.c
index f9d524b..e0c1cad 100644
--- a/camera/src/camera_metadata.c
+++ b/camera/src/camera_metadata.c
@@ -545,8 +545,8 @@
         uint32_t tag_section = entry.tag >> 16;
         int tag_type = get_local_camera_metadata_tag_type(entry.tag, header);
         if (tag_type != (int)entry.type && tag_section < VENDOR_SECTION) {
-            ALOGE("%s: Entry index %zu had tag type %d, but the type was %d",
-                  __FUNCTION__, i, tag_type, entry.type);
+            ALOGE("%s: Entry index %zu (0x%x) had tag type %d, but the type was %d",
+                  __FUNCTION__, i, entry.tag, tag_type, entry.type);
             return CAMERA_METADATA_VALIDATION_ERROR;
         }
 
@@ -1076,8 +1076,7 @@
 }
 
 static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int type,
-        int count,
-        int indentation);
+        metadata_vendor_id_t vendor_id, int count, int indentation);
 
 void dump_camera_metadata(const camera_metadata_t *metadata,
         int fd,
@@ -1150,12 +1149,13 @@
         int count = entry->count;
         if (verbosity < 2 && count > 16) count = 16;
 
-        print_data(fd, data_ptr, entry->tag, entry->type, count, indentation);
+        print_data(fd, data_ptr, entry->tag, entry->type, get_camera_metadata_vendor_id(metadata),
+                   count, indentation);
     }
 }
 
-static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag,
-        int type, int count, int indentation) {
+static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int type,
+        metadata_vendor_id_t vendor_id, int count, int indentation) {
     static int values_per_line[NUM_TYPES] = {
         [TYPE_BYTE]     = 16,
         [TYPE_INT32]    = 4,
@@ -1194,8 +1194,7 @@
                     }
                     break;
                 case TYPE_INT32:
-                    value =
-                            *(int32_t*)(data_ptr + index);
+                    value = *(int32_t*)(data_ptr + index);
                     if (camera_metadata_enum_snprint(tag,
                                                      value,
                                                      value_string_tmp,
@@ -1203,8 +1202,24 @@
                         == OK) {
                         dprintf(fd, "%s ", value_string_tmp);
                     } else {
-                        dprintf(fd, "%" PRId32 " ",
-                                *(int32_t*)(data_ptr + index));
+                        dprintf(fd, "%" PRId32 " ", value);
+                        if (tag == ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS ||
+                            tag == ANDROID_REQUEST_AVAILABLE_RESULT_KEYS ||
+                            tag == ANDROID_REQUEST_AVAILABLE_SESSION_KEYS ||
+                            tag == ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS ||
+                            tag == ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS) {
+                            const char *camera_metadata_tag_name =
+                                get_camera_metadata_tag_name(value);
+                            if (camera_metadata_tag_name != NULL) {
+                                dprintf(fd, "(%s) ", camera_metadata_tag_name);
+                            } else {
+                                const char *camera_metadata_tag_vendor_id =
+                                    get_local_camera_metadata_tag_name_vendor_id(value, vendor_id);
+                                if (camera_metadata_tag_vendor_id != NULL) {
+                                    dprintf(fd, "(%s) ", camera_metadata_tag_vendor_id);
+                                }
+                            }
+                        }
                     }
                     break;
                 case TYPE_FLOAT:
diff --git a/camera/src/camera_metadata_asserts.cpp b/camera/src/camera_metadata_asserts.cpp
index 599413a..4e3748f 100644
--- a/camera/src/camera_metadata_asserts.cpp
+++ b/camera/src/camera_metadata_asserts.cpp
@@ -46,6 +46,10 @@
 #include <aidl/android/hardware/camera/metadata/ControlEnableZsl.h>
 #include <aidl/android/hardware/camera/metadata/ControlAfSceneChange.h>
 #include <aidl/android/hardware/camera/metadata/ControlExtendedSceneMode.h>
+#include <aidl/android/hardware/camera/metadata/ControlSettingsOverride.h>
+#include <aidl/android/hardware/camera/metadata/ControlAutoframing.h>
+#include <aidl/android/hardware/camera/metadata/ControlAutoframingAvailable.h>
+#include <aidl/android/hardware/camera/metadata/ControlAutoframingState.h>
 #include <aidl/android/hardware/camera/metadata/DemosaicMode.h>
 #include <aidl/android/hardware/camera/metadata/EdgeMode.h>
 #include <aidl/android/hardware/camera/metadata/FlashMode.h>
@@ -63,6 +67,7 @@
 #include <aidl/android/hardware/camera/metadata/RequestType.h>
 #include <aidl/android/hardware/camera/metadata/RequestAvailableCapabilities.h>
 #include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
+#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
 #include <aidl/android/hardware/camera/metadata/ScalerAvailableFormats.h>
 #include <aidl/android/hardware/camera/metadata/ScalerAvailableStreamConfigurations.h>
 #include <aidl/android/hardware/camera/metadata/ScalerCroppingType.h>
@@ -108,6 +113,8 @@
 #include <aidl/android/hardware/camera/metadata/HeicInfoSupported.h>
 #include <aidl/android/hardware/camera/metadata/AutomotiveLocation.h>
 #include <aidl/android/hardware/camera/metadata/AutomotiveLensFacing.h>
+#include <aidl/android/hardware/camera/metadata/JpegrAvailableJpegRStreamConfigurations.h>
+#include <aidl/android/hardware/camera/metadata/JpegrAvailableJpegRStreamConfigurationsMaximumResolution.h>
 
 #include <system/camera_metadata_tags.h>
 
@@ -175,6 +182,10 @@
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSection::ANDROID_AUTOMOTIVE));
 static_assert(static_cast<int>(ANDROID_AUTOMOTIVE_LENS)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSection::ANDROID_AUTOMOTIVE_LENS));
+static_assert(static_cast<int>(ANDROID_EXTENSION)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSection::ANDROID_EXTENSION));
+static_assert(static_cast<int>(ANDROID_JPEGR)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSection::ANDROID_JPEGR));
 static_assert(static_cast<int>(VENDOR_SECTION)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSection::VENDOR_SECTION));
 
@@ -242,6 +253,10 @@
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSectionStart::ANDROID_AUTOMOTIVE_START));
 static_assert(static_cast<int>(ANDROID_AUTOMOTIVE_LENS_START)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSectionStart::ANDROID_AUTOMOTIVE_LENS_START));
+static_assert(static_cast<int>(ANDROID_EXTENSION_START)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSectionStart::ANDROID_EXTENSION_START));
+static_assert(static_cast<int>(ANDROID_JPEGR_START)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSectionStart::ANDROID_JPEGR_START));
 static_assert(static_cast<int>(VENDOR_SECTION_START)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataSectionStart::VENDOR_SECTION_START));
 
@@ -353,6 +368,18 @@
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_ZOOM_RATIO));
 static_assert(static_cast<int>(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION));
+static_assert(static_cast<int>(ANDROID_CONTROL_SETTINGS_OVERRIDE)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_SETTINGS_OVERRIDE));
+static_assert(static_cast<int>(ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES));
+static_assert(static_cast<int>(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER));
+static_assert(static_cast<int>(ANDROID_CONTROL_AUTOFRAMING)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_AUTOFRAMING));
+static_assert(static_cast<int>(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_AUTOFRAMING_AVAILABLE));
+static_assert(static_cast<int>(ANDROID_CONTROL_AUTOFRAMING_STATE)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_CONTROL_AUTOFRAMING_STATE));
 static_assert(static_cast<int>(ANDROID_DEMOSAIC_MODE)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_DEMOSAIC_MODE));
 static_assert(static_cast<int>(ANDROID_EDGE_MODE)
@@ -511,6 +538,8 @@
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP));
 static_assert(static_cast<int>(ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE));
+static_assert(static_cast<int>(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP));
 static_assert(static_cast<int>(ANDROID_SCALER_CROP_REGION)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_SCALER_CROP_REGION));
 static_assert(static_cast<int>(ANDROID_SCALER_AVAILABLE_FORMATS)
@@ -563,6 +592,8 @@
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED));
 static_assert(static_cast<int>(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES));
+static_assert(static_cast<int>(ANDROID_SCALER_RAW_CROP_REGION)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_SCALER_RAW_CROP_REGION));
 static_assert(static_cast<int>(ANDROID_SENSOR_EXPOSURE_TIME)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_SENSOR_EXPOSURE_TIME));
 static_assert(static_cast<int>(ANDROID_SENSOR_FRAME_DURATION)
@@ -825,6 +856,18 @@
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_AUTOMOTIVE_LOCATION));
 static_assert(static_cast<int>(ANDROID_AUTOMOTIVE_LENS_FACING)
         == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_AUTOMOTIVE_LENS_FACING));
+static_assert(static_cast<int>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS));
+static_assert(static_cast<int>(ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS));
+static_assert(static_cast<int>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS));
+static_assert(static_cast<int>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION));
+static_assert(static_cast<int>(ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION));
+static_assert(static_cast<int>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION)
+        == static_cast<int>(::aidl::android::hardware::camera::metadata::CameraMetadataTag::ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION));
 
 static_assert(static_cast<int32_t>(ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX)
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ColorCorrectionMode::ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX));
@@ -1083,6 +1126,32 @@
 static_assert(static_cast<int32_t>(ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START)
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlExtendedSceneMode::ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START));
 
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlSettingsOverride::ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlSettingsOverride::ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_SETTINGS_OVERRIDE_VENDOR_START)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlSettingsOverride::ANDROID_CONTROL_SETTINGS_OVERRIDE_VENDOR_START));
+
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_OFF)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframing::ANDROID_CONTROL_AUTOFRAMING_OFF));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_ON)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframing::ANDROID_CONTROL_AUTOFRAMING_ON));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_AUTO)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframing::ANDROID_CONTROL_AUTOFRAMING_AUTO));
+
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframingAvailable::ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_TRUE)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframingAvailable::ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_TRUE));
+
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframingState::ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_STATE_FRAMING)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframingState::ANDROID_CONTROL_AUTOFRAMING_STATE_FRAMING));
+static_assert(static_cast<int32_t>(ANDROID_CONTROL_AUTOFRAMING_STATE_CONVERGED)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ControlAutoframingState::ANDROID_CONTROL_AUTOFRAMING_STATE_CONVERGED));
+
 static_assert(static_cast<int32_t>(ANDROID_DEMOSAIC_MODE_FAST)
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::DemosaicMode::ANDROID_DEMOSAIC_MODE_FAST));
 static_assert(static_cast<int32_t>(ANDROID_DEMOSAIC_MODE_HIGH_QUALITY)
@@ -1226,6 +1295,8 @@
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::RequestAvailableCapabilities::ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT));
 static_assert(static_cast<int32_t>(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE)
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::RequestAvailableCapabilities::ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE));
+static_assert(static_cast<int32_t>(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::RequestAvailableCapabilities::ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES));
 
 static_assert(static_cast<int64_t>(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)
         == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
@@ -1254,6 +1325,15 @@
 static_assert(static_cast<int64_t>(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX)
         == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX));
 
+static_assert(static_cast<int64_t>(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+        == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap::ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED));
+static_assert(static_cast<int64_t>(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB)
+        == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap::ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB));
+static_assert(static_cast<int64_t>(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3)
+        == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap::ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3));
+static_assert(static_cast<int64_t>(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG)
+        == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap::ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG));
+
 static_assert(static_cast<int32_t>(ANDROID_SCALER_AVAILABLE_FORMATS_RAW16)
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::ScalerAvailableFormats::ANDROID_SCALER_AVAILABLE_FORMATS_RAW16));
 static_assert(static_cast<int32_t>(ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE)
@@ -1342,6 +1422,8 @@
         == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL));
 static_assert(static_cast<int64_t>(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL)
         == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL));
+static_assert(static_cast<int64_t>(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW)
+        == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW));
 static_assert(static_cast<int64_t>(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START)
         == static_cast<int64_t>(::aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases::ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START));
 
@@ -1638,3 +1720,13 @@
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::AutomotiveLensFacing::ANDROID_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_CENTER));
 static_assert(static_cast<int32_t>(ANDROID_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT)
         == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::AutomotiveLensFacing::ANDROID_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT));
+
+static_assert(static_cast<int32_t>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::JpegrAvailableJpegRStreamConfigurations::ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT));
+static_assert(static_cast<int32_t>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::JpegrAvailableJpegRStreamConfigurations::ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT));
+
+static_assert(static_cast<int32_t>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::JpegrAvailableJpegRStreamConfigurationsMaximumResolution::ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT));
+static_assert(static_cast<int32_t>(ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT)
+        == static_cast<int32_t>(::aidl::android::hardware::camera::metadata::JpegrAvailableJpegRStreamConfigurationsMaximumResolution::ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT));
diff --git a/camera/src/camera_metadata_tag_info.c b/camera/src/camera_metadata_tag_info.c
index 4a652aa..1dac402 100644
--- a/camera/src/camera_metadata_tag_info.c
+++ b/camera/src/camera_metadata_tag_info.c
@@ -64,6 +64,8 @@
     [ANDROID_HEIC_INFO]            = "android.heic.info",
     [ANDROID_AUTOMOTIVE]           = "android.automotive",
     [ANDROID_AUTOMOTIVE_LENS]      = "android.automotive.lens",
+    [ANDROID_EXTENSION]            = "android.extension",
+    [ANDROID_JPEGR]                = "android.jpegr",
 };
 
 unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2] = {
@@ -132,6 +134,10 @@
                                        ANDROID_AUTOMOTIVE_END },
     [ANDROID_AUTOMOTIVE_LENS]      = { ANDROID_AUTOMOTIVE_LENS_START,
                                        ANDROID_AUTOMOTIVE_LENS_END },
+    [ANDROID_EXTENSION]            = { ANDROID_EXTENSION_START,
+                                       ANDROID_EXTENSION_END },
+    [ANDROID_JPEGR]                = { ANDROID_JPEGR_START,
+                                       ANDROID_JPEGR_END },
 };
 
 static tag_info_t android_color_correction[ANDROID_COLOR_CORRECTION_END -
@@ -261,6 +267,18 @@
     { "aeRegionsSet",                  TYPE_BYTE   },
     [ ANDROID_CONTROL_AWB_REGIONS_SET - ANDROID_CONTROL_START ] =
     { "awbRegionsSet",                 TYPE_BYTE   },
+    [ ANDROID_CONTROL_SETTINGS_OVERRIDE - ANDROID_CONTROL_START ] =
+    { "settingsOverride",              TYPE_INT32  },
+    [ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES - ANDROID_CONTROL_START ] =
+    { "availableSettingsOverrides",    TYPE_INT32  },
+    [ ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER - ANDROID_CONTROL_START ] =
+    { "settingsOverridingFrameNumber", TYPE_INT32  },
+    [ ANDROID_CONTROL_AUTOFRAMING - ANDROID_CONTROL_START ] =
+    { "autoframing",                   TYPE_BYTE   },
+    [ ANDROID_CONTROL_AUTOFRAMING_AVAILABLE - ANDROID_CONTROL_START ] =
+    { "autoframingAvailable",          TYPE_BYTE   },
+    [ ANDROID_CONTROL_AUTOFRAMING_STATE - ANDROID_CONTROL_START ] =
+    { "autoframingState",              TYPE_BYTE   },
 };
 
 static tag_info_t android_demosaic[ANDROID_DEMOSAIC_END -
@@ -468,6 +486,9 @@
     [ ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE - ANDROID_REQUEST_START ] =
     { "recommendedTenBitDynamicRangeProfile",
                                         TYPE_INT64  },
+    [ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP - ANDROID_REQUEST_START ] =
+    { "availableColorSpaceProfilesMap",
+                                        TYPE_INT64  },
 };
 
 static tag_info_t android_scaler[ANDROID_SCALER_END -
@@ -536,6 +557,8 @@
     { "cropRegionSet",                 TYPE_BYTE   },
     [ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES - ANDROID_SCALER_START ] =
     { "availableStreamUseCases",       TYPE_INT64  },
+    [ ANDROID_SCALER_RAW_CROP_REGION - ANDROID_SCALER_START ] =
+    { "rawCropRegion",                 TYPE_INT32  },
 };
 
 static tag_info_t android_sensor[ANDROID_SENSOR_END -
@@ -617,6 +640,8 @@
     { "pixelMode",                     TYPE_BYTE   },
     [ ANDROID_SENSOR_RAW_BINNING_FACTOR_USED - ANDROID_SENSOR_START ] =
     { "rawBinningFactorUsed",          TYPE_BYTE   },
+    [ ANDROID_SENSOR_READOUT_TIMESTAMP - ANDROID_SENSOR_START ] =
+    { "readoutTimestamp",              TYPE_BYTE   },
 };
 
 static tag_info_t android_sensor_info[ANDROID_SENSOR_INFO_END -
@@ -903,6 +928,35 @@
     { "facing",                        TYPE_BYTE   },
 };
 
+static tag_info_t android_extension[ANDROID_EXTENSION_END -
+        ANDROID_EXTENSION_START] = {
+    [ ANDROID_EXTENSION_STRENGTH - ANDROID_EXTENSION_START ] =
+    { "strength",                      TYPE_INT32  },
+    [ ANDROID_EXTENSION_CURRENT_TYPE - ANDROID_EXTENSION_START ] =
+    { "currentType",                   TYPE_INT32  },
+};
+
+static tag_info_t android_jpegr[ANDROID_JPEGR_END -
+        ANDROID_JPEGR_START] = {
+    [ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS - ANDROID_JPEGR_START ] =
+    { "availableJpegRStreamConfigurations",
+                                        TYPE_INT32  },
+    [ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS - ANDROID_JPEGR_START ] =
+    { "availableJpegRMinFrameDurations",
+                                        TYPE_INT64  },
+    [ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS - ANDROID_JPEGR_START ] =
+    { "availableJpegRStallDurations",  TYPE_INT64  },
+    [ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION - ANDROID_JPEGR_START ] =
+    { "availableJpegRStreamConfigurationsMaximumResolution",
+                                        TYPE_INT32  },
+    [ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION - ANDROID_JPEGR_START ] =
+    { "availableJpegRMinFrameDurationsMaximumResolution",
+                                        TYPE_INT64  },
+    [ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION - ANDROID_JPEGR_START ] =
+    { "availableJpegRStallDurationsMaximumResolution",
+                                        TYPE_INT64  },
+};
+
 
 tag_info_t *tag_info[ANDROID_SECTION_COUNT] = {
     android_color_correction,
@@ -937,6 +991,8 @@
     android_heic_info,
     android_automotive,
     android_automotive_lens,
+    android_extension,
+    android_jpegr,
 };
 
 static int32_t tag_permission_needed[18] = {
@@ -1736,6 +1792,84 @@
             }
             break;
         }
+        case ANDROID_CONTROL_SETTINGS_OVERRIDE: {
+            switch (value) {
+                case ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM:
+                    msg = "ZOOM";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SETTINGS_OVERRIDE_VENDOR_START:
+                    msg = "VENDOR_START";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES: {
+            break;
+        }
+        case ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER: {
+            break;
+        }
+        case ANDROID_CONTROL_AUTOFRAMING: {
+            switch (value) {
+                case ANDROID_CONTROL_AUTOFRAMING_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AUTOFRAMING_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AUTOFRAMING_AUTO:
+                    msg = "AUTO";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AUTOFRAMING_AVAILABLE: {
+            switch (value) {
+                case ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE:
+                    msg = "FALSE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_TRUE:
+                    msg = "TRUE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AUTOFRAMING_STATE: {
+            switch (value) {
+                case ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE:
+                    msg = "INACTIVE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AUTOFRAMING_STATE_FRAMING:
+                    msg = "FRAMING";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AUTOFRAMING_STATE_CONVERGED:
+                    msg = "CONVERGED";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
 
         case ANDROID_DEMOSAIC_MODE: {
             switch (value) {
@@ -2275,6 +2409,10 @@
                     msg = "STREAM_USE_CASE";
                     ret = 0;
                     break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES:
+                    msg = "COLOR_SPACE_PROFILES";
+                    ret = 0;
+                    break;
                 default:
                     msg = "error: enum value out of range";
             }
@@ -2360,6 +2498,29 @@
         case ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE: {
             break;
         }
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP: {
+            switch (value) {
+                case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED:
+                    msg = "UNSPECIFIED";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+                    msg = "SRGB";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+                    msg = "DISPLAY_P3";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
+                    msg = "BT2020_HLG";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
 
         case ANDROID_SCALER_CROP_REGION: {
             break;
@@ -2653,6 +2814,10 @@
                     msg = "VIDEO_CALL";
                     ret = 0;
                     break;
+                case ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW:
+                    msg = "CROPPED_RAW";
+                    ret = 0;
+                    break;
                 case ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START:
                     msg = "VENDOR_START";
                     ret = 0;
@@ -2662,6 +2827,9 @@
             }
             break;
         }
+        case ANDROID_SCALER_RAW_CROP_REGION: {
+            break;
+        }
 
         case ANDROID_SENSOR_EXPOSURE_TIME: {
             break;
@@ -2901,6 +3069,21 @@
             }
             break;
         }
+        case ANDROID_SENSOR_READOUT_TIMESTAMP: {
+            switch (value) {
+                case ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED:
+                    msg = "NOT_SUPPORTED";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE:
+                    msg = "HARDWARE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
 
         case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE: {
             break;
@@ -3734,6 +3917,56 @@
             break;
         }
 
+        case ANDROID_EXTENSION_STRENGTH: {
+            break;
+        }
+        case ANDROID_EXTENSION_CURRENT_TYPE: {
+            break;
+        }
+
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS: {
+            switch (value) {
+                case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT:
+                    msg = "OUTPUT";
+                    ret = 0;
+                    break;
+                case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT:
+                    msg = "INPUT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS: {
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS: {
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION: {
+            switch (value) {
+                case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT:
+                    msg = "OUTPUT";
+                    ret = 0;
+                    break;
+                case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT:
+                    msg = "INPUT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION: {
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION: {
+            break;
+        }
+
     }
 
     strncpy(dst, msg, size - 1);
@@ -4664,6 +4897,90 @@
                 }
             break;
         }
+        case ANDROID_CONTROL_SETTINGS_OVERRIDE: {
+                enumName = "OFF";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
+                    ret = 0;
+                    break;
+                }
+                enumName = "ZOOM";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM;
+                    ret = 0;
+                    break;
+                }
+                enumName = "VENDOR_START";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_SETTINGS_OVERRIDE_VENDOR_START;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES: {
+            break;
+        }
+        case ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER: {
+            break;
+        }
+        case ANDROID_CONTROL_AUTOFRAMING: {
+                enumName = "OFF";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_OFF;
+                    ret = 0;
+                    break;
+                }
+                enumName = "ON";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_ON;
+                    ret = 0;
+                    break;
+                }
+                enumName = "AUTO";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_AUTO;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
+        case ANDROID_CONTROL_AUTOFRAMING_AVAILABLE: {
+                enumName = "FALSE";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE;
+                    ret = 0;
+                    break;
+                }
+                enumName = "TRUE";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_TRUE;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
+        case ANDROID_CONTROL_AUTOFRAMING_STATE: {
+                enumName = "INACTIVE";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
+                    ret = 0;
+                    break;
+                }
+                enumName = "FRAMING";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_STATE_FRAMING;
+                    ret = 0;
+                    break;
+                }
+                enumName = "CONVERGED";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_CONTROL_AUTOFRAMING_STATE_CONVERGED;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
 
         case ANDROID_DEMOSAIC_MODE: {
                 enumName = "FAST";
@@ -5270,6 +5587,12 @@
                     ret = 0;
                     break;
                 }
+                enumName = "COLOR_SPACE_PROFILES";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES;
+                    ret = 0;
+                    break;
+                }
             break;
         }
         case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS: {
@@ -5374,6 +5697,33 @@
         case ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE: {
             break;
         }
+        case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP: {
+                enumName = "UNSPECIFIED";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+                    ret = 0;
+                    break;
+                }
+                enumName = "SRGB";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB;
+                    ret = 0;
+                    break;
+                }
+                enumName = "DISPLAY_P3";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3;
+                    ret = 0;
+                    break;
+                }
+                enumName = "BT2020_HLG";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
 
         case ANDROID_SCALER_CROP_REGION: {
             break;
@@ -5718,6 +6068,12 @@
                     ret = 0;
                     break;
                 }
+                enumName = "CROPPED_RAW";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
+                    ret = 0;
+                    break;
+                }
                 enumName = "VENDOR_START";
                 if (strncmp(name, enumName, size) == 0) {
                     *value = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START;
@@ -5726,6 +6082,9 @@
                 }
             break;
         }
+        case ANDROID_SCALER_RAW_CROP_REGION: {
+            break;
+        }
 
         case ANDROID_SENSOR_EXPOSURE_TIME: {
             break;
@@ -6009,6 +6368,21 @@
                 }
             break;
         }
+        case ANDROID_SENSOR_READOUT_TIMESTAMP: {
+                enumName = "NOT_SUPPORTED";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
+                    ret = 0;
+                    break;
+                }
+                enumName = "HARDWARE";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
 
         case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE: {
             break;
@@ -6912,6 +7286,56 @@
             break;
         }
 
+        case ANDROID_EXTENSION_STRENGTH: {
+            break;
+        }
+        case ANDROID_EXTENSION_CURRENT_TYPE: {
+            break;
+        }
+
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS: {
+                enumName = "OUTPUT";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT;
+                    ret = 0;
+                    break;
+                }
+                enumName = "INPUT";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_INPUT;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS: {
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS: {
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION: {
+                enumName = "OUTPUT";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT;
+                    ret = 0;
+                    break;
+                }
+                enumName = "INPUT";
+                if (strncmp(name, enumName, size) == 0) {
+                    *value = ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT;
+                    ret = 0;
+                    break;
+                }
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION: {
+            break;
+        }
+        case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION: {
+            break;
+        }
+
     }
 
     return ret;
diff --git a/private/camera/include/camera_metadata_hidden.h b/private/camera/include/camera_metadata_hidden.h
index 31f1ad1..fa201ed 100644
--- a/private/camera/include/camera_metadata_hidden.h
+++ b/private/camera/include/camera_metadata_hidden.h
@@ -17,6 +17,9 @@
 #ifndef SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H
 #define SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H
 
+#include <stdint.h>
+#include <cutils/compiler.h>
+#include <system/camera_metadata.h>
 #include <system/camera_vendor_tags.h>
 
 /**
diff --git a/radio/src/radio_metadata.c b/radio/src/radio_metadata.c
index 70ff604..10f53a7 100644
--- a/radio/src/radio_metadata.c
+++ b/radio/src/radio_metadata.c
@@ -86,6 +86,7 @@
         new_size_int *= 2;
 
     ALOGV("%s growing from %u to %u", __func__, metadata->size_int, new_size_int);
+    /* NOLINTNEXTLINE(clang-analyzer-unix.MallocSizeof) */
     metadata = realloc(metadata, new_size_int * sizeof(uint32_t));
     if (metadata == NULL) {
         return -ENOMEM;
@@ -192,6 +193,7 @@
                             const uint32_t sub_channel)
 {
     radio_metadata_buffer_t *metadata_buf =
+            /* NOLINTNEXTLINE(clang-analyzer-unix.MallocSizeof) */
             (radio_metadata_buffer_t *)calloc(RADIO_METADATA_DEFAULT_SIZE, sizeof(uint32_t));
     if (metadata_buf == NULL) {
         return -ENOMEM;
diff --git a/tests/Android.bp b/tests/Android.bp
index 34e6f82..326b504 100644
--- a/tests/Android.bp
+++ b/tests/Android.bp
@@ -28,10 +28,33 @@
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
     ],
 
     test_suites: ["device-tests"],
 
 }
+
+cc_test {
+    name: "EffectParamWrapper_tests",
+
+    shared_libs: [
+        "libbase",
+        "liblog",
+        "libutils",
+    ],
+
+    header_libs: ["libmedia_headers"],
+
+    srcs: [
+        "EffectParamWrapper_tests.cpp",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/tests/EffectParamWrapper_tests.cpp b/tests/EffectParamWrapper_tests.cpp
new file mode 100644
index 0000000..7136d77
--- /dev/null
+++ b/tests/EffectParamWrapper_tests.cpp
@@ -0,0 +1,343 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <limits>
+#include <locale>
+
+#define LOG_TAG "EffectParamWrapper_Test"
+#include <log/log.h>
+#include <system/audio_effects/audio_effects_utils.h>
+
+using namespace android;
+using android::effect::utils::EffectParamReader;
+using android::effect::utils::EffectParamWrapper;
+using android::effect::utils::EffectParamWriter;
+
+TEST(EffectParamWrapperTest, setAndGetMatches) {
+    effect_param_t param = {.psize = 2, .vsize = 0x10};
+    const auto wrapper = EffectParamWrapper(param);
+    effect_param_t target = wrapper.getEffectParam();
+    const auto targetWrapper = EffectParamWrapper(target);
+    EXPECT_TRUE(0 == std::memcmp(&param, &target, sizeof(effect_param_t)));
+    EXPECT_EQ(targetWrapper, wrapper);
+}
+
+TEST(EffectParamWrapperTest, validateCmdSize) {
+    effect_param_t param = {.psize = 1, .vsize = 4};
+    const auto wrapper = EffectParamWrapper(param);
+    size_t minCmdSize = sizeof(effect_param_t) +
+                        wrapper.getPaddedParameterSize() +
+                        wrapper.getValueSize();
+    EXPECT_FALSE(wrapper.validateCmdSize(minCmdSize - 1));
+    EXPECT_TRUE(wrapper.validateCmdSize(minCmdSize));
+    EXPECT_TRUE(wrapper.validateCmdSize(minCmdSize + 1));
+}
+
+TEST(EffectParamWrapperTest, validateCmdSizeOverflow) {
+    effect_param_t param = {.psize = std::numeric_limits<uint32_t>::max(),
+                            .vsize = std::numeric_limits<uint32_t>::max()};
+    const auto wrapper = EffectParamWrapper(param);
+    uint64_t minCmdSize = (uint64_t)sizeof(effect_param_t) +
+                          wrapper.getPaddedParameterSize() +
+                          wrapper.getValueSize();
+    EXPECT_FALSE(wrapper.validateCmdSize(minCmdSize - 1));
+    EXPECT_TRUE(wrapper.validateCmdSize(minCmdSize));
+    EXPECT_TRUE(wrapper.validateCmdSize(minCmdSize + 1));
+}
+
+TEST(EffectParamWrapperTest, validateParamValueSize) {
+    effect_param_t param = {.psize = 1, .vsize = 4};
+    const auto wrapper = EffectParamWrapper(param);
+    EXPECT_TRUE(wrapper.validateParamValueSize(param.psize, param.vsize));
+    EXPECT_TRUE(wrapper.validateParamValueSize(0, param.vsize));
+    EXPECT_TRUE(wrapper.validateParamValueSize(param.psize, 0));
+    EXPECT_FALSE(wrapper.validateParamValueSize(param.psize + 1, 0));
+    EXPECT_FALSE(wrapper.validateParamValueSize(0, param.vsize + 1));
+}
+
+TEST(EffectParamWrapperTest, padding) {
+    for (size_t i = 0; i < 0x100; i++) {
+      EXPECT_EQ(
+          sizeof(uint32_t) * ((i + sizeof(uint32_t) - 1) / sizeof(uint32_t)),
+          EffectParamWrapper::padding(i))
+          << i;
+    }
+}
+
+TEST(EffectParamWrapperTest, getPaddedParameterSize) {
+    effect_param_t psize1 = {.psize = 1};
+    const auto wrapper1 = EffectParamWrapper(psize1);
+    EXPECT_EQ(4, wrapper1.getPaddedParameterSize());
+    EXPECT_EQ(4, wrapper1.padding(psize1.psize));
+
+    effect_param_t psize4 = {.psize = 4};
+    const auto wrapper4 = EffectParamWrapper(psize4);
+    EXPECT_EQ(4, wrapper4.getPaddedParameterSize());
+    EXPECT_EQ(wrapper4.getPaddedParameterSize(), wrapper4.padding(psize4.psize));
+
+    effect_param_t psize6 = {.psize = 6};
+    const auto wrapper6 = EffectParamWrapper(psize6);
+    EXPECT_EQ(8, wrapper6.getPaddedParameterSize());
+    EXPECT_EQ(wrapper6.getPaddedParameterSize(), wrapper6.padding(psize6.psize));
+}
+
+TEST(EffectParamWrapperTest, getPVSize) {
+    effect_param_t vsize1 = {.vsize = 1, .psize = 0xff};
+    const auto wrapper1 = EffectParamWrapper(vsize1);
+    EXPECT_EQ(vsize1.vsize, wrapper1.getValueSize());
+
+    effect_param_t vsize2 = {.vsize = 0xff, .psize = 0xbe};
+    const auto wrapper2 = EffectParamWrapper(vsize2);
+    EXPECT_EQ(vsize2.vsize, wrapper2.getValueSize());
+
+    EXPECT_EQ(vsize1.psize, wrapper1.getParameterSize());
+    EXPECT_EQ(vsize1.vsize, wrapper1.getValueSize());
+    EXPECT_EQ(sizeof(effect_param_t) + EffectParamWrapper::padding(vsize1.psize) + vsize1.vsize,
+              wrapper1.getTotalSize());
+
+    EXPECT_EQ(vsize2.psize, wrapper2.getParameterSize());
+    EXPECT_EQ(vsize2.vsize, wrapper2.getValueSize());
+    EXPECT_EQ(sizeof(effect_param_t) + EffectParamWrapper::padding(vsize2.psize) + vsize2.vsize,
+              wrapper2.getTotalSize());
+}
+
+TEST(EffectParamWrapperTest, toString) {
+    effect_param_t param = {.status = -1, .psize = 2, .vsize = 4};
+    const auto wrapper = EffectParamWrapper(param);
+    EXPECT_TRUE(wrapper.toString().find("effect_param_t: ") != std::string::npos);
+    EXPECT_TRUE(wrapper.toString().find("status: -1") != std::string::npos);
+    EXPECT_TRUE(wrapper.toString().find("p: 2") != std::string::npos);
+    EXPECT_TRUE(wrapper.toString().find("v: 4") != std::string::npos);
+}
+
+TEST(EffectParamWriterTest, writeReadFromData) {
+    constexpr uint16_t testData[8] = {0x200,  0x0,    0xffffu, 0xbead,
+                                      0xfefe, 0x5555, 0xeeee,  0x2};
+    uint16_t targetData[8];
+    char buf[sizeof(effect_param_t) + 8 * sizeof(uint16_t)];
+    effect_param_t *param = (effect_param_t *)(&buf);
+    param->psize = 0;
+    param->vsize = 8 * sizeof(uint16_t);
+    auto wrapper = EffectParamWriter(*param);
+
+    // write testData into effect_param_t data buffer
+    ASSERT_EQ(OK, wrapper.writeToData(&testData, 8 * sizeof(uint16_t) /* len */,
+                                0 /* offset */, 8 * sizeof(uint16_t) /* max */))
+        << wrapper.toString();
+
+    // read first half and compare
+    std::memset(&targetData, 0, 8 * sizeof(uint16_t));
+    EXPECT_EQ(OK, wrapper.readFromData(&targetData, 4 * sizeof(uint16_t) /* len */, 0 /* offset */,
+                                       4 * sizeof(uint16_t) /* max */))
+        << wrapper.toString();
+    EXPECT_EQ(0, std::memcmp(&testData, &targetData, 4 * sizeof(uint16_t)));
+
+    // read second half and compare
+    std::memset(&targetData, 0, 8 * sizeof(uint16_t));
+    EXPECT_EQ(OK, wrapper.readFromData(&targetData, 4 * sizeof(uint16_t) /* len */,
+                                       4 * sizeof(uint16_t) /* offset */,
+                                       8 * sizeof(uint16_t) /* max */))
+        << wrapper.toString();
+    EXPECT_EQ(0, std::memcmp(testData + 4, &targetData, 4 * sizeof(uint16_t)));
+
+    // read all and compare
+    std::memset(&targetData, 0, 8 * sizeof(uint16_t));
+    EXPECT_EQ(OK, wrapper.readFromData(&targetData, 8 * sizeof(uint16_t), 0 /* offset */,
+                                       8 * sizeof(uint16_t) /* max */))
+        << wrapper.toString();
+    EXPECT_EQ(0, std::memcmp(&testData, &targetData, 8 * sizeof(uint16_t)));
+}
+
+TEST(EffectParamWriterReaderTest, writeAndReadParameterOneByOne) {
+    constexpr uint16_t data[11] = {
+        0x0f0f, 0x2020, 0xffff, 0xbead, 0x5e5e, 0x0 /* padding */,
+        0xe5e5, 0xeeee, 0x1111, 0x8888, 0xabab};
+    char buf[sizeof(effect_param_t) + 11 * sizeof(uint16_t)] = {};
+    effect_param_t *param = (effect_param_t *)(&buf);
+    param->psize = 5 * sizeof(uint16_t);
+    param->vsize = 5 * sizeof(uint16_t);
+    auto writer = EffectParamWriter(*param);
+    auto reader = EffectParamReader(*param);
+
+    // write data into effect_param_t data buffer
+    EXPECT_EQ(OK, writer.writeToParameter(&data[0]));
+    EXPECT_EQ(OK, writer.writeToParameter(&data[1]));
+    EXPECT_EQ(OK, writer.writeToParameter(&data[2]));
+    EXPECT_EQ(OK, writer.writeToParameter(&data[3]));
+    EXPECT_EQ(OK, writer.writeToParameter(&data[4]));
+    EXPECT_NE(OK, writer.writeToParameter(&data[5])); // expect write error
+    EXPECT_EQ(OK, writer.writeToValue(&data[6]));
+    EXPECT_EQ(OK, writer.writeToValue(&data[7]));
+    EXPECT_EQ(OK, writer.writeToValue(&data[8]));
+    EXPECT_EQ(OK, writer.writeToValue(&data[9]));
+    EXPECT_EQ(OK, writer.writeToValue(&data[10]));
+    EXPECT_NE(OK, writer.writeToValue(&data[10])); // expect write error
+
+    // read and compare
+    uint16_t getData[12] = {};
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[0]));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[1]));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[2]));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[3]));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[4]));
+    EXPECT_NE(OK, reader.readFromParameter(&getData[5])); // expect read error
+
+    EXPECT_EQ(OK, reader.readFromValue(&getData[6]));
+    EXPECT_EQ(OK, reader.readFromValue(&getData[7]));
+    EXPECT_EQ(OK, reader.readFromValue(&getData[8]));
+    EXPECT_EQ(OK, reader.readFromValue(&getData[9]));
+    EXPECT_EQ(OK, reader.readFromValue(&getData[10]));
+    EXPECT_NE(OK, reader.readFromValue(&getData[11])); // expect read error
+
+    EXPECT_EQ(0, std::memcmp(&buf[sizeof(effect_param_t)], &data, 11 * sizeof(uint16_t)));
+    EXPECT_EQ(0, std::memcmp(&getData, &data, 11 * sizeof(uint16_t)));
+}
+
+TEST(EffectParamWriterReaderTest, writeAndReadParameterN) {
+    constexpr uint16_t data[11] = {
+        0x0f0f, 0x2020, 0xffff, 0x1111, 0xabab, 0x0 /* padding */,
+        0xe5e5, 0xeeee, 0xbead, 0x8888, 0x5e5e};
+    char buf[sizeof(effect_param_t) + 11 * sizeof(uint16_t)] = {};
+    effect_param_t *param = (effect_param_t *)(&buf);
+    param->psize = 5 * sizeof(uint16_t);
+    param->vsize = 5 * sizeof(uint16_t);
+    auto writer = EffectParamWriter(*param);
+    auto reader = EffectParamReader(*param);
+
+    // write data into effect_param_t data buffer
+    EXPECT_EQ(OK, writer.writeToParameter(&data[0]));
+    EXPECT_EQ(OK, writer.writeToParameter(&data[1], 2));
+    EXPECT_EQ(OK, writer.writeToParameter(&data[3], 2));
+    EXPECT_NE(OK, writer.writeToParameter(&data[5])); // expect write error
+    EXPECT_EQ(OK, writer.writeToValue(&data[6], 3));
+    EXPECT_EQ(OK, writer.writeToValue(&data[9], 2));
+    EXPECT_NE(OK, writer.writeToValue(&data[10])); // expect write error
+
+    // read and compare
+    uint16_t getData[12] = {};
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[0], 2));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[2]));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[3], 2));
+    EXPECT_NE(OK, reader.readFromParameter(&getData[5])); // expect read error
+
+    EXPECT_EQ(OK, reader.readFromValue(&getData[6]));
+    EXPECT_EQ(OK, reader.readFromValue(&getData[7], 2));
+    EXPECT_EQ(OK, reader.readFromValue(&getData[9], 2));
+    EXPECT_NE(OK, reader.readFromValue(&getData[11])); // expect read error
+
+    EXPECT_EQ(0, std::memcmp(&buf[sizeof(effect_param_t)], &data, 11 * sizeof(uint16_t)));
+    EXPECT_EQ(0, std::memcmp(&getData, &data, 11 * sizeof(uint16_t)));
+}
+
+TEST(EffectParamWriterReaderTest, writeAndReadParameterBlock) {
+    constexpr uint16_t data[11] = {
+        0xe5e5, 0xeeee, 0x1111, 0x8888, 0xabab, 0x0, /* padding */
+        0x0f0f, 0x2020, 0xffff, 0xbead, 0x5e5e,
+    };
+    char buf[sizeof(effect_param_t) + 11 * sizeof(uint16_t)] = {};
+    effect_param_t *param = (effect_param_t *)(&buf);
+    param->psize = 5 * sizeof(uint16_t);
+    param->vsize = 5 * sizeof(uint16_t);
+    auto writer = EffectParamWriter(*param);
+    auto reader = EffectParamReader(*param);
+
+    // write data into effect_param_t data buffer
+    EXPECT_EQ(OK, writer.writeToParameter(&data[0], 5));
+    EXPECT_NE(OK, writer.writeToParameter(&data[5])); // expect write error
+    EXPECT_EQ(OK, writer.writeToValue(&data[6], 5));
+    EXPECT_NE(OK, writer.writeToValue(&data[10])); // expect write error
+    writer.finishValueWrite();
+    EXPECT_EQ(5 * sizeof(uint16_t), writer.getValueSize());
+    EXPECT_EQ(sizeof(effect_param_t) +
+                  6 * sizeof(uint16_t) /* padded parameter */ +
+                  5 * sizeof(uint16_t),
+              writer.getTotalSize())
+        << writer.toString();
+
+    // read and compare
+    uint16_t getData[12] = {};
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[0], 5));
+    EXPECT_NE(OK, reader.readFromParameter(&getData[5])); // expect read error
+
+    EXPECT_EQ(OK, reader.readFromValue(&getData[6], 5));
+    EXPECT_NE(OK, reader.readFromValue(&getData[11])); // expect read error
+
+    EXPECT_EQ(0, std::memcmp(&buf[sizeof(effect_param_t)], &data, 11 * sizeof(uint16_t)));
+    EXPECT_EQ(0, std::memcmp(&getData, &data, 11 * sizeof(uint16_t)));
+}
+
+TEST(EffectParamWriterTest, setStatus) {
+    effect_param_t param = {.status = -1, .psize = 2, .vsize = 4};
+    auto wrapper = EffectParamWriter(param);
+    EXPECT_EQ(-1, wrapper.getStatus()) << wrapper.toString();
+    wrapper.setStatus(0);
+    EXPECT_EQ(0, wrapper.getStatus()) << wrapper.toString();
+    EXPECT_EQ(wrapper.getStatus(), param.status);
+    wrapper.setStatus(0x10);
+    EXPECT_EQ(0x10, wrapper.getStatus()) << wrapper.toString();
+    EXPECT_EQ(wrapper.getStatus(), param.status) << wrapper.toString();
+}
+
+TEST(EffectParamWriterReaderTest, writeAndReadParameterDiffSize) {
+    constexpr uint16_t data[11] = {
+        0xbead, 0x5e5e, 0x0f0f, 0x2020, 0xffff, 0x0 /* padding */,
+        0xe5e5, 0xeeee, 0x1111, 0x8888, 0xabab};
+    char buf[sizeof(effect_param_t) + 11 * sizeof(uint16_t)] = {};
+    effect_param_t *param = (effect_param_t *)(&buf);
+    param->psize = 5 * sizeof(uint16_t);
+    param->vsize = 5 * sizeof(uint16_t);
+    auto writer = EffectParamWriter(*param);
+    auto reader = EffectParamReader(*param);
+
+    // write data into effect_param_t data buffer
+    EXPECT_EQ(OK, writer.writeToParameter(&data[0]));
+    EXPECT_EQ(OK, writer.writeToParameter((uint32_t *)&data[1]));
+    EXPECT_EQ(OK, writer.writeToParameter((uint32_t *)&data[3]));
+    EXPECT_NE(OK, writer.writeToParameter(&data[5])); // expect write error
+    EXPECT_EQ(OK, writer.writeToValue((uint32_t *)&data[6], 2));
+    EXPECT_EQ(OK, writer.writeToValue(&data[10]));
+    writer.finishValueWrite();
+    EXPECT_EQ(5 * sizeof(uint16_t), writer.getValueSize());
+    EXPECT_EQ(sizeof(effect_param_t) + 11 * sizeof(uint16_t),
+              writer.getTotalSize()) << writer.toString();
+    EXPECT_NE(OK, writer.writeToValue(&data[10])); // expect write error
+    writer.finishValueWrite();
+    EXPECT_EQ(5 * sizeof(uint16_t), writer.getValueSize());
+    EXPECT_EQ(sizeof(effect_param_t) + 11 * sizeof(uint16_t),
+              writer.getTotalSize()) << writer.toString();
+
+    // read and compare
+    uint16_t getData[12] = {};
+    EXPECT_EQ(OK, reader.readFromParameter((uint32_t *)&getData[0], 2));
+    EXPECT_EQ(OK, reader.readFromParameter(&getData[4]));
+    EXPECT_NE(OK, reader.readFromParameter(&getData[5])); // expect read error
+
+    EXPECT_EQ(OK, reader.readFromValue(&getData[6]));
+    EXPECT_EQ(OK, reader.readFromValue((uint32_t *)&getData[7]));
+    EXPECT_EQ(OK, reader.readFromValue((uint32_t *)&getData[9]));
+    EXPECT_NE(OK, reader.readFromValue(&getData[11])); // expect read error
+
+    EXPECT_EQ(0, std::memcmp(&buf[sizeof(effect_param_t)], &data, 11 * sizeof(uint16_t)));
+    EXPECT_EQ(0, std::memcmp(&getData, &data, 11 * sizeof(uint16_t)))
+        << "\n"
+        << std::hex << getData[0] << " " << getData[1] << " " << getData[2] << " "
+        << getData[3] << " " << getData[4] << " " << getData[5] << " " << getData[6] << " "
+        << getData[7] << " " << getData[8] << " " << getData[9] << " " << getData[10];
+}
diff --git a/tests/systemaudio_tests.cpp b/tests/systemaudio_tests.cpp
index 985673f..ae66c32 100644
--- a/tests/systemaudio_tests.cpp
+++ b/tests/systemaudio_tests.cpp
@@ -208,6 +208,9 @@
     runAudioDeviceTypeHelperFunction(allDeviceTypes, AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY,
             std::size(AUDIO_DEVICE_OUT_BLE_UNICAST_ARRAY), "ble unicast",
             audio_is_ble_unicast_device);
+    runAudioDeviceTypeHelperFunction(allDeviceTypes, AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY,
+            std::size(AUDIO_DEVICE_OUT_BLE_BROADCAST_ARRAY), "ble broadcast",
+            audio_is_ble_broadcast_device);
 }