Merge "Turn off CFI for other Keymaster VTS versions"
diff --git a/atrace/1.0/default/android.hardware.atrace@1.0-service.rc b/atrace/1.0/default/android.hardware.atrace@1.0-service.rc
index 7110b45..31459b4 100644
--- a/atrace/1.0/default/android.hardware.atrace@1.0-service.rc
+++ b/atrace/1.0/default/android.hardware.atrace@1.0-service.rc
@@ -14,4 +14,4 @@
     interface android.hardware.atrace@1.0::IAtraceDevice default
     class early_hal
     user system
-    group system
+    group system readtracefs
diff --git a/automotive/vehicle/OWNERS b/automotive/vehicle/OWNERS
new file mode 100644
index 0000000..429ec39
--- /dev/null
+++ b/automotive/vehicle/OWNERS
@@ -0,0 +1,3 @@
+ericjeong@google.com
+keunyoung@google.com
+shanyu@google.com
diff --git a/bluetooth/audio/aidl/Android.bp b/bluetooth/audio/aidl/Android.bp
index 60da877..5107240 100644
--- a/bluetooth/audio/aidl/Android.bp
+++ b/bluetooth/audio/aidl/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "hardware_interfaces_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["hardware_interfaces_license"],
+}
+
 aidl_interface {
     name: "android.hardware.bluetooth.audio",
     vendor_available: true,
@@ -34,6 +43,10 @@
             vndk: {
                 enabled: true,
             },
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.bluetooth",
+            ],
         },
     },
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacCapabilities.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacCapabilities.aidl
index ad44c26..899b8ca 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacCapabilities.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacCapabilities.aidl
@@ -34,9 +34,9 @@
 package android.hardware.bluetooth.audio;
 @VintfStability
 parcelable AacCapabilities {
-  android.hardware.bluetooth.audio.AacObjectType objectType;
+  android.hardware.bluetooth.audio.AacObjectType[] objectType;
   int[] sampleRateHz;
-  android.hardware.bluetooth.audio.ChannelMode channelMode;
+  android.hardware.bluetooth.audio.ChannelMode[] channelMode;
   boolean variableBitRateSupported;
   byte[] bitsPerSample;
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacObjectType.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacObjectType.aidl
index c129c66..2148244 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacObjectType.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AacObjectType.aidl
@@ -34,8 +34,8 @@
 package android.hardware.bluetooth.audio;
 @Backing(type="byte") @VintfStability
 enum AacObjectType {
-  MPEG2_LC = 1,
-  MPEG4_LC = 2,
-  MPEG4_LTP = 4,
-  MPEG4_SCALABLE = 8,
+  MPEG2_LC = 0,
+  MPEG4_LC = 1,
+  MPEG4_LTP = 2,
+  MPEG4_SCALABLE = 3,
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AptxCapabilities.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AptxCapabilities.aidl
index 4767b69..08a38e2 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AptxCapabilities.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/AptxCapabilities.aidl
@@ -35,6 +35,6 @@
 @VintfStability
 parcelable AptxCapabilities {
   int[] sampleRateHz;
-  android.hardware.bluetooth.audio.ChannelMode channelMode;
+  android.hardware.bluetooth.audio.ChannelMode[] channelMode;
   byte[] bitsPerSample;
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/ChannelMode.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/ChannelMode.aidl
index 3ca93c3..c3bc741 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/ChannelMode.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/ChannelMode.aidl
@@ -34,7 +34,7 @@
 package android.hardware.bluetooth.audio;
 @Backing(type="byte") @VintfStability
 enum ChannelMode {
-  UNKNOWN = 1,
-  MONO = 2,
-  STEREO = 4,
+  UNKNOWN = 0,
+  MONO = 1,
+  STEREO = 2,
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl
index 44b434b..3a5f951 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl
@@ -41,4 +41,5 @@
   APTX_HD = 4,
   LDAC = 5,
   LC3 = 6,
+  VENDOR = 7,
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacCapabilities.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacCapabilities.aidl
index 19e041a..aa4e4c8 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacCapabilities.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacCapabilities.aidl
@@ -35,7 +35,7 @@
 @VintfStability
 parcelable LdacCapabilities {
   int[] sampleRateHz;
-  android.hardware.bluetooth.audio.LdacChannelMode channelMode;
-  android.hardware.bluetooth.audio.LdacQualityIndex qualityIndex;
+  android.hardware.bluetooth.audio.LdacChannelMode[] channelMode;
+  android.hardware.bluetooth.audio.LdacQualityIndex[] qualityIndex;
   byte[] bitsPerSample;
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacChannelMode.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacChannelMode.aidl
index a9d6c5e..88d6faf 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacChannelMode.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacChannelMode.aidl
@@ -34,8 +34,8 @@
 package android.hardware.bluetooth.audio;
 @Backing(type="byte") @VintfStability
 enum LdacChannelMode {
-  UNKNOWN = 1,
-  STEREO = 2,
-  DUAL = 4,
-  MONO = 8,
+  UNKNOWN = 0,
+  STEREO = 1,
+  DUAL = 2,
+  MONO = 3,
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacQualityIndex.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacQualityIndex.aidl
index 693392f..35e4358 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacQualityIndex.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/LdacQualityIndex.aidl
@@ -34,8 +34,8 @@
 package android.hardware.bluetooth.audio;
 @Backing(type="byte") @VintfStability
 enum LdacQualityIndex {
-  HIGH = 1,
-  MID = 2,
-  LOW = 4,
-  ABR = 8,
+  HIGH = 0,
+  MID = 1,
+  LOW = 2,
+  ABR = 3,
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/PcmCapabilities.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/PcmCapabilities.aidl
index 6cfe5cd..0c2f87d 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/PcmCapabilities.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/PcmCapabilities.aidl
@@ -35,7 +35,7 @@
 @VintfStability
 parcelable PcmCapabilities {
   int[] sampleRateHz;
-  android.hardware.bluetooth.audio.ChannelMode channelMode;
+  android.hardware.bluetooth.audio.ChannelMode[] channelMode;
   byte[] bitsPerSample;
   int[] dataIntervalUs;
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcAllocMethod.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcAllocMethod.aidl
index 5170f16..091f6d7 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcAllocMethod.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcAllocMethod.aidl
@@ -34,6 +34,6 @@
 package android.hardware.bluetooth.audio;
 @Backing(type="byte") @VintfStability
 enum SbcAllocMethod {
-  ALLOC_MD_S = 1,
-  ALLOC_MD_L = 2,
+  ALLOC_MD_S = 0,
+  ALLOC_MD_L = 1,
 }
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcCapabilities.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcCapabilities.aidl
index ec3aa0f..c8d7e7e 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcCapabilities.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcCapabilities.aidl
@@ -35,10 +35,10 @@
 @VintfStability
 parcelable SbcCapabilities {
   int[] sampleRateHz;
-  android.hardware.bluetooth.audio.SbcChannelMode channelMode;
+  android.hardware.bluetooth.audio.SbcChannelMode[] channelMode;
   byte[] blockLength;
   byte[] numSubbands;
-  android.hardware.bluetooth.audio.SbcAllocMethod allocMethod;
+  android.hardware.bluetooth.audio.SbcAllocMethod[] allocMethod;
   byte[] bitsPerSample;
   int minBitpool;
   int maxBitpool;
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcChannelMode.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcChannelMode.aidl
index 88fca4a..6441a99 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcChannelMode.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/SbcChannelMode.aidl
@@ -34,9 +34,9 @@
 package android.hardware.bluetooth.audio;
 @Backing(type="byte") @VintfStability
 enum SbcChannelMode {
-  UNKNOWN = 1,
-  JOINT_STEREO = 2,
-  STEREO = 4,
-  DUAL = 8,
-  MONO = 16,
+  UNKNOWN = 0,
+  JOINT_STEREO = 1,
+  STEREO = 2,
+  DUAL = 3,
+  MONO = 4,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacCapabilities.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacCapabilities.aidl
index 4303883..c4153e9 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacCapabilities.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacCapabilities.aidl
@@ -24,11 +24,9 @@
  */
 @VintfStability
 parcelable AacCapabilities {
-    /* bitfield */
-    AacObjectType objectType;
+    AacObjectType[] objectType;
     int[] sampleRateHz;
-    /* bitfield */
-    ChannelMode channelMode;
+    ChannelMode[] channelMode;
     boolean variableBitRateSupported;
     byte[] bitsPerSample;
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacObjectType.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacObjectType.aidl
index 480e422..4e9958c 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacObjectType.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AacObjectType.aidl
@@ -22,17 +22,17 @@
     /**
      * MPEG-2 Low Complexity. Support is Mandatory.
      */
-    MPEG2_LC = 1,
+    MPEG2_LC,
     /**
      * MPEG-4 Low Complexity. Support is Optional.
      */
-    MPEG4_LC = 1 << 1,
+    MPEG4_LC,
     /**
      * MPEG-4 Long Term Prediction. Support is Optional.
      */
-    MPEG4_LTP = 1 << 2,
+    MPEG4_LTP,
     /**
      * MPEG-4 Scalable. Support is Optional.
      */
-    MPEG4_SCALABLE = 1 << 3,
+    MPEG4_SCALABLE,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AptxCapabilities.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AptxCapabilities.aidl
index 6a37fc6..f5605d3 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AptxCapabilities.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/AptxCapabilities.aidl
@@ -24,7 +24,6 @@
 @VintfStability
 parcelable AptxCapabilities {
     int[] sampleRateHz;
-    /* bitfield */
-    ChannelMode channelMode;
+    ChannelMode[] channelMode;
     byte[] bitsPerSample;
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/ChannelMode.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/ChannelMode.aidl
index 2df879d..6613872 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/ChannelMode.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/ChannelMode.aidl
@@ -19,7 +19,7 @@
 @VintfStability
 @Backing(type="byte")
 enum ChannelMode {
-    UNKNOWN = 1,
-    MONO = 1 << 1,
-    STEREO = 1 << 2,
+    UNKNOWN,
+    MONO,
+    STEREO,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl
index 68c60f5..9c33081 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl
@@ -26,4 +26,5 @@
     APTX_HD,
     LDAC,
     LC3,
+    VENDOR,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacCapabilities.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacCapabilities.aidl
index 44cca7e..1dbec08 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacCapabilities.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacCapabilities.aidl
@@ -26,9 +26,7 @@
 @VintfStability
 parcelable LdacCapabilities {
     int[] sampleRateHz;
-    /* bitfiled */
-    LdacChannelMode channelMode;
-    /* bitfiled */
-    LdacQualityIndex qualityIndex;
+    LdacChannelMode[] channelMode;
+    LdacQualityIndex[] qualityIndex;
     byte[] bitsPerSample;
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacChannelMode.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacChannelMode.aidl
index 3acca32..3cc910f 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacChannelMode.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacChannelMode.aidl
@@ -22,8 +22,8 @@
 @VintfStability
 @Backing(type="byte")
 enum LdacChannelMode {
-    UNKNOWN = 1,
-    STEREO = 1 << 1,
-    DUAL = 1 << 2,
-    MONO = 1 << 3,
+    UNKNOWN,
+    STEREO,
+    DUAL,
+    MONO,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacQualityIndex.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacQualityIndex.aidl
index cb12583..9993b8b 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacQualityIndex.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/LdacQualityIndex.aidl
@@ -22,17 +22,17 @@
     /**
      * 990kbps
      */
-    HIGH = 1,
+    HIGH,
     /**
      * 660kbps
      */
-    MID = 1 << 1,
+    MID,
     /**
      * 330kbps
      */
-    LOW = 1 << 2,
+    LOW,
     /**
      * Adaptive Bit Rate mode
      */
-    ABR = 1 << 3,
+    ABR,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/PcmCapabilities.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/PcmCapabilities.aidl
index f5d699e..776b777 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/PcmCapabilities.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/PcmCapabilities.aidl
@@ -24,7 +24,7 @@
 @VintfStability
 parcelable PcmCapabilities {
     int[] sampleRateHz;
-    ChannelMode channelMode;
+    ChannelMode[] channelMode;
     byte[] bitsPerSample;
     /**
      * Data interval for data transfer
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcAllocMethod.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcAllocMethod.aidl
index 7047e34..1159f30 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcAllocMethod.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcAllocMethod.aidl
@@ -22,9 +22,9 @@
     /**
      * SNR
      */
-    ALLOC_MD_S = 1,
+    ALLOC_MD_S,
     /**
      * Loudness
      */
-    ALLOC_MD_L = 1 << 1,
+    ALLOC_MD_L,
 }
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcCapabilities.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcCapabilities.aidl
index cf62ed4..743a1f7 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcCapabilities.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcCapabilities.aidl
@@ -25,12 +25,10 @@
 @VintfStability
 parcelable SbcCapabilities {
     int[] sampleRateHz;
-    /* bitfield */
-    SbcChannelMode channelMode;
+    SbcChannelMode[] channelMode;
     byte[] blockLength;
     byte[] numSubbands;
-    /* bitfield */
-    SbcAllocMethod allocMethod;
+    SbcAllocMethod[] allocMethod;
     byte[] bitsPerSample;
     /*
      * range from 2 to 250.
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcChannelMode.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcChannelMode.aidl
index 7eb38cd..68e3267 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcChannelMode.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/SbcChannelMode.aidl
@@ -19,9 +19,9 @@
 @VintfStability
 @Backing(type="byte")
 enum SbcChannelMode {
-    UNKNOWN = 1,
-    JOINT_STEREO = 1 << 1,
-    STEREO = 1 << 2,
-    DUAL = 1 << 3,
-    MONO = 1 << 4,
+    UNKNOWN,
+    JOINT_STEREO,
+    STEREO,
+    DUAL,
+    MONO,
 }
diff --git a/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.cpp b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.cpp
new file mode 100644
index 0000000..fc8a911
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderA2dpHW"
+
+#include "A2dpOffloadAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+A2dpOffloadAudioProvider::A2dpOffloadAudioProvider() {
+  session_type_ = SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH;
+}
+
+bool A2dpOffloadAudioProvider::isValid(const SessionType& session_type) {
+  return (session_type == session_type_);
+}
+
+ndk::ScopedAStatus A2dpOffloadAudioProvider::startSession(
+    const std::shared_ptr<IBluetoothAudioPort>& host_if,
+    const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+  if (audio_config.getTag() != AudioConfiguration::a2dpConfig) {
+    LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+                 << audio_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  if (!BluetoothAudioCodecs::IsOffloadCodecConfigurationValid(
+          session_type_, audio_config.get<AudioConfiguration::a2dpConfig>())) {
+    LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+                 << audio_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  return BluetoothAudioProvider::startSession(host_if, audio_config,
+                                              _aidl_return);
+}
+
+ndk::ScopedAStatus A2dpOffloadAudioProvider::onSessionReady(
+    DataMQDesc* _aidl_return) {
+  *_aidl_return = DataMQDesc();
+  BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+                                                nullptr, *audio_config_);
+  return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.h b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.h
new file mode 100644
index 0000000..5934f5b
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class A2dpOffloadAudioProvider : public BluetoothAudioProvider {
+ public:
+  A2dpOffloadAudioProvider();
+
+  bool isValid(const SessionType& session_type) override;
+
+  ndk::ScopedAStatus startSession(
+      const std::shared_ptr<IBluetoothAudioPort>& host_if,
+      const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+  ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.cpp b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.cpp
new file mode 100644
index 0000000..7e49074
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.cpp
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderA2dpSW"
+
+#include "A2dpSoftwareAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+// Here the buffer size is based on SBC
+static constexpr uint32_t kPcmFrameSize = 4;  // 16 bits per sample / stereo
+// For SBC this would be 128; here we choose 96, the LCM of 16, 24, and 32
+static constexpr uint32_t kPcmFrameCount = 96;
+static constexpr uint32_t kRtpFrameSize = kPcmFrameSize * kPcmFrameCount;
+// At most about 7 RTP frames fit in one 20 ms tick for SBC. Since we use 96
+// for the PCM frame count, we choose a slightly larger number here.
+static constexpr uint32_t kRtpFrameCount = 10;
+static constexpr uint32_t kBufferSize = kRtpFrameSize * kRtpFrameCount;
+static constexpr uint32_t kBufferCount = 2;  // double buffer
+static constexpr uint32_t kDataMqSize = kBufferSize * kBufferCount;
+
+A2dpSoftwareAudioProvider::A2dpSoftwareAudioProvider()
+    : BluetoothAudioProvider(), data_mq_(nullptr) {
+  LOG(INFO) << __func__ << " - size of audio buffer " << kDataMqSize
+            << " byte(s)";
+  std::unique_ptr<DataMQ> data_mq(
+      new DataMQ(kDataMqSize, /* EventFlag */ true));
+  if (data_mq && data_mq->isValid()) {
+    data_mq_ = std::move(data_mq);
+    session_type_ = SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH;
+  } else {
+    ALOGE_IF(!data_mq, "failed to allocate data MQ");
+    ALOGE_IF(data_mq && !data_mq->isValid(), "data MQ is invalid");
+  }
+}
+
+bool A2dpSoftwareAudioProvider::isValid(const SessionType& sessionType) {
+  return (sessionType == session_type_ && data_mq_ && data_mq_->isValid());
+}
+
+ndk::ScopedAStatus A2dpSoftwareAudioProvider::startSession(
+    const std::shared_ptr<IBluetoothAudioPort>& host_if,
+    const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+  if (audio_config.getTag() != AudioConfiguration::pcmConfig) {
+    LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+                 << audio_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  const PcmConfiguration& pcm_config =
+      audio_config.get<AudioConfiguration::pcmConfig>();
+  if (!BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(pcm_config)) {
+    LOG(WARNING) << __func__ << " - Unsupported PCM Configuration="
+                 << pcm_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+
+  return BluetoothAudioProvider::startSession(host_if, audio_config,
+                                              _aidl_return);
+}
+
+ndk::ScopedAStatus A2dpSoftwareAudioProvider::onSessionReady(
+    DataMQDesc* _aidl_return) {
+  if (data_mq_ == nullptr || !data_mq_->isValid()) {
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  *_aidl_return = data_mq_->dupeDesc();
+  BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+                                                _aidl_return, *audio_config_);
+  return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.h b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.h
new file mode 100644
index 0000000..3bc0a13
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class A2dpSoftwareAudioProvider : public BluetoothAudioProvider {
+ public:
+  A2dpSoftwareAudioProvider();
+
+  bool isValid(const SessionType& sessionType) override;
+
+  ndk::ScopedAStatus startSession(
+      const std::shared_ptr<IBluetoothAudioPort>& host_if,
+      const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+  // audio data queue for software encoding
+  std::unique_ptr<DataMQ> data_mq_;
+
+  ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/aidl/default/Android.bp b/bluetooth/audio/aidl/default/Android.bp
new file mode 100644
index 0000000..846702f
--- /dev/null
+++ b/bluetooth/audio/aidl/default/Android.bp
@@ -0,0 +1,34 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "hardware_interfaces_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["hardware_interfaces_license"],
+}
+
+cc_library_shared {
+    name: "android.hardware.bluetooth.audio-V1-impl",
+    vendor: true,
+    vintf_fragments: ["bluetooth_audio.xml"],
+    srcs: [
+        "BluetoothAudioProvider.cpp",
+        "BluetoothAudioProviderFactory.cpp",
+        "A2dpOffloadAudioProvider.cpp",
+        "A2dpSoftwareAudioProvider.cpp",
+        "HearingAidAudioProvider.cpp",
+        "LeAudioOffloadAudioProvider.cpp",
+        "LeAudioSoftwareAudioProvider.cpp",
+    ],
+    export_include_dirs: ["."],
+    header_libs: ["libhardware_headers"],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "libfmq",
+        "liblog",
+        "android.hardware.bluetooth.audio-V1-ndk",
+        "libbluetooth_audio_session_aidl",
+    ],
+}
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProvider.cpp b/bluetooth/audio/aidl/default/BluetoothAudioProvider.cpp
new file mode 100644
index 0000000..c2ffa2e
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProvider.cpp
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderStub"
+
+#include "BluetoothAudioProvider.h"
+
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+BluetoothAudioProvider::BluetoothAudioProvider() {
+  death_recipient_ = ::ndk::ScopedAIBinder_DeathRecipient(
+      AIBinder_DeathRecipient_new(binderDiedCallbackAidl));
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::startSession(
+    const std::shared_ptr<IBluetoothAudioPort>& host_if,
+    const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+  if (host_if == nullptr) {
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+  stack_iface_ = host_if;
+
+  AIBinder_linkToDeath(stack_iface_->asBinder().get(), death_recipient_.get(),
+                       this);
+
+  onSessionReady(_aidl_return);
+  return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::endSession() {
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_);
+
+  if (stack_iface_ != nullptr) {
+    BluetoothAudioSessionReport::OnSessionEnded(session_type_);
+
+    AIBinder_unlinkToDeath(stack_iface_->asBinder().get(),
+                           death_recipient_.get(), this);
+  } else {
+    LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << " has NO session";
+  }
+
+  stack_iface_ = nullptr;
+  audio_config_ = nullptr;
+
+  return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::streamStarted(
+    BluetoothAudioStatus status) {
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+            << ", status=" << toString(status);
+
+  if (stack_iface_ != nullptr) {
+    BluetoothAudioSessionReport::ReportControlStatus(session_type_, true,
+                                                     status);
+  } else {
+    LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+                 << ", status=" << toString(status) << " has NO session";
+  }
+
+  return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::streamSuspended(
+    BluetoothAudioStatus status) {
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+            << ", status=" << toString(status);
+
+  if (stack_iface_ != nullptr) {
+    BluetoothAudioSessionReport::ReportControlStatus(session_type_, false,
+                                                     status);
+  } else {
+    LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+                 << ", status=" << toString(status) << " has NO session";
+  }
+  return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::updateAudioConfiguration(
+    const AudioConfiguration& audio_config) {
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_);
+
+  if (stack_iface_ == nullptr || audio_config_ == nullptr) {
+    LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << " has NO session";
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+
+  if (audio_config.getTag() != audio_config_->getTag()) {
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+            << " audio config type does not match";
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+
+  audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+  BluetoothAudioSessionReport::ReportAudioConfigChanged(session_type_,
+                                                        *audio_config_);
+  return ndk::ScopedAStatus::ok();
+}
+
+void BluetoothAudioProvider::binderDiedCallbackAidl(void* ptr) {
+  LOG(ERROR) << __func__ << " - BluetoothAudio Service died";
+  auto provider = static_cast<BluetoothAudioProvider*>(ptr);
+  if (provider == nullptr) {
+    LOG(ERROR) << __func__ << ": Null AudioProvider HAL died";
+    return;
+  }
+  provider->endSession();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProvider.h b/bluetooth/audio/aidl/default/BluetoothAudioProvider.h
new file mode 100644
index 0000000..f7acbdf
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProvider.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <aidl/android/hardware/bluetooth/audio/BnBluetoothAudioProvider.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+#include <fmq/AidlMessageQueue.h>
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+
+using MqDataType = int8_t;
+using MqDataMode = SynchronizedReadWrite;
+using DataMQ = AidlMessageQueue<MqDataType, MqDataMode>;
+using DataMQDesc = MQDescriptor<MqDataType, MqDataMode>;
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioProvider : public BnBluetoothAudioProvider {
+ public:
+  BluetoothAudioProvider();
+
+  ndk::ScopedAStatus startSession(
+      const std::shared_ptr<IBluetoothAudioPort>& host_if,
+      const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+  ndk::ScopedAStatus endSession();
+  ndk::ScopedAStatus streamStarted(BluetoothAudioStatus status);
+  ndk::ScopedAStatus streamSuspended(BluetoothAudioStatus status);
+  ndk::ScopedAStatus updateAudioConfiguration(
+      const AudioConfiguration& audio_config);
+
+  virtual bool isValid(const SessionType& sessionType) = 0;
+
+ protected:
+  virtual ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) = 0;
+  static void binderDiedCallbackAidl(void* cookie_ptr);
+
+  ::ndk::ScopedAIBinder_DeathRecipient death_recipient_;
+
+  std::shared_ptr<IBluetoothAudioPort> stack_iface_;
+  std::unique_ptr<AudioConfiguration> audio_config_ = nullptr;
+  SessionType session_type_;
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.cpp b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.cpp
new file mode 100644
index 0000000..8e6cee7
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.cpp
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProvidersFactory"
+
+#include "BluetoothAudioProviderFactory.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <android-base/logging.h>
+
+#include "A2dpOffloadAudioProvider.h"
+#include "A2dpSoftwareAudioProvider.h"
+#include "BluetoothAudioProvider.h"
+#include "HearingAidAudioProvider.h"
+#include "LeAudioOffloadAudioProvider.h"
+#include "LeAudioSoftwareAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+BluetoothAudioProviderFactory::BluetoothAudioProviderFactory() {}
+
+ndk::ScopedAStatus BluetoothAudioProviderFactory::openProvider(
+    const SessionType session_type,
+    std::shared_ptr<IBluetoothAudioProvider>* _aidl_return) {
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type);
+  std::shared_ptr<BluetoothAudioProvider> provider = nullptr;
+
+  switch (session_type) {
+    case SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<A2dpSoftwareAudioProvider>();
+      break;
+    case SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<A2dpOffloadAudioProvider>();
+      break;
+    case SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<HearingAidAudioProvider>();
+      break;
+    case SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<LeAudioSoftwareOutputAudioProvider>();
+      break;
+    case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<LeAudioOffloadOutputAudioProvider>();
+      break;
+    case SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<LeAudioSoftwareInputAudioProvider>();
+      break;
+    case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH:
+      provider = ndk::SharedRefBase::make<LeAudioOffloadInputAudioProvider>();
+      break;
+    default:
+      provider = nullptr;
+      break;
+  }
+
+  if (provider == nullptr || !provider->isValid(session_type)) {
+    provider = nullptr;
+    LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type);
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  *_aidl_return = provider;
+
+  return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProviderFactory::getProviderCapabilities(
+    const SessionType session_type,
+    std::vector<AudioCapabilities>* _aidl_return) {
+  if (session_type == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+    auto codec_capabilities =
+        BluetoothAudioCodecs::GetA2dpOffloadCodecCapabilities(session_type);
+    _aidl_return->resize(codec_capabilities.size());
+    for (int i = 0; i < codec_capabilities.size(); i++) {
+      _aidl_return->at(i).set<AudioCapabilities::a2dpCapabilities>(
+          codec_capabilities[i]);
+    }
+  } else if (session_type ==
+                 SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+             session_type ==
+                 SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+    std::vector<LeAudioCodecCapabilitiesSetting> db_codec_capabilities =
+        BluetoothAudioCodecs::GetLeAudioOffloadCodecCapabilities(session_type);
+    if (db_codec_capabilities.size()) {
+      _aidl_return->resize(db_codec_capabilities.size());
+      for (int i = 0; i < db_codec_capabilities.size(); ++i) {
+        _aidl_return->at(i).set<AudioCapabilities::leAudioCapabilities>(
+            db_codec_capabilities[i]);
+      }
+    }
+  } else if (session_type != SessionType::UNKNOWN) {
+    auto pcm_capabilities = BluetoothAudioCodecs::GetSoftwarePcmCapabilities();
+    _aidl_return->resize(pcm_capabilities.size());
+    for (int i = 0; i < pcm_capabilities.size(); i++) {
+      _aidl_return->at(i).set<AudioCapabilities::pcmCapabilities>(
+          pcm_capabilities[i]);
+    }
+  }
+
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type)
+            << " supports " << _aidl_return->size() << " codecs";
+  return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.h b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.h
new file mode 100644
index 0000000..96d888c
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/bluetooth/audio/BnBluetoothAudioProviderFactory.h>
+
+#include "A2dpOffloadAudioProvider.h"
+#include "A2dpSoftwareAudioProvider.h"
+#include "BluetoothAudioProvider.h"
+#include "HearingAidAudioProvider.h"
+#include "LeAudioOffloadAudioProvider.h"
+#include "LeAudioSoftwareAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioProviderFactory : public BnBluetoothAudioProviderFactory {
+ public:
+  BluetoothAudioProviderFactory();
+
+  ndk::ScopedAStatus openProvider(
+      const SessionType session_type,
+      std::shared_ptr<IBluetoothAudioProvider>* _aidl_return) override;
+
+  ndk::ScopedAStatus getProviderCapabilities(
+      const SessionType session_type,
+      std::vector<AudioCapabilities>* _aidl_return) override;
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/aidl/default/HearingAidAudioProvider.cpp b/bluetooth/audio/aidl/default/HearingAidAudioProvider.cpp
new file mode 100644
index 0000000..a993059
--- /dev/null
+++ b/bluetooth/audio/aidl/default/HearingAidAudioProvider.cpp
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderHearingAid"
+
+#include "HearingAidAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static constexpr uint32_t kPcmFrameSize = 4;  // 16 bits per sample / stereo
+static constexpr uint32_t kPcmFrameCount = 128;
+static constexpr uint32_t kRtpFrameSize = kPcmFrameSize * kPcmFrameCount;
+static constexpr uint32_t kRtpFrameCount = 7;  // max count per one tick (20ms)
+static constexpr uint32_t kBufferSize = kRtpFrameSize * kRtpFrameCount;
+static constexpr uint32_t kBufferCount = 1;  // single buffer
+static constexpr uint32_t kDataMqSize = kBufferSize * kBufferCount;
+
+HearingAidAudioProvider::HearingAidAudioProvider()
+    : BluetoothAudioProvider(), data_mq_(nullptr) {
+  LOG(INFO) << __func__ << " - size of audio buffer " << kDataMqSize
+            << " byte(s)";
+  std::unique_ptr<DataMQ> data_mq(
+      new DataMQ(kDataMqSize, /* EventFlag */ true));
+  if (data_mq && data_mq->isValid()) {
+    data_mq_ = std::move(data_mq);
+    session_type_ = SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH;
+  } else {
+    ALOGE_IF(!data_mq, "failed to allocate data MQ");
+    ALOGE_IF(data_mq && !data_mq->isValid(), "data MQ is invalid");
+  }
+}
+bool HearingAidAudioProvider::isValid(const SessionType& sessionType) {
+  return (sessionType == session_type_ && data_mq_ && data_mq_->isValid());
+}
+
+ndk::ScopedAStatus HearingAidAudioProvider::startSession(
+    const std::shared_ptr<IBluetoothAudioPort>& host_if,
+    const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+  if (audio_config.getTag() != AudioConfiguration::pcmConfig) {
+    LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+                 << audio_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  const auto& pcm_config = audio_config.get<AudioConfiguration::pcmConfig>();
+  if (!BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(pcm_config)) {
+    LOG(WARNING) << __func__ << " - Unsupported PCM Configuration="
+                 << pcm_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+
+  return BluetoothAudioProvider::startSession(host_if, audio_config,
+                                              _aidl_return);
+}
+
+ndk::ScopedAStatus HearingAidAudioProvider::onSessionReady(
+    DataMQDesc* _aidl_return) {
+  if (data_mq_ == nullptr || !data_mq_->isValid()) {
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  auto desc = data_mq_->dupeDesc();
+  BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+                                                &desc, *audio_config_);
+  *_aidl_return = data_mq_->dupeDesc();
+  return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/HearingAidAudioProvider.h b/bluetooth/audio/aidl/default/HearingAidAudioProvider.h
new file mode 100644
index 0000000..a7e19e9
--- /dev/null
+++ b/bluetooth/audio/aidl/default/HearingAidAudioProvider.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class HearingAidAudioProvider : public BluetoothAudioProvider {
+ public:
+  HearingAidAudioProvider();
+
+  bool isValid(const SessionType& sessionType) override;
+
+  ndk::ScopedAStatus startSession(
+      const std::shared_ptr<IBluetoothAudioPort>& host_if,
+      const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+  // audio data queue for software encoding
+  std::unique_ptr<DataMQ> data_mq_;
+
+  ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.cpp b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.cpp
new file mode 100644
index 0000000..4078783
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderLeAudioHW"
+
+#include "LeAudioOffloadAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+LeAudioOffloadOutputAudioProvider::LeAudioOffloadOutputAudioProvider()
+    : LeAudioOffloadAudioProvider() {
+  session_type_ = SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH;
+}
+
+LeAudioOffloadInputAudioProvider::LeAudioOffloadInputAudioProvider()
+    : LeAudioOffloadAudioProvider() {
+  session_type_ = SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH;
+}
+
+LeAudioOffloadAudioProvider::LeAudioOffloadAudioProvider()
+    : BluetoothAudioProvider() {}
+
+bool LeAudioOffloadAudioProvider::isValid(const SessionType& sessionType) {
+  return (sessionType == session_type_);
+}
+
+ndk::ScopedAStatus LeAudioOffloadAudioProvider::startSession(
+    const std::shared_ptr<IBluetoothAudioPort>& host_if,
+    const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+  if (audio_config.getTag() != AudioConfiguration::leAudioConfig) {
+    LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+                 << audio_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  const auto& le_audio_config =
+      audio_config.get<AudioConfiguration::leAudioConfig>();
+  if (!BluetoothAudioCodecs::IsOffloadLeAudioConfigurationValid(
+          session_type_, le_audio_config)) {
+    LOG(WARNING) << __func__ << " - Unsupported LC3 Offloaded Configuration="
+                 << le_audio_config.toString();
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+
+  return BluetoothAudioProvider::startSession(host_if, audio_config,
+                                              _aidl_return);
+}
+
+ndk::ScopedAStatus LeAudioOffloadAudioProvider::onSessionReady(
+    DataMQDesc* _aidl_return) {
+  BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+                                                nullptr, *audio_config_);
+  *_aidl_return = DataMQDesc();
+  return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.h b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.h
new file mode 100644
index 0000000..a27a2e7
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class LeAudioOffloadAudioProvider : public BluetoothAudioProvider {
+ public:
+  LeAudioOffloadAudioProvider();
+
+  bool isValid(const SessionType& sessionType) override;
+
+  ndk::ScopedAStatus startSession(
+      const std::shared_ptr<IBluetoothAudioPort>& host_if,
+      const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+  ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+class LeAudioOffloadOutputAudioProvider : public LeAudioOffloadAudioProvider {
+ public:
+  LeAudioOffloadOutputAudioProvider();
+};
+
+class LeAudioOffloadInputAudioProvider : public LeAudioOffloadAudioProvider {
+ public:
+  LeAudioOffloadInputAudioProvider();
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.cpp b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.cpp
new file mode 100644
index 0000000..f9962fd
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderLeAudioSW"
+
+#include "LeAudioSoftwareAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+#include <cstdint>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static constexpr uint32_t kBufferOutCount = 2;  // two-frame buffer
+static constexpr uint32_t kBufferInCount = 2;   // two-frame buffer
+
+inline uint32_t channel_mode_to_channel_count(ChannelMode channel_mode) {
+  switch (channel_mode) {
+    case ChannelMode::MONO:
+      return 1;
+    case ChannelMode::STEREO:
+      return 2;
+    default:
+      return 0;
+  }
+  return 0;
+}
+
+LeAudioSoftwareOutputAudioProvider::LeAudioSoftwareOutputAudioProvider()
+    : LeAudioSoftwareAudioProvider() {
+  session_type_ = SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH;
+}
+
+LeAudioSoftwareInputAudioProvider::LeAudioSoftwareInputAudioProvider()
+    : LeAudioSoftwareAudioProvider() {
+  session_type_ = SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH;
+}
+
+LeAudioSoftwareAudioProvider::LeAudioSoftwareAudioProvider()
+    : BluetoothAudioProvider(), data_mq_(nullptr) {}
+
+bool LeAudioSoftwareAudioProvider::isValid(const SessionType& sessionType) {
+  return (sessionType == session_type_);
+}
+
+ndk::ScopedAStatus LeAudioSoftwareAudioProvider::startSession(
+    const std::shared_ptr<IBluetoothAudioPort>& host_if,
+    const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+  if (audio_config.getTag() != AudioConfiguration::pcmConfig) {
+    LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+                 << audio_config.toString();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  const auto& pcm_config = audio_config.get<AudioConfiguration::pcmConfig>();
+  if (!BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(pcm_config)) {
+    LOG(WARNING) << __func__ << " - Unsupported PCM Configuration="
+                 << pcm_config.toString();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+
+  uint32_t buffer_modifier = 0;
+  if (session_type_ == SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH)
+    buffer_modifier = kBufferOutCount;
+  else if (session_type_ == SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH)
+    buffer_modifier = kBufferInCount;
+
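+  // Size the queue for one data interval of PCM per buffered frame; e.g.
+  // 16 kHz stereo 16-bit PCM with a 10 ms interval and a two-frame buffer
+  // gives 16 * 2 * 2 * 10 * 2 = 1280 bytes.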
+  uint32_t data_mq_size =
+      (ceil(pcm_config.sampleRateHz) / 1000) *
+      channel_mode_to_channel_count(pcm_config.channelMode) *
+      (pcm_config.bitsPerSample / 8) * (pcm_config.dataIntervalUs / 1000) *
+      buffer_modifier;
+
+  LOG(INFO) << __func__ << " - size of audio buffer " << data_mq_size
+            << " byte(s)";
+
+  std::unique_ptr<DataMQ> temp_data_mq(
+      new DataMQ(data_mq_size, /* EventFlag */ true));
+  if (temp_data_mq == nullptr || !temp_data_mq->isValid()) {
+    ALOGE_IF(!temp_data_mq, "failed to allocate data MQ");
+    ALOGE_IF(temp_data_mq && !temp_data_mq->isValid(), "data MQ is invalid");
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  data_mq_ = std::move(temp_data_mq);
+
+  return BluetoothAudioProvider::startSession(host_if, audio_config,
+                                              _aidl_return);
+}
+
+ndk::ScopedAStatus LeAudioSoftwareAudioProvider::onSessionReady(
+    DataMQDesc* _aidl_return) {
+  if (data_mq_ == nullptr || !data_mq_->isValid()) {
+    *_aidl_return = DataMQDesc();
+    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+  }
+  *_aidl_return = data_mq_->dupeDesc();
+  BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+                                                _aidl_return, *audio_config_);
+  return ndk::ScopedAStatus::ok();
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.h b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.h
new file mode 100644
index 0000000..fa58182
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class LeAudioSoftwareAudioProvider : public BluetoothAudioProvider {
+ public:
+  LeAudioSoftwareAudioProvider();
+
+  bool isValid(const SessionType& sessionType) override;
+
+  ndk::ScopedAStatus startSession(
+      const std::shared_ptr<IBluetoothAudioPort>& host_if,
+      const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+  // audio data queue for software encoding
+  std::unique_ptr<DataMQ> data_mq_;
+
+  ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+class LeAudioSoftwareOutputAudioProvider : public LeAudioSoftwareAudioProvider {
+ public:
+  LeAudioSoftwareOutputAudioProvider();
+};
+
+class LeAudioSoftwareInputAudioProvider : public LeAudioSoftwareAudioProvider {
+ public:
+  LeAudioSoftwareInputAudioProvider();
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/bluetooth_audio.xml b/bluetooth/audio/aidl/default/bluetooth_audio.xml
new file mode 100644
index 0000000..1859a1a
--- /dev/null
+++ b/bluetooth/audio/aidl/default/bluetooth_audio.xml
@@ -0,0 +1,6 @@
+<manifest version="1.0" type="device">
+    <hal format="aidl">
+        <name>android.hardware.bluetooth.audio</name>
+        <fqname>IBluetoothAudioProviderFactory/default</fqname>
+    </hal>
+</manifest>
\ No newline at end of file
diff --git a/bluetooth/audio/utils/Android.bp b/bluetooth/audio/utils/Android.bp
index 4f712bf..974357e 100644
--- a/bluetooth/audio/utils/Android.bp
+++ b/bluetooth/audio/utils/Android.bp
@@ -32,5 +32,30 @@
         "libhidlbase",
         "liblog",
         "libutils",
+        "libbluetooth_audio_session_aidl",
+    ],
+}
+
+cc_library_shared {
+    name: "libbluetooth_audio_session_aidl",
+    vendor: true,
+    srcs: [
+        "aidl_session/BluetoothAudioCodecs.cpp",
+        "aidl_session/BluetoothAudioSession.cpp",
+        "aidl_session/HidlToAidlMiddleware.cpp",
+    ],
+    export_include_dirs: ["aidl_session/"],
+    header_libs: ["libhardware_headers"],
+    shared_libs: [
+        "android.hardware.bluetooth.audio@2.0",
+        "android.hardware.bluetooth.audio@2.1",
+        "android.hardware.bluetooth.audio@2.2",
+        "libbase",
+        "libcutils",
+        "libbinder_ndk",
+        "libfmq",
+        "liblog",
+        "android.hardware.bluetooth.audio-V1-ndk",
+        "libhidlbase",
     ],
 }
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.cpp b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.cpp
new file mode 100644
index 0000000..380732f
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.cpp
@@ -0,0 +1,491 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioCodecsAidl"
+
+#include "BluetoothAudioCodecs.h"
+
+#include <aidl/android/hardware/bluetooth/audio/AacCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/AacObjectType.h>
+#include <aidl/android/hardware/bluetooth/audio/AptxCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/ChannelMode.h>
+#include <aidl/android/hardware/bluetooth/audio/LdacCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/LdacChannelMode.h>
+#include <aidl/android/hardware/bluetooth/audio/LdacQualityIndex.h>
+#include <aidl/android/hardware/bluetooth/audio/LeAudioConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/SbcCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/SbcChannelMode.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static const PcmCapabilities kDefaultSoftwarePcmCapabilities = {
+    .sampleRateHz = {16000, 24000, 44100, 48000, 88200, 96000},
+    .channelMode = {ChannelMode::MONO, ChannelMode::STEREO},
+    .bitsPerSample = {16, 24, 32},
+    .dataIntervalUs = {},
+};
+
+static const SbcCapabilities kDefaultOffloadSbcCapability = {
+    .sampleRateHz = {44100},
+    .channelMode = {SbcChannelMode::MONO, SbcChannelMode::JOINT_STEREO},
+    .blockLength = {4, 8, 12, 16},
+    .numSubbands = {8},
+    .allocMethod = {SbcAllocMethod::ALLOC_MD_L},
+    .bitsPerSample = {16},
+    .minBitpool = 2,
+    .maxBitpool = 53};
+
+static const AacCapabilities kDefaultOffloadAacCapability = {
+    .objectType = {AacObjectType::MPEG2_LC},
+    .sampleRateHz = {44100},
+    .channelMode = {ChannelMode::STEREO},
+    .variableBitRateSupported = true,
+    .bitsPerSample = {16}};
+
+static const LdacCapabilities kDefaultOffloadLdacCapability = {
+    .sampleRateHz = {44100, 48000, 88200, 96000},
+    .channelMode = {LdacChannelMode::DUAL, LdacChannelMode::STEREO},
+    .qualityIndex = {LdacQualityIndex::HIGH},
+    .bitsPerSample = {16, 24, 32}};
+
+static const AptxCapabilities kDefaultOffloadAptxCapability = {
+    .sampleRateHz = {44100, 48000},
+    .channelMode = {ChannelMode::STEREO},
+    .bitsPerSample = {16},
+};
+
+static const AptxCapabilities kDefaultOffloadAptxHdCapability = {
+    .sampleRateHz = {44100, 48000},
+    .channelMode = {ChannelMode::STEREO},
+    .bitsPerSample = {24},
+};
+
+static const Lc3Capabilities kDefaultOffloadLc3Capability = {
+    .samplingFrequencyHz = {44100, 48000},
+    .frameDurationUs = {7500, 10000},
+    .channelMode = {ChannelMode::MONO, ChannelMode::STEREO},
+};
+
+const std::vector<CodecCapabilities> kDefaultOffloadA2dpCodecCapabilities = {
+    {.codecType = CodecType::SBC, .capabilities = {}},
+    {.codecType = CodecType::AAC, .capabilities = {}},
+    {.codecType = CodecType::LDAC, .capabilities = {}},
+    {.codecType = CodecType::APTX, .capabilities = {}},
+    {.codecType = CodecType::APTX_HD, .capabilities = {}},
+    {.codecType = CodecType::LC3, .capabilities = {}}};
+
+std::vector<LeAudioCodecCapabilitiesSetting> kDefaultOffloadLeAudioCapabilities;
+
+static const UnicastCapability kInvalidUnicastCapability = {
+    .codecType = CodecType::UNKNOWN};
+
+static const BroadcastCapability kInvalidBroadcastCapability = {
+    .codecType = CodecType::UNKNOWN};
+
+// Default Supported Codecs
+// LC3 16_1: sample rate: 16 kHz, frame duration: 7.5 ms, octets per frame: 30
+static const Lc3Capabilities kLc3Capability_16_1 = {
+    .samplingFrequencyHz = {16000},
+    .frameDurationUs = {7500},
+    .octetsPerFrame = {30}};
+
+// Default Supported Codecs
+// LC3 16_2: sample rate: 16 kHz, frame duration: 10 ms, octets per frame: 40
+static const Lc3Capabilities kLc3Capability_16_2 = {
+    .samplingFrequencyHz = {16000},
+    .frameDurationUs = {10000},
+    .octetsPerFrame = {40}};
+
+// Default Supported Codecs
+// LC3 48_4: sample rate: 48 kHz, frame duration: 10 ms, octets per frame: 120
+static const Lc3Capabilities kLc3Capability_48_4 = {
+    .samplingFrequencyHz = {48000},
+    .frameDurationUs = {10000},
+    .octetsPerFrame = {120}};
+
+static const std::vector<Lc3Capabilities> supportedLc3CapabilityList = {
+    kLc3Capability_48_4, kLc3Capability_16_2, kLc3Capability_16_1};
+
+static AudioLocation stereoAudio = static_cast<AudioLocation>(
+    static_cast<uint8_t>(AudioLocation::FRONT_LEFT) |
+    static_cast<uint8_t>(AudioLocation::FRONT_RIGHT));
+static AudioLocation monoAudio = AudioLocation::UNKNOWN;
+
+// Stores the supported settings as tuples of (audio location, connected
+// device count, channel count per device)
+std::vector<std::tuple<AudioLocation, uint8_t, uint8_t>>
+    supportedDeviceSetting = {std::make_tuple(stereoAudio, 2, 1),
+                              std::make_tuple(monoAudio, 1, 2),
+                              std::make_tuple(monoAudio, 1, 1)};
+
+template <class T>
+bool BluetoothAudioCodecs::ContainedInVector(
+    const std::vector<T>& vector, const typename identity<T>::type& target) {
+  return std::find(vector.begin(), vector.end(), target) != vector.end();
+}
+
+bool BluetoothAudioCodecs::IsOffloadSbcConfigurationValid(
+    const CodecConfiguration::CodecSpecific& codec_specific) {
+  if (codec_specific.getTag() != CodecConfiguration::CodecSpecific::sbcConfig) {
+    LOG(WARNING) << __func__
+                 << ": Invalid CodecSpecific=" << codec_specific.toString();
+    return false;
+  }
+  const SbcConfiguration sbc_data =
+      codec_specific.get<CodecConfiguration::CodecSpecific::sbcConfig>();
+
+  if (ContainedInVector(kDefaultOffloadSbcCapability.sampleRateHz,
+                        sbc_data.sampleRateHz) &&
+      ContainedInVector(kDefaultOffloadSbcCapability.blockLength,
+                        sbc_data.blockLength) &&
+      ContainedInVector(kDefaultOffloadSbcCapability.numSubbands,
+                        sbc_data.numSubbands) &&
+      ContainedInVector(kDefaultOffloadSbcCapability.bitsPerSample,
+                        sbc_data.bitsPerSample) &&
+      ContainedInVector(kDefaultOffloadSbcCapability.channelMode,
+                        sbc_data.channelMode) &&
+      ContainedInVector(kDefaultOffloadSbcCapability.allocMethod,
+                        sbc_data.allocMethod) &&
+      sbc_data.minBitpool <= sbc_data.maxBitpool &&
+      kDefaultOffloadSbcCapability.minBitpool <= sbc_data.minBitpool &&
+      kDefaultOffloadSbcCapability.maxBitpool >= sbc_data.maxBitpool) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << codec_specific.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadAacConfigurationValid(
+    const CodecConfiguration::CodecSpecific& codec_specific) {
+  if (codec_specific.getTag() != CodecConfiguration::CodecSpecific::aacConfig) {
+    LOG(WARNING) << __func__
+                 << ": Invalid CodecSpecific=" << codec_specific.toString();
+    return false;
+  }
+  const AacConfiguration aac_data =
+      codec_specific.get<CodecConfiguration::CodecSpecific::aacConfig>();
+
+  if (ContainedInVector(kDefaultOffloadAacCapability.sampleRateHz,
+                        aac_data.sampleRateHz) &&
+      ContainedInVector(kDefaultOffloadAacCapability.bitsPerSample,
+                        aac_data.bitsPerSample) &&
+      ContainedInVector(kDefaultOffloadAacCapability.channelMode,
+                        aac_data.channelMode) &&
+      ContainedInVector(kDefaultOffloadAacCapability.objectType,
+                        aac_data.objectType) &&
+      (!aac_data.variableBitRateEnabled ||
+       kDefaultOffloadAacCapability.variableBitRateSupported)) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << codec_specific.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadLdacConfigurationValid(
+    const CodecConfiguration::CodecSpecific& codec_specific) {
+  if (codec_specific.getTag() !=
+      CodecConfiguration::CodecSpecific::ldacConfig) {
+    LOG(WARNING) << __func__
+                 << ": Invalid CodecSpecific=" << codec_specific.toString();
+    return false;
+  }
+  const LdacConfiguration ldac_data =
+      codec_specific.get<CodecConfiguration::CodecSpecific::ldacConfig>();
+
+  if (ContainedInVector(kDefaultOffloadLdacCapability.sampleRateHz,
+                        ldac_data.sampleRateHz) &&
+      ContainedInVector(kDefaultOffloadLdacCapability.bitsPerSample,
+                        ldac_data.bitsPerSample) &&
+      ContainedInVector(kDefaultOffloadLdacCapability.channelMode,
+                        ldac_data.channelMode) &&
+      ContainedInVector(kDefaultOffloadLdacCapability.qualityIndex,
+                        ldac_data.qualityIndex)) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << codec_specific.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadAptxConfigurationValid(
+    const CodecConfiguration::CodecSpecific& codec_specific) {
+  if (codec_specific.getTag() !=
+      CodecConfiguration::CodecSpecific::aptxConfig) {
+    LOG(WARNING) << __func__
+                 << ": Invalid CodecSpecific=" << codec_specific.toString();
+    return false;
+  }
+  const AptxConfiguration aptx_data =
+      codec_specific.get<CodecConfiguration::CodecSpecific::aptxConfig>();
+
+  if (ContainedInVector(kDefaultOffloadAptxCapability.sampleRateHz,
+                        aptx_data.sampleRateHz) &&
+      ContainedInVector(kDefaultOffloadAptxCapability.bitsPerSample,
+                        aptx_data.bitsPerSample) &&
+      ContainedInVector(kDefaultOffloadAptxCapability.channelMode,
+                        aptx_data.channelMode)) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << codec_specific.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadAptxHdConfigurationValid(
+    const CodecConfiguration::CodecSpecific& codec_specific) {
+  if (codec_specific.getTag() !=
+      CodecConfiguration::CodecSpecific::aptxConfig) {
+    LOG(WARNING) << __func__
+                 << ": Invalid CodecSpecific=" << codec_specific.toString();
+    return false;
+  }
+  const AptxConfiguration aptx_data =
+      codec_specific.get<CodecConfiguration::CodecSpecific::aptxConfig>();
+
+  if (ContainedInVector(kDefaultOffloadAptxHdCapability.sampleRateHz,
+                        aptx_data.sampleRateHz) &&
+      ContainedInVector(kDefaultOffloadAptxHdCapability.bitsPerSample,
+                        aptx_data.bitsPerSample) &&
+      ContainedInVector(kDefaultOffloadAptxHdCapability.channelMode,
+                        aptx_data.channelMode)) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << codec_specific.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadLc3ConfigurationValid(
+    const CodecConfiguration::CodecSpecific& codec_specific) {
+  if (codec_specific.getTag() != CodecConfiguration::CodecSpecific::lc3Config) {
+    LOG(WARNING) << __func__
+                 << ": Invalid CodecSpecific=" << codec_specific.toString();
+    return false;
+  }
+  const Lc3Configuration lc3_data =
+      codec_specific.get<CodecConfiguration::CodecSpecific::lc3Config>();
+
+  if (ContainedInVector(kDefaultOffloadLc3Capability.samplingFrequencyHz,
+                        lc3_data.samplingFrequencyHz) &&
+      ContainedInVector(kDefaultOffloadLc3Capability.frameDurationUs,
+                        lc3_data.frameDurationUs) &&
+      ContainedInVector(kDefaultOffloadLc3Capability.channelMode,
+                        lc3_data.channelMode)) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << codec_specific.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadLeAudioConfigurationValid(
+    const SessionType& session_type, const LeAudioConfiguration&) {
+  if (session_type !=
+          SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
+      session_type !=
+          SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+    return false;
+  }
+  return true;
+}
+
+std::vector<PcmCapabilities>
+BluetoothAudioCodecs::GetSoftwarePcmCapabilities() {
+  return {kDefaultSoftwarePcmCapabilities};
+}
+
+std::vector<CodecCapabilities>
+BluetoothAudioCodecs::GetA2dpOffloadCodecCapabilities(
+    const SessionType& session_type) {
+  if (session_type != SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+    return {};
+  }
+  std::vector<CodecCapabilities> offload_a2dp_codec_capabilities =
+      kDefaultOffloadA2dpCodecCapabilities;
+  for (auto& codec_capability : offload_a2dp_codec_capabilities) {
+    switch (codec_capability.codecType) {
+      case CodecType::SBC:
+        codec_capability.capabilities
+            .set<CodecCapabilities::Capabilities::sbcCapabilities>(
+                kDefaultOffloadSbcCapability);
+        break;
+      case CodecType::AAC:
+        codec_capability.capabilities
+            .set<CodecCapabilities::Capabilities::aacCapabilities>(
+                kDefaultOffloadAacCapability);
+        break;
+      case CodecType::LDAC:
+        codec_capability.capabilities
+            .set<CodecCapabilities::Capabilities::ldacCapabilities>(
+                kDefaultOffloadLdacCapability);
+        break;
+      case CodecType::APTX:
+        codec_capability.capabilities
+            .set<CodecCapabilities::Capabilities::aptxCapabilities>(
+                kDefaultOffloadAptxCapability);
+        break;
+      case CodecType::APTX_HD:
+        codec_capability.capabilities
+            .set<CodecCapabilities::Capabilities::aptxCapabilities>(
+                kDefaultOffloadAptxHdCapability);
+        break;
+      case CodecType::LC3:
+        codec_capability.capabilities
+            .set<CodecCapabilities::Capabilities::lc3Capabilities>(
+                kDefaultOffloadLc3Capability);
+        break;
+      case CodecType::UNKNOWN:
+      case CodecType::VENDOR:
+        codec_capability = {};
+        break;
+    }
+  }
+  return offload_a2dp_codec_capabilities;
+}
+
+bool BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(
+    const PcmConfiguration& pcm_config) {
+  if (ContainedInVector(kDefaultSoftwarePcmCapabilities.sampleRateHz,
+                        pcm_config.sampleRateHz) &&
+      ContainedInVector(kDefaultSoftwarePcmCapabilities.bitsPerSample,
+                        pcm_config.bitsPerSample) &&
+      ContainedInVector(kDefaultSoftwarePcmCapabilities.channelMode,
+                        pcm_config.channelMode)
+      // data interval is not checked for now
+      // && pcm_config.dataIntervalUs != 0
+  ) {
+    return true;
+  }
+  LOG(WARNING) << __func__
+               << ": Unsupported CodecSpecific=" << pcm_config.toString();
+  return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadCodecConfigurationValid(
+    const SessionType& session_type, const CodecConfiguration& codec_config) {
+  if (session_type != SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+    LOG(ERROR) << __func__
+               << ": Invalid SessionType=" << toString(session_type);
+    return false;
+  }
+  const CodecConfiguration::CodecSpecific& codec_specific = codec_config.config;
+  switch (codec_config.codecType) {
+    case CodecType::SBC:
+      if (IsOffloadSbcConfigurationValid(codec_specific)) {
+        return true;
+      }
+      break;
+    case CodecType::AAC:
+      if (IsOffloadAacConfigurationValid(codec_specific)) {
+        return true;
+      }
+      break;
+    case CodecType::LDAC:
+      if (IsOffloadLdacConfigurationValid(codec_specific)) {
+        return true;
+      }
+      break;
+    case CodecType::APTX:
+      if (IsOffloadAptxConfigurationValid(codec_specific)) {
+        return true;
+      }
+      break;
+    case CodecType::APTX_HD:
+      if (IsOffloadAptxHdConfigurationValid(codec_specific)) {
+        return true;
+      }
+      break;
+    case CodecType::LC3:
+      if (IsOffloadLc3ConfigurationValid(codec_specific)) {
+        return true;
+      }
+      break;
+    case CodecType::UNKNOWN:
+    case CodecType::VENDOR:
+      break;
+  }
+  return false;
+}
+
+UnicastCapability composeUnicastLc3Capability(
+    AudioLocation audioLocation, uint8_t deviceCnt, uint8_t channelCount,
+    const Lc3Capabilities& capability) {
+  return {
+      .codecType = CodecType::LC3,
+      .supportedChannel = audioLocation,
+      .deviceCount = deviceCnt,
+      .channelCountPerDevice = channelCount,
+      .leAudioCodecCapabilities =
+          UnicastCapability::LeAudioCodecCapabilities(capability),
+  };
+}
+
+std::vector<LeAudioCodecCapabilitiesSetting>
+BluetoothAudioCodecs::GetLeAudioOffloadCodecCapabilities(
+    const SessionType& session_type) {
+  if (session_type !=
+          SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
+      session_type !=
+          SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+    return std::vector<LeAudioCodecCapabilitiesSetting>(0);
+  }
+
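+  // Lazily build the list as the cross product of supported device settings
+  // and LC3 capabilities, adding encode-only, decode-only, and combined
+  // encode + mono-decode entries for every pair.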
+  if (kDefaultOffloadLeAudioCapabilities.empty()) {
+    for (auto [audioLocation, deviceCnt, channelCount] :
+         supportedDeviceSetting) {
+      for (auto capability : supportedLc3CapabilityList) {
+        UnicastCapability lc3Capability = composeUnicastLc3Capability(
+            audioLocation, deviceCnt, channelCount, capability);
+        UnicastCapability lc3MonoDecodeCapability =
+            composeUnicastLc3Capability(monoAudio, 1, 1, capability);
+
+        // Adds the capability for encode only
+        kDefaultOffloadLeAudioCapabilities.push_back(
+            {.unicastEncodeCapability = lc3Capability,
+             .unicastDecodeCapability = kInvalidUnicastCapability,
+             .broadcastCapability = kInvalidBroadcastCapability});
+
+        // Adds the capability for decode only
+        kDefaultOffloadLeAudioCapabilities.push_back(
+            {.unicastEncodeCapability = kInvalidUnicastCapability,
+             .unicastDecodeCapability = lc3Capability,
+             .broadcastCapability = kInvalidBroadcastCapability});
+
+        // Adds the capability for the case that encode and decode exist at the
+        // same time
+        kDefaultOffloadLeAudioCapabilities.push_back(
+            {.unicastEncodeCapability = lc3Capability,
+             .unicastDecodeCapability = lc3MonoDecodeCapability,
+             .broadcastCapability = kInvalidBroadcastCapability});
+      }
+    }
+  }
+
+  return kDefaultOffloadLeAudioCapabilities;
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.h
new file mode 100644
index 0000000..c542ce5
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/bluetooth/audio/CodecCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/CodecConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/Lc3Configuration.h>
+#include <aidl/android/hardware/bluetooth/audio/LeAudioCodecCapabilitiesSetting.h>
+#include <aidl/android/hardware/bluetooth/audio/LeAudioConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/PcmCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/PcmConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+
+#include <vector>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioCodecs {
+ public:
+  static std::vector<PcmCapabilities> GetSoftwarePcmCapabilities();
+  static std::vector<CodecCapabilities> GetA2dpOffloadCodecCapabilities(
+      const SessionType& session_type);
+
+  static bool IsSoftwarePcmConfigurationValid(
+      const PcmConfiguration& pcm_config);
+  static bool IsOffloadCodecConfigurationValid(
+      const SessionType& session_type, const CodecConfiguration& codec_config);
+
+  static bool IsOffloadLeAudioConfigurationValid(
+      const SessionType& session_type, const Lc3Configuration& codec_config);
+
+  static bool IsOffloadLeAudioConfigurationValid(
+      const SessionType& session_type,
+      const LeAudioConfiguration& codec_config);
+
+  static std::vector<LeAudioCodecCapabilitiesSetting>
+  GetLeAudioOffloadCodecCapabilities(const SessionType& session_type);
+
+ private:
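+  // identity<T> places the second parameter of ContainedInVector in a
+  // non-deduced context, so T is deduced from the vector alone and the
+  // target may be any value convertible to the element type.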
+  template <typename T>
+  struct identity {
+    typedef T type;
+  };
+  template <class T>
+  static bool ContainedInVector(const std::vector<T>& vector,
+                                const typename identity<T>::type& target);
+  template <class T>
+  static bool ContainedInBitmask(const T& bitmask, const T& target);
+  static bool IsSingleBit(uint32_t bitmasks, uint32_t bitfield);
+  static bool IsOffloadSbcConfigurationValid(
+      const CodecConfiguration::CodecSpecific& codec_specific);
+  static bool IsOffloadAacConfigurationValid(
+      const CodecConfiguration::CodecSpecific& codec_specific);
+  static bool IsOffloadLdacConfigurationValid(
+      const CodecConfiguration::CodecSpecific& codec_specific);
+  static bool IsOffloadAptxConfigurationValid(
+      const CodecConfiguration::CodecSpecific& codec_specific);
+  static bool IsOffloadAptxHdConfigurationValid(
+      const CodecConfiguration::CodecSpecific& codec_specific);
+  static bool IsOffloadLc3ConfigurationValid(
+      const CodecConfiguration::CodecSpecific& codec_specific);
+  static bool IsOffloadLeAudioConfigurationValid(
+      const SessionType& session_type, const LeAudioCodecConfiguration&);
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.cpp b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.cpp
new file mode 100644
index 0000000..95e473e
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.cpp
@@ -0,0 +1,581 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/types.h>
+#define LOG_TAG "BTAudioSessionAidl"
+
+#include <android-base/logging.h>
+#include <android-base/stringprintf.h>
+#include <android/binder_manager.h>
+
+#include "BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static constexpr int kFmqSendTimeoutMs = 1000;  // 1000 ms timeout for sending
+static constexpr int kFmqReceiveTimeoutMs =
+    1000;                               // 1000 ms timeout for receiving
+static constexpr int kWritePollMs = 1;  // polled non-blocking interval
+static constexpr int kReadPollMs = 1;   // polled non-blocking interval
+
+const CodecConfiguration BluetoothAudioSession::kInvalidCodecConfiguration = {};
+const LeAudioConfiguration kInvalidLeAudioConfiguration = {};
+AudioConfiguration BluetoothAudioSession::invalidSoftwareAudioConfiguration =
+    {};
+AudioConfiguration BluetoothAudioSession::invalidOffloadAudioConfiguration = {};
+AudioConfiguration BluetoothAudioSession::invalidLeOffloadAudioConfig = {};
+
+BluetoothAudioSession::BluetoothAudioSession(const SessionType& session_type)
+    : session_type_(session_type), stack_iface_(nullptr), data_mq_(nullptr) {
+  invalidSoftwareAudioConfiguration.set<AudioConfiguration::pcmConfig>(
+      kInvalidPcmConfiguration);
+  invalidOffloadAudioConfiguration.set<AudioConfiguration::a2dpConfig>(
+      kInvalidCodecConfiguration);
+  invalidLeOffloadAudioConfig.set<AudioConfiguration::leAudioConfig>(
+      kInvalidLeAudioConfiguration);
+}
+
+/***
+ *
+ * Callback methods
+ *
+ ***/
+
+void BluetoothAudioSession::OnSessionStarted(
+    const std::shared_ptr<IBluetoothAudioPort> stack_iface,
+    const DataMQDesc* mq_desc, const AudioConfiguration& audio_config) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (stack_iface == nullptr) {
+    LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+               << ", IBluetoothAudioPort Invalid";
+  } else if (!UpdateAudioConfig(audio_config)) {
+    LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+               << ", AudioConfiguration=" << audio_config.toString()
+               << " Invalid";
+  } else if (!UpdateDataPath(mq_desc)) {
+    LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+               << " MqDescriptor Invalid";
+    if (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+      audio_config_ = std::make_unique<AudioConfiguration>(
+          invalidOffloadAudioConfiguration);
+    } else {
+      audio_config_ = std::make_unique<AudioConfiguration>(
+          invalidSoftwareAudioConfiguration);
+    }
+  } else {
+    stack_iface_ = stack_iface;
+    LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << ", AudioConfiguration=" << audio_config.toString();
+    ReportSessionStatus();
+  }
+}
+
+void BluetoothAudioSession::OnSessionEnded() {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  bool toggled = IsSessionReady();
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_);
+  if (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+    audio_config_ =
+        std::make_unique<AudioConfiguration>(invalidOffloadAudioConfiguration);
+  } else {
+    audio_config_ =
+        std::make_unique<AudioConfiguration>(invalidSoftwareAudioConfiguration);
+  }
+  stack_iface_ = nullptr;
+  UpdateDataPath(nullptr);
+  if (toggled) {
+    ReportSessionStatus();
+  }
+}
+
+/***
+ *
+ * Util methods
+ *
+ ***/
+
+const AudioConfiguration& BluetoothAudioSession::GetAudioConfig() {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    switch (session_type_) {
+      case SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+        return invalidOffloadAudioConfiguration;
+      case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+      case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH:
+        return invalidLeOffloadAudioConfig;
+      default:
+        return invalidSoftwareAudioConfiguration;
+    }
+  }
+  return *audio_config_;
+}
+
+void BluetoothAudioSession::ReportAudioConfigChanged(
+    const AudioConfiguration& audio_config) {
+  if (session_type_ !=
+          SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
+      session_type_ !=
+          SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+    return;
+  }
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+  if (observers_.empty()) {
+    LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+                 << " has NO port state observer";
+    return;
+  }
+  for (auto& observer : observers_) {
+    uint16_t cookie = observer.first;
+    std::shared_ptr<struct PortStatusCallbacks> cb = observer.second;
+    LOG(INFO) << __func__ << " for SessionType=" << toString(session_type_)
+              << ", bluetooth_audio=0x"
+              << ::android::base::StringPrintf("%04x", cookie);
+    if (cb->audio_configuration_changed_cb_ != nullptr) {
+      cb->audio_configuration_changed_cb_(cookie);
+    }
+  }
+}
+
+bool BluetoothAudioSession::IsSessionReady() {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+
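+  // Hardware offload sessions do not carry PCM over a data MQ, so only
+  // software sessions additionally require a valid data_mq_.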
+  bool is_mq_valid =
+      (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+       session_type_ ==
+           SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+       session_type_ ==
+           SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH ||
+       (data_mq_ != nullptr && data_mq_->isValid()));
+  return stack_iface_ != nullptr && is_mq_valid;
+}
+
+/***
+ *
+ * Status callback methods
+ *
+ ***/
+
+uint16_t BluetoothAudioSession::RegisterStatusCback(
+    const PortStatusCallbacks& callbacks) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  uint16_t cookie = ObserversCookieGetInitValue(session_type_);
+  uint16_t cookie_upper_bound = ObserversCookieGetUpperBound(session_type_);
+
+  while (cookie < cookie_upper_bound) {
+    if (observers_.find(cookie) == observers_.end()) {
+      break;
+    }
+    ++cookie;
+  }
+  if (cookie >= cookie_upper_bound) {
+    LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+               << " has " << observers_.size()
+               << " observers already (No Resource)";
+    return kObserversCookieUndefined;
+  }
+  std::shared_ptr<PortStatusCallbacks> cb =
+      std::make_shared<PortStatusCallbacks>();
+  *cb = callbacks;
+  observers_[cookie] = cb;
+  return cookie;
+}
+
+void BluetoothAudioSession::UnregisterStatusCback(uint16_t cookie) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (observers_.erase(cookie) != 1) {
+    LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+                 << " no such provider=0x"
+                 << ::android::base::StringPrintf("%04x", cookie);
+  }
+}
+
+/***
+ *
+ * Stream methods
+ *
+ ***/
+
+bool BluetoothAudioSession::StartStream() {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+               << " has NO session";
+    return false;
+  }
+  auto hal_retval = stack_iface_->startStream();
+  if (!hal_retval.isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+    return false;
+  }
+  return true;
+}
+
+bool BluetoothAudioSession::SuspendStream() {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+               << " has NO session";
+    return false;
+  }
+  auto hal_retval = stack_iface_->suspendStream();
+  if (!hal_retval.isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+    return false;
+  }
+  return true;
+}
+
+void BluetoothAudioSession::StopStream() {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    return;
+  }
+  auto hal_retval = stack_iface_->stopStream();
+  if (!hal_retval.isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+  }
+}
+
+/***
+ *
+ * Private methods
+ *
+ ***/
+
+bool BluetoothAudioSession::UpdateDataPath(const DataMQDesc* mq_desc) {
+  if (mq_desc == nullptr) {
+    // use case: reset the data path when a null descriptor is given
+    data_mq_ = nullptr;
+    return true;
+  }
+  std::unique_ptr<DataMQ> temp_mq;
+  temp_mq.reset(new DataMQ(*mq_desc));
+  if (!temp_mq || !temp_mq->isValid()) {
+    data_mq_ = nullptr;
+    return false;
+  }
+  data_mq_ = std::move(temp_mq);
+  return true;
+}
+
+bool BluetoothAudioSession::UpdateAudioConfig(
+    const AudioConfiguration& audio_config) {
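+  // Accept the configuration only when its union tag matches this session's
+  // datapath: pcmConfig for software sessions, a2dpConfig for A2DP offload,
+  // and leAudioConfig for LE Audio offload.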
+  bool is_software_session =
+      (session_type_ == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH ||
+       session_type_ == SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH ||
+       session_type_ == SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH ||
+       session_type_ == SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH);
+  bool is_offload_a2dp_session =
+      (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH);
+  bool is_offload_le_audio_session =
+      (session_type_ ==
+           SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+       session_type_ ==
+           SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH);
+  auto audio_config_tag = audio_config.getTag();
+  bool is_software_audio_config =
+      (is_software_session &&
+       audio_config_tag == AudioConfiguration::pcmConfig);
+  bool is_a2dp_offload_audio_config =
+      (is_offload_a2dp_session &&
+       audio_config_tag == AudioConfiguration::a2dpConfig);
+  bool is_le_audio_offload_audio_config =
+      (is_offload_le_audio_session &&
+       audio_config_tag == AudioConfiguration::leAudioConfig);
+  if (!is_software_audio_config && !is_a2dp_offload_audio_config &&
+      !is_le_audio_offload_audio_config) {
+    return false;
+  }
+  audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+  return true;
+}
+
+void BluetoothAudioSession::ReportSessionStatus() {
+  // This is locked already by OnSessionStarted / OnSessionEnded
+  if (observers_.empty()) {
+    LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << " has NO port state observer";
+    return;
+  }
+  for (auto& observer : observers_) {
+    uint16_t cookie = observer.first;
+    std::shared_ptr<PortStatusCallbacks> callback = observer.second;
+    LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << " notify to bluetooth_audio=0x"
+              << ::android::base::StringPrintf("%04x", cookie);
+    callback->session_changed_cb_(cookie);
+  }
+}
+
+/***
+ *
+ * PCM methods
+ *
+ ***/
+
+size_t BluetoothAudioSession::OutWritePcmData(const void* buffer,
+                                              size_t bytes) {
+  if (buffer == nullptr || bytes <= 0) {
+    return 0;
+  }
+  size_t total_written = 0;
+  int timeout_ms = kFmqSendTimeoutMs;
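+  // Write in chunks as FMQ space becomes available, sleeping kWritePollMs
+  // between attempts until everything is written or kFmqSendTimeoutMs of
+  // waiting has accumulated.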
+  do {
+    std::unique_lock<std::recursive_mutex> lock(mutex_);
+    if (!IsSessionReady()) {
+      break;
+    }
+    size_t num_bytes_to_write = data_mq_->availableToWrite();
+    if (num_bytes_to_write) {
+      if (num_bytes_to_write > (bytes - total_written)) {
+        num_bytes_to_write = bytes - total_written;
+      }
+
+      if (!data_mq_->write(
+              static_cast<const MQDataType*>(buffer) + total_written,
+              num_bytes_to_write)) {
+        LOG(ERROR) << "FMQ datapath writing " << total_written << "/" << bytes
+                   << " failed";
+        return total_written;
+      }
+      total_written += num_bytes_to_write;
+    } else if (timeout_ms >= kWritePollMs) {
+      lock.unlock();
+      usleep(kWritePollMs * 1000);
+      timeout_ms -= kWritePollMs;
+    } else {
+      LOG(DEBUG) << "Data " << total_written << "/" << bytes << " overflow "
+                 << (kFmqSendTimeoutMs - timeout_ms) << " ms";
+      return total_written;
+    }
+  } while (total_written < bytes);
+  return total_written;
+}
+
+size_t BluetoothAudioSession::InReadPcmData(void* buffer, size_t bytes) {
+  if (buffer == nullptr || bytes <= 0) {
+    return 0;
+  }
+  size_t total_read = 0;
+  int timeout_ms = kFmqReceiveTimeoutMs;
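+  // Read in chunks as FMQ data becomes available, sleeping kReadPollMs
+  // between attempts until the request is filled or kFmqReceiveTimeoutMs of
+  // waiting has accumulated.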
+  do {
+    std::unique_lock<std::recursive_mutex> lock(mutex_);
+    if (!IsSessionReady()) {
+      break;
+    }
+    size_t num_bytes_to_read = data_mq_->availableToRead();
+    if (num_bytes_to_read) {
+      if (num_bytes_to_read > (bytes - total_read)) {
+        num_bytes_to_read = bytes - total_read;
+      }
+      if (!data_mq_->read(static_cast<MQDataType*>(buffer) + total_read,
+                          num_bytes_to_read)) {
+        LOG(ERROR) << "FMQ datapath reading " << total_read << "/" << bytes
+                   << " failed";
+        return total_read;
+      }
+      total_read += num_bytes_to_read;
+    } else if (timeout_ms >= kReadPollMs) {
+      lock.unlock();
+      usleep(kReadPollMs * 1000);
+      timeout_ms -= kReadPollMs;
+      continue;
+    } else {
+      LOG(DEBUG) << "Data " << total_read << "/" << bytes << " overflow "
+                 << (kFmqReceiveTimeoutMs - timeout_ms) << " ms";
+      return total_read;
+    }
+  } while (total_read < bytes);
+  return total_read;
+}
+
+/***
+ *
+ * Other methods
+ *
+ ***/
+
+void BluetoothAudioSession::ReportControlStatus(bool start_resp,
+                                                BluetoothAudioStatus status) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (observers_.empty()) {
+    LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+                 << " has NO port state observer";
+    return;
+  }
+  for (auto& observer : observers_) {
+    uint16_t cookie = observer.first;
+    std::shared_ptr<PortStatusCallbacks> callback = observer.second;
+    LOG(INFO) << __func__ << " - status=" << toString(status)
+              << " for SessionType=" << toString(session_type_)
+              << ", bluetooth_audio=0x"
+              << ::android::base::StringPrintf("%04x", cookie)
+              << (start_resp ? " started" : " suspended");
+    callback->control_result_cb_(cookie, start_resp, status);
+  }
+}
+
+bool BluetoothAudioSession::GetPresentationPosition(
+    PresentationPosition& presentation_position) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+               << " has NO session";
+    return false;
+  }
+
+  if (!stack_iface_->getPresentationPosition(&presentation_position).isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+    return false;
+  }
+  return true;
+}
+
+void BluetoothAudioSession::UpdateSourceMetadata(
+    const struct source_metadata& source_metadata) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+               << " has NO session";
+    return;
+  }
+
+  ssize_t track_count = source_metadata.track_count;
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_) << ","
+            << track_count << " track(s)";
+  if (session_type_ == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH ||
+      session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+    return;
+  }
+
+  SourceMetadata hal_source_metadata;
+  hal_source_metadata.tracks.resize(track_count);
+  for (int i = 0; i < track_count; i++) {
+    hal_source_metadata.tracks[i].usage =
+        static_cast<media::audio::common::AudioUsage>(
+            source_metadata.tracks[i].usage);
+    hal_source_metadata.tracks[i].contentType =
+        static_cast<media::audio::common::AudioContentType>(
+            source_metadata.tracks[i].content_type);
+    hal_source_metadata.tracks[i].gain = source_metadata.tracks[i].gain;
+    LOG(VERBOSE) << __func__ << " - SessionType=" << toString(session_type_)
+                 << ", usage=" << toString(hal_source_metadata.tracks[i].usage)
+                 << ", content="
+                 << toString(hal_source_metadata.tracks[i].contentType)
+                 << ", gain=" << hal_source_metadata.tracks[i].gain;
+  }
+
+  auto hal_retval = stack_iface_->updateSourceMetadata(hal_source_metadata);
+  if (!hal_retval.isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+  }
+}
+
+void BluetoothAudioSession::UpdateSinkMetadata(
+    const struct sink_metadata& sink_metadata) {
+  std::lock_guard<std::recursive_mutex> guard(mutex_);
+  if (!IsSessionReady()) {
+    LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+               << " has NO session";
+    return;
+  }
+
+  ssize_t track_count = sink_metadata.track_count;
+  LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_) << ","
+            << track_count << " track(s)";
+  if (session_type_ == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH ||
+      session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+    return;
+  }
+
+  SinkMetadata hal_sink_metadata;
+  hal_sink_metadata.tracks.resize(track_count);
+  for (int i = 0; i < track_count; i++) {
+    hal_sink_metadata.tracks[i].source =
+        static_cast<media::audio::common::AudioSource>(
+            sink_metadata.tracks[i].source);
+    hal_sink_metadata.tracks[i].gain = sink_metadata.tracks[i].gain;
+    LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << ", source=" << sink_metadata.tracks[i].source
+              << ", dest_device=" << sink_metadata.tracks[i].dest_device
+              << ", gain=" << sink_metadata.tracks[i].gain
+              << ", dest_device_address="
+              << sink_metadata.tracks[i].dest_device_address;
+  }
+
+  auto hal_retval = stack_iface_->updateSinkMetadata(hal_sink_metadata);
+  if (!hal_retval.isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+  }
+}
+
+bool BluetoothAudioSession::IsAidlAvailable() {
+  if (is_aidl_checked) return is_aidl_available;
+  is_aidl_available =
+      (AServiceManager_checkService(
+           kDefaultAudioProviderFactoryInterface.c_str()) != nullptr);
+  is_aidl_checked = true;
+  return is_aidl_available;
+}
+
+/***
+ *
+ * BluetoothAudioSessionInstance
+ *
+ ***/
+std::mutex BluetoothAudioSessionInstance::mutex_;
+std::unordered_map<SessionType, std::shared_ptr<BluetoothAudioSession>>
+    BluetoothAudioSessionInstance::sessions_map_;
+
+std::shared_ptr<BluetoothAudioSession>
+BluetoothAudioSessionInstance::GetSessionInstance(
+    const SessionType& session_type) {
+  std::lock_guard<std::mutex> guard(mutex_);
+
+  if (!sessions_map_.empty()) {
+    auto entry = sessions_map_.find(session_type);
+    if (entry != sessions_map_.end()) {
+      return entry->second;
+    }
+  }
+  std::shared_ptr<BluetoothAudioSession> session_ptr =
+      std::make_shared<BluetoothAudioSession>(session_type);
+  sessions_map_[session_type] = session_ptr;
+  return session_ptr;
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.h
new file mode 100644
index 0000000..85fd571
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.h
@@ -0,0 +1,227 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/common/SinkMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
+#include <aidl/android/hardware/bluetooth/audio/IBluetoothAudioProvider.h>
+#include <aidl/android/hardware/bluetooth/audio/IBluetoothAudioProviderFactory.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+#include <fmq/AidlMessageQueue.h>
+#include <hardware/audio.h>
+
+#include <mutex>
+#include <unordered_map>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+
+using ::aidl::android::hardware::audio::common::SinkMetadata;
+using ::aidl::android::hardware::audio::common::SourceMetadata;
+
+using MQDataType = int8_t;
+using MQDataMode = SynchronizedReadWrite;
+using DataMQ = AidlMessageQueue<MQDataType, MQDataMode>;
+using DataMQDesc =
+    ::aidl::android::hardware::common::fmq::MQDescriptor<MQDataType,
+                                                         MQDataMode>;
+
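+// An observer cookie packs the SessionType into its upper byte and an
+// observer index (0x00 ~ 0x0f) into its lower byte, so each session type can
+// register at most kObserversCookieSize observers.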
+static constexpr uint16_t kObserversCookieSize = 0x0010;  // 0x0000 ~ 0x000f
+static constexpr uint16_t kObserversCookieUndefined =
+    (static_cast<uint16_t>(SessionType::UNKNOWN) << 8 & 0xff00);
+inline SessionType ObserversCookieGetSessionType(uint16_t cookie) {
+  return static_cast<SessionType>(cookie >> 8 & 0x00ff);
+}
+inline uint16_t ObserversCookieGetInitValue(SessionType session_type) {
+  return (static_cast<uint16_t>(session_type) << 8 & 0xff00);
+}
+inline uint16_t ObserversCookieGetUpperBound(SessionType session_type) {
+  return (static_cast<uint16_t>(session_type) << 8 & 0xff00) +
+         kObserversCookieSize;
+}
+
+/***
+ * These are the callbacks for stream started / suspended results and session
+ * changes, which the bluetooth_audio module uses to receive status
+ * notifications
+ ***/
+struct PortStatusCallbacks {
+  /***
+   * control_result_cb_ - when the Bluetooth stack reports results of
+   * streamStarted or streamSuspended, the BluetoothAudioProvider will invoke
+   * this callback to report to the bluetooth_audio module.
+   * @param: cookie - indicates which bluetooth_audio output should handle
+   * @param: start_resp - this report is for startStream or not
+   * @param: status - the result of startStream
+   ***/
+  std::function<void(uint16_t cookie, bool start_resp,
+                     BluetoothAudioStatus status)>
+      control_result_cb_;
+  /***
+   * session_changed_cb_ - when the Bluetooth stack starts / ends a session,
+   * the BluetoothAudioProvider will invoke this callback to notify the
+   * bluetooth_audio module.
+   * @param: cookie - indicates which bluetooth_audio output should handle
+   ***/
+  std::function<void(uint16_t cookie)> session_changed_cb_;
+  /***
+   * audio_configuration_changed_cb_ - when the Bluetooth stack changes the
+   * audio configuration, the BluetoothAudioProvider will invoke this callback
+   * to notify the bluetooth_audio module.
+   * @param: cookie - indicates which bluetooth_audio output should handle it
+   ***/
+  std::function<void(uint16_t cookie)> audio_configuration_changed_cb_;
+};
+
+class BluetoothAudioSession {
+ public:
+  BluetoothAudioSession(const SessionType& session_type);
+
+  /***
+   * The function helps to check if this session is ready or not
+   * @return: true if the Bluetooth stack has started the specified session
+   ***/
+  bool IsSessionReady();
+
+  /***
+   * The report function is used to report that the Bluetooth stack has started
+   * this session without any failure, and will invoke session_changed_cb_ to
+   * notify those registered bluetooth_audio outputs
+   ***/
+  void OnSessionStarted(const std::shared_ptr<IBluetoothAudioPort> stack_iface,
+                        const DataMQDesc* mq_desc,
+                        const AudioConfiguration& audio_config);
+
+  /***
+   * The report function is used to report that the Bluetooth stack has ended
+   * the session, and will invoke session_changed_cb_ to notify registered
+   * bluetooth_audio outputs
+   ***/
+  void OnSessionEnded();
+
+  /***
+   * The report function is used to report that the Bluetooth stack has notified
+   * the result of startStream or suspendStream, and will invoke
+   * control_result_cb_ to notify registered bluetooth_audio outputs
+   ***/
+  void ReportControlStatus(bool start_resp, BluetoothAudioStatus status);
+
+  /***
+   * The control function helps the bluetooth_audio module to register
+   * PortStatusCallbacks
+   * @return: cookie - the number assigned to this bluetooth_audio output
+   ***/
+  uint16_t RegisterStatusCback(const PortStatusCallbacks& cbacks);
+
+  /***
+   * The control function helps the bluetooth_audio module to unregister
+   * PortStatusCallbacks
+   * @param: cookie - indicates which bluetooth_audio output is unregistering
+   ***/
+  void UnregisterStatusCback(uint16_t cookie);
+
+  /***
+   * The control function is for the bluetooth_audio module to get the current
+   * AudioConfiguration
+   ***/
+  const AudioConfiguration& GetAudioConfig();
+
+  /***
+   * The report function is used to report that the Bluetooth stack has notified
+   * the audio configuration changed, and will invoke
+   * audio_configuration_changed_cb_ to notify registered bluetooth_audio
+   * outputs
+   ***/
+  void ReportAudioConfigChanged(const AudioConfiguration& audio_config);
+
+  /***
+   * Those control functions are for the bluetooth_audio module to start,
+   * suspend, stop stream, to check position, and to update metadata.
+   ***/
+  bool StartStream();
+  bool SuspendStream();
+  void StopStream();
+  bool GetPresentationPosition(PresentationPosition& presentation_position);
+  void UpdateSourceMetadata(const struct source_metadata& source_metadata);
+  void UpdateSinkMetadata(const struct sink_metadata& sink_metadata);
+
+  // The control function writes stream to FMQ
+  size_t OutWritePcmData(const void* buffer, size_t bytes);
+  // The control function reads stream from FMQ
+  size_t InReadPcmData(void* buffer, size_t bytes);
+
+  // Returns whether an IBluetoothAudioProviderFactory implementation exists
+  static bool IsAidlAvailable();
+
+  static constexpr PcmConfiguration kInvalidPcmConfiguration = {};
+  // can't be constexpr because of non-literal type
+  static const CodecConfiguration kInvalidCodecConfiguration;
+
+  static AudioConfiguration invalidSoftwareAudioConfiguration;
+  static AudioConfiguration invalidOffloadAudioConfiguration;
+  static AudioConfiguration invalidLeOffloadAudioConfig;
+
+ private:
+  // using recursive_mutex to allow hwbinder to re-enter again.
+  std::recursive_mutex mutex_;
+  SessionType session_type_;
+
+  // audio control path to use for both software and offloading
+  std::shared_ptr<IBluetoothAudioPort> stack_iface_;
+  // audio data path (FMQ) for software encoding
+  std::unique_ptr<DataMQ> data_mq_;
+  // audio data configuration for both software and offloading
+  std::unique_ptr<AudioConfiguration> audio_config_;
+
+  // saving those registered bluetooth_audio's callbacks
+  std::unordered_map<uint16_t, std::shared_ptr<struct PortStatusCallbacks>>
+      observers_;
+
+  bool UpdateDataPath(const DataMQDesc* mq_desc);
+  bool UpdateAudioConfig(const AudioConfiguration& audio_config);
+  // invoking the registered session_changed_cb_
+  void ReportSessionStatus();
+
+  static inline std::atomic<bool> is_aidl_checked = false;
+  static inline std::atomic<bool> is_aidl_available = false;
+  static inline const std::string kDefaultAudioProviderFactoryInterface =
+      std::string() + IBluetoothAudioProviderFactory::descriptor + "/default";
+};
+
+class BluetoothAudioSessionInstance {
+ public:
+  // The API is to fetch the specified session of A2DP / Hearing Aid
+  static std::shared_ptr<BluetoothAudioSession> GetSessionInstance(
+      const SessionType& session_type);
+
+ private:
+  static std::mutex mutex_;
+  static std::unordered_map<SessionType, std::shared_ptr<BluetoothAudioSession>>
+      sessions_map_;
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
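A minimal sketch, not part of the patch, of how the observer cookies declared above are laid out: the upper byte carries the SessionType, the low nibble an observer index, so each session type can host at most kObserversCookieSize observers.

    #include <cassert>
    #include <cstdint>

    #include "BluetoothAudioSession.h"

    namespace bta = aidl::android::hardware::bluetooth::audio;

    void CheckCookieLayout() {
      constexpr auto kType = bta::SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH;
      uint16_t first = bta::ObserversCookieGetInitValue(kType);
      uint16_t past_end = bta::ObserversCookieGetUpperBound(kType);
      // Every cookie handed out for this session type decodes back to it.
      for (uint16_t cookie = first; cookie < past_end; ++cookie) {
        assert(bta::ObserversCookieGetSessionType(cookie) == kType);
      }
      // At most kObserversCookieSize (16) observers fit in one session type.
      assert(past_end - first == bta::kObserversCookieSize);
    }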
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionControl.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionControl.h
new file mode 100644
index 0000000..a3ed428
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionControl.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioSessionControl {
+ public:
+  /***
+   * The control API helps to check if the session is ready or not
+   * @return: true if the Bluetooth stack has started the specified session
+   ***/
+  static bool IsSessionReady(const SessionType& session_type) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->IsSessionReady();
+    }
+
+    return false;
+  }
+
+  /***
+   * The control API helps the bluetooth_audio module to register
+   * PortStatusCallbacks
+   * @return: cookie - the number assigned to this bluetooth_audio output
+   ***/
+  static uint16_t RegisterControlResultCback(
+      const SessionType& session_type, const PortStatusCallbacks& cbacks) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->RegisterStatusCback(cbacks);
+    }
+    return kObserversCookieUndefined;
+  }
+
+  /***
+   * The control API helps the bluetooth_audio module to unregister
+   * PortStatusCallbacks
+   * @param: cookie - indicates which bluetooth_audio output is unregistering
+   ***/
+  static void UnregisterControlResultCback(const SessionType& session_type,
+                                           uint16_t cookie) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->UnregisterStatusCback(cookie);
+    }
+  }
+
+  /***
+   * The control API for the bluetooth_audio module to get the current
+   * AudioConfiguration
+   ***/
+  static const AudioConfiguration GetAudioConfig(
+      const SessionType& session_type) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->GetAudioConfig();
+    } else if (session_type ==
+               SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+      return BluetoothAudioSession::invalidOffloadAudioConfiguration;
+    } else {
+      return BluetoothAudioSession::invalidSoftwareAudioConfiguration;
+    }
+  }
+
+  /***
+   * Those control APIs are for the bluetooth_audio module to start / suspend
+   * / stop stream, to check position, and to update metadata.
+   ***/
+  static bool StartStream(const SessionType& session_type) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->StartStream();
+    }
+    return false;
+  }
+
+  static bool SuspendStream(const SessionType& session_type) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->SuspendStream();
+    }
+    return false;
+  }
+
+  static void StopStream(const SessionType& session_type) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->StopStream();
+    }
+  }
+
+  static bool GetPresentationPosition(
+      const SessionType& session_type,
+      PresentationPosition& presentation_position) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->GetPresentationPosition(presentation_position);
+    }
+    return false;
+  }
+
+  static void UpdateSourceMetadata(
+      const SessionType& session_type,
+      const struct source_metadata& source_metadata) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->UpdateSourceMetadata(source_metadata);
+    }
+  }
+
+  static void UpdateSinkMetadata(const SessionType& session_type,
+                                 const struct sink_metadata& sink_metadata) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->UpdateSinkMetadata(sink_metadata);
+    }
+  }
+
+  /***
+   * The control API writes stream to FMQ
+   ***/
+  static size_t OutWritePcmData(const SessionType& session_type,
+                                const void* buffer, size_t bytes) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->OutWritePcmData(buffer, bytes);
+    }
+    return 0;
+  }
+
+  /***
+   * The control API reads stream from FMQ
+   ***/
+  static size_t InReadPcmData(const SessionType& session_type, void* buffer,
+                              size_t bytes) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      return session_ptr->InReadPcmData(buffer, bytes);
+    }
+    return 0;
+  }
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
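A minimal sketch, not part of the patch, of how a software-encoding audio output might register its observers through the static control API above; the session type and the lambda bodies are placeholders.

    #include <cstdint>

    #include "BluetoothAudioSessionControl.h"

    namespace bta = aidl::android::hardware::bluetooth::audio;

    uint16_t RegisterOutputObserver() {
      constexpr auto kType = bta::SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH;
      bta::PortStatusCallbacks cbacks{};
      cbacks.control_result_cb_ = [](uint16_t /*cookie*/, bool /*start_resp*/,
                                     bta::BluetoothAudioStatus /*status*/) {
        // Unblock the pending StartStream() / SuspendStream() caller here.
      };
      cbacks.session_changed_cb_ = [](uint16_t /*cookie*/) {
        // Session came up or went down; re-query GetAudioConfig() here.
      };
      return bta::BluetoothAudioSessionControl::RegisterControlResultCback(
          kType, cbacks);
    }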
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionReport.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionReport.h
new file mode 100644
index 0000000..18569c3
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionReport.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioSessionReport {
+ public:
+  /***
+   * The API reports the Bluetooth stack has started the session, and will
+   * inform registered bluetooth_audio outputs
+   ***/
+  static void OnSessionStarted(
+      const SessionType& session_type,
+      const std::shared_ptr<IBluetoothAudioPort> host_iface,
+      const DataMQDesc* data_mq, const AudioConfiguration& audio_config) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->OnSessionStarted(host_iface, data_mq, audio_config);
+    }
+  }
+
+  /***
+   * The API reports the Bluetooth stack has ended the session, and will
+   * inform registered bluetooth_audio outputs
+   ***/
+  static void OnSessionEnded(const SessionType& session_type) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->OnSessionEnded();
+    }
+  }
+
+  /***
+   * The API reports that the Bluetooth stack has replied with the result of
+   * startStream or suspendStream, and will inform registered bluetooth_audio
+   * outputs
+   ***/
+  static void ReportControlStatus(const SessionType& session_type,
+                                  const bool& start_resp,
+                                  BluetoothAudioStatus status) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->ReportControlStatus(start_resp, status);
+    }
+  }
+
+  /***
+   * The API reports that the Bluetooth stack has notified a change of the
+   * audio configuration, and will inform registered bluetooth_audio outputs
+   ***/
+  static void ReportAudioConfigChanged(const SessionType& session_type,
+                                       const AudioConfiguration& audio_config) {
+    std::shared_ptr<BluetoothAudioSession> session_ptr =
+        BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+    if (session_ptr != nullptr) {
+      session_ptr->ReportAudioConfigChanged(audio_config);
+    }
+  }
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
\ No newline at end of file
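A minimal provider-side sketch, not part of the patch, assuming the provider has already received the stack's startSession() arguments and simply relays them through the helper above.

    #include <memory>

    #include "BluetoothAudioSessionReport.h"

    namespace bta = aidl::android::hardware::bluetooth::audio;

    void NotifySessionStarted(
        const std::shared_ptr<bta::IBluetoothAudioPort>& host_iface,
        const bta::DataMQDesc* mq_desc,
        const bta::AudioConfiguration& audio_config) {
      constexpr auto kType = bta::SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH;
      // Wakes every registered bluetooth_audio output via session_changed_cb_.
      bta::BluetoothAudioSessionReport::OnSessionStarted(kType, host_iface,
                                                         mq_desc, audio_config);
    }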
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware.cpp b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware.cpp
new file mode 100644
index 0000000..91e0238
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware.cpp
@@ -0,0 +1,775 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BtAudioNakahara"
+
+#include <aidl/android/hardware/bluetooth/audio/AudioConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/BluetoothAudioStatus.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+#include <android-base/logging.h>
+
+#include <functional>
+#include <unordered_map>
+
+#include "../aidl_session/BluetoothAudioSession.h"
+#include "../aidl_session/BluetoothAudioSessionControl.h"
+#include "HidlToAidlMiddleware_2_0.h"
+#include "HidlToAidlMiddleware_2_1.h"
+#include "HidlToAidlMiddleware_2_2.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using HidlStatus = ::android::hardware::bluetooth::audio::V2_0::Status;
+using PcmConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::PcmParameters;
+using SampleRate_2_0 = ::android::hardware::bluetooth::audio::V2_0::SampleRate;
+using ChannelMode_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::ChannelMode;
+using BitsPerSample_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::BitsPerSample;
+using CodecConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::CodecConfiguration;
+using CodecType_2_0 = ::android::hardware::bluetooth::audio::V2_0::CodecType;
+using SbcConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::SbcParameters;
+using AacConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::AacParameters;
+using LdacConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::LdacParameters;
+using AptxConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::AptxParameters;
+using SbcAllocMethod_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::SbcAllocMethod;
+using SbcBlockLength_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::SbcBlockLength;
+using SbcChannelMode_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::SbcChannelMode;
+using SbcNumSubbands_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::SbcNumSubbands;
+using AacObjectType_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::AacObjectType;
+using AacVarBitRate_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::AacVariableBitRate;
+using LdacChannelMode_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::LdacChannelMode;
+using LdacQualityIndex_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::LdacQualityIndex;
+
+using PcmConfig_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::PcmParameters;
+using SampleRate_2_1 = ::android::hardware::bluetooth::audio::V2_1::SampleRate;
+using Lc3CodecConfig_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::Lc3CodecConfiguration;
+using Lc3Config_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::Lc3Parameters;
+using Lc3FrameDuration_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::Lc3FrameDuration;
+
+using LeAudioConfig_2_2 =
+    ::android::hardware::bluetooth::audio::V2_2::LeAudioConfiguration;
+using LeAudioMode_2_2 =
+    ::android::hardware::bluetooth::audio::V2_2::LeAudioMode;
+
+std::mutex legacy_callback_lock;
+std::unordered_map<
+    SessionType,
+    std::unordered_map<uint16_t, std::shared_ptr<PortStatusCallbacks_2_2>>>
+    legacy_callback_table;
+
+const static std::unordered_map<SessionType_2_0, SessionType>
+    session_type_2_0_to_aidl_map{
+        {SessionType_2_0::A2DP_SOFTWARE_ENCODING_DATAPATH,
+         SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH},
+        {SessionType_2_0::A2DP_HARDWARE_OFFLOAD_DATAPATH,
+         SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH},
+        {SessionType_2_0::HEARING_AID_SOFTWARE_ENCODING_DATAPATH,
+         SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH},
+    };
+
+const static std::unordered_map<SessionType_2_1, SessionType>
+    session_type_2_1_to_aidl_map{
+        {SessionType_2_1::A2DP_SOFTWARE_ENCODING_DATAPATH,
+         SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH},
+        {SessionType_2_1::A2DP_HARDWARE_OFFLOAD_DATAPATH,
+         SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH},
+        {SessionType_2_1::HEARING_AID_SOFTWARE_ENCODING_DATAPATH,
+         SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH},
+        {SessionType_2_1::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH,
+         SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH},
+        {SessionType_2_1::LE_AUDIO_SOFTWARE_DECODED_DATAPATH,
+         SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH},
+        {SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH,
+         SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH},
+        {SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH,
+         SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH},
+    };
+
+const static std::unordered_map<int32_t, SampleRate_2_0>
+    sample_rate_to_hidl_2_0_map{
+        {44100, SampleRate_2_0::RATE_44100},
+        {48000, SampleRate_2_0::RATE_48000},
+        {88200, SampleRate_2_0::RATE_88200},
+        {96000, SampleRate_2_0::RATE_96000},
+        {176400, SampleRate_2_0::RATE_176400},
+        {192000, SampleRate_2_0::RATE_192000},
+        {16000, SampleRate_2_0::RATE_16000},
+        {24000, SampleRate_2_0::RATE_24000},
+    };
+
+const static std::unordered_map<int32_t, SampleRate_2_1>
+    sample_rate_to_hidl_2_1_map{
+        {44100, SampleRate_2_1::RATE_44100},
+        {48000, SampleRate_2_1::RATE_48000},
+        {88200, SampleRate_2_1::RATE_88200},
+        {96000, SampleRate_2_1::RATE_96000},
+        {176400, SampleRate_2_1::RATE_176400},
+        {192000, SampleRate_2_1::RATE_192000},
+        {16000, SampleRate_2_1::RATE_16000},
+        {24000, SampleRate_2_1::RATE_24000},
+        {8000, SampleRate_2_1::RATE_8000},
+        {32000, SampleRate_2_1::RATE_32000},
+    };
+
+const static std::unordered_map<CodecType, CodecType_2_0>
+    codec_type_to_hidl_2_0_map{
+        {CodecType::UNKNOWN, CodecType_2_0::UNKNOWN},
+        {CodecType::SBC, CodecType_2_0::SBC},
+        {CodecType::AAC, CodecType_2_0::AAC},
+        {CodecType::APTX, CodecType_2_0::APTX},
+        {CodecType::APTX_HD, CodecType_2_0::APTX_HD},
+        {CodecType::LDAC, CodecType_2_0::LDAC},
+        {CodecType::LC3, CodecType_2_0::UNKNOWN},
+    };
+
+const static std::unordered_map<SbcChannelMode, SbcChannelMode_2_0>
+    sbc_channel_mode_to_hidl_2_0_map{
+        {SbcChannelMode::UNKNOWN, SbcChannelMode_2_0::UNKNOWN},
+        {SbcChannelMode::JOINT_STEREO, SbcChannelMode_2_0::JOINT_STEREO},
+        {SbcChannelMode::STEREO, SbcChannelMode_2_0::STEREO},
+        {SbcChannelMode::DUAL, SbcChannelMode_2_0::DUAL},
+        {SbcChannelMode::MONO, SbcChannelMode_2_0::MONO},
+    };
+
+const static std::unordered_map<int8_t, SbcBlockLength_2_0>
+    sbc_block_length_to_hidl_map{
+        {4, SbcBlockLength_2_0::BLOCKS_4},
+        {8, SbcBlockLength_2_0::BLOCKS_8},
+        {12, SbcBlockLength_2_0::BLOCKS_12},
+        {16, SbcBlockLength_2_0::BLOCKS_16},
+    };
+
+const static std::unordered_map<int8_t, SbcNumSubbands_2_0>
+    sbc_subbands_to_hidl_map{
+        {4, SbcNumSubbands_2_0::SUBBAND_4},
+        {8, SbcNumSubbands_2_0::SUBBAND_8},
+    };
+
+const static std::unordered_map<SbcAllocMethod, SbcAllocMethod_2_0>
+    sbc_alloc_method_to_hidl_map{
+        {SbcAllocMethod::ALLOC_MD_S, SbcAllocMethod_2_0::ALLOC_MD_S},
+        {SbcAllocMethod::ALLOC_MD_L, SbcAllocMethod_2_0::ALLOC_MD_L},
+    };
+
+const static std::unordered_map<AacObjectType, AacObjectType_2_0>
+    aac_object_type_to_hidl_map{
+        {AacObjectType::MPEG2_LC, AacObjectType_2_0::MPEG2_LC},
+        {AacObjectType::MPEG4_LC, AacObjectType_2_0::MPEG4_LC},
+        {AacObjectType::MPEG4_LTP, AacObjectType_2_0::MPEG4_LTP},
+        {AacObjectType::MPEG4_SCALABLE, AacObjectType_2_0::MPEG4_SCALABLE},
+    };
+
+const static std::unordered_map<LdacChannelMode, LdacChannelMode_2_0>
+    ldac_channel_mode_to_hidl_map{
+        {LdacChannelMode::UNKNOWN, LdacChannelMode_2_0::UNKNOWN},
+        {LdacChannelMode::STEREO, LdacChannelMode_2_0::STEREO},
+        {LdacChannelMode::DUAL, LdacChannelMode_2_0::DUAL},
+        {LdacChannelMode::MONO, LdacChannelMode_2_0::MONO},
+    };
+
+const static std::unordered_map<LdacQualityIndex, LdacQualityIndex_2_0>
+    ldac_qindex_to_hidl_map{
+        {LdacQualityIndex::HIGH, LdacQualityIndex_2_0::QUALITY_HIGH},
+        {LdacQualityIndex::MID, LdacQualityIndex_2_0::QUALITY_MID},
+        {LdacQualityIndex::LOW, LdacQualityIndex_2_0::QUALITY_LOW},
+        {LdacQualityIndex::ABR, LdacQualityIndex_2_0::QUALITY_ABR},
+    };
+
+const static std::unordered_map<LeAudioMode, LeAudioMode_2_2>
+    leaudio_mode_to_hidl_map{
+        {LeAudioMode::UNKNOWN, LeAudioMode_2_2::UNKNOWN},
+        {LeAudioMode::UNICAST, LeAudioMode_2_2::UNICAST},
+        {LeAudioMode::BROADCAST, LeAudioMode_2_2::BROADCAST},
+    };
+
+inline SessionType from_session_type_2_0(
+    const SessionType_2_0& session_type_hidl) {
+  auto it = session_type_2_0_to_aidl_map.find(session_type_hidl);
+  if (it != session_type_2_0_to_aidl_map.end()) return it->second;
+  return SessionType::UNKNOWN;
+}
+
+inline SessionType from_session_type_2_1(
+    const SessionType_2_1& session_type_hidl) {
+  auto it = session_type_2_1_to_aidl_map.find(session_type_hidl);
+  if (it != session_type_2_1_to_aidl_map.end()) return it->second;
+  return SessionType::UNKNOWN;
+}
+
+inline HidlStatus to_hidl_status(const BluetoothAudioStatus& status) {
+  switch (status) {
+    case BluetoothAudioStatus::SUCCESS:
+      return HidlStatus::SUCCESS;
+    case BluetoothAudioStatus::UNSUPPORTED_CODEC_CONFIGURATION:
+      return HidlStatus::UNSUPPORTED_CODEC_CONFIGURATION;
+    default:
+      return HidlStatus::FAILURE;
+  }
+}
+
+inline SampleRate_2_0 to_hidl_sample_rate_2_0(const int32_t sample_rate_hz) {
+  auto it = sample_rate_to_hidl_2_0_map.find(sample_rate_hz);
+  if (it != sample_rate_to_hidl_2_0_map.end()) return it->second;
+  return SampleRate_2_0::RATE_UNKNOWN;
+}
+
+inline SampleRate_2_1 to_hidl_sample_rate_2_1(const int32_t sample_rate_hz) {
+  auto it = sample_rate_to_hidl_2_1_map.find(sample_rate_hz);
+  if (it != sample_rate_to_hidl_2_1_map.end()) return it->second;
+  return SampleRate_2_1::RATE_UNKNOWN;
+}
+
+inline BitsPerSample_2_0 to_hidl_bits_per_sample(const int8_t bit_per_sample) {
+  switch (bit_per_sample) {
+    case 16:
+      return BitsPerSample_2_0::BITS_16;
+    case 24:
+      return BitsPerSample_2_0::BITS_24;
+    case 32:
+      return BitsPerSample_2_0::BITS_32;
+    default:
+      return BitsPerSample_2_0::BITS_UNKNOWN;
+  }
+}
+
+inline ChannelMode_2_0 to_hidl_channel_mode(const ChannelMode channel_mode) {
+  switch (channel_mode) {
+    case ChannelMode::MONO:
+      return ChannelMode_2_0::MONO;
+    case ChannelMode::STEREO:
+      return ChannelMode_2_0::STEREO;
+    default:
+      return ChannelMode_2_0::UNKNOWN;
+  }
+}
+
+inline PcmConfig_2_0 to_hidl_pcm_config_2_0(
+    const PcmConfiguration& pcm_config) {
+  PcmConfig_2_0 hidl_pcm_config;
+  hidl_pcm_config.sampleRate = to_hidl_sample_rate_2_0(pcm_config.sampleRateHz);
+  hidl_pcm_config.channelMode = to_hidl_channel_mode(pcm_config.channelMode);
+  hidl_pcm_config.bitsPerSample =
+      to_hidl_bits_per_sample(pcm_config.bitsPerSample);
+  return hidl_pcm_config;
+}
+
+inline CodecType_2_0 to_hidl_codec_type_2_0(const CodecType codec_type) {
+  auto it = codec_type_to_hidl_2_0_map.find(codec_type);
+  if (it != codec_type_to_hidl_2_0_map.end()) return it->second;
+  return CodecType_2_0::UNKNOWN;
+}
+
+inline SbcConfig_2_0 to_hidl_sbc_config(const SbcConfiguration sbc_config) {
+  SbcConfig_2_0 hidl_sbc_config;
+  hidl_sbc_config.minBitpool = sbc_config.minBitpool;
+  hidl_sbc_config.maxBitpool = sbc_config.maxBitpool;
+  hidl_sbc_config.sampleRate = to_hidl_sample_rate_2_0(sbc_config.sampleRateHz);
+  hidl_sbc_config.bitsPerSample =
+      to_hidl_bits_per_sample(sbc_config.bitsPerSample);
+  if (sbc_channel_mode_to_hidl_2_0_map.find(sbc_config.channelMode) !=
+      sbc_channel_mode_to_hidl_2_0_map.end()) {
+    hidl_sbc_config.channelMode =
+        sbc_channel_mode_to_hidl_2_0_map.at(sbc_config.channelMode);
+  }
+  if (sbc_block_length_to_hidl_map.find(sbc_config.blockLength) !=
+      sbc_block_length_to_hidl_map.end()) {
+    hidl_sbc_config.blockLength =
+        sbc_block_length_to_hidl_map.at(sbc_config.blockLength);
+  }
+  if (sbc_subbands_to_hidl_map.find(sbc_config.numSubbands) !=
+      sbc_subbands_to_hidl_map.end()) {
+    hidl_sbc_config.numSubbands =
+        sbc_subbands_to_hidl_map.at(sbc_config.numSubbands);
+  }
+  if (sbc_alloc_method_to_hidl_map.find(sbc_config.allocMethod) !=
+      sbc_alloc_method_to_hidl_map.end()) {
+    hidl_sbc_config.allocMethod =
+        sbc_alloc_method_to_hidl_map.at(sbc_config.allocMethod);
+  }
+  return hidl_sbc_config;
+}
+
+inline AacConfig_2_0 to_hidl_aac_config(const AacConfiguration aac_config) {
+  AacConfig_2_0 hidl_aac_config;
+  hidl_aac_config.sampleRate = to_hidl_sample_rate_2_0(aac_config.sampleRateHz);
+  hidl_aac_config.bitsPerSample =
+      to_hidl_bits_per_sample(aac_config.bitsPerSample);
+  hidl_aac_config.channelMode = to_hidl_channel_mode(aac_config.channelMode);
+  if (aac_object_type_to_hidl_map.find(aac_config.objectType) !=
+      aac_object_type_to_hidl_map.end()) {
+    hidl_aac_config.objectType =
+        aac_object_type_to_hidl_map.at(aac_config.objectType);
+  }
+  hidl_aac_config.variableBitRateEnabled = aac_config.variableBitRateEnabled
+                                               ? AacVarBitRate_2_0::ENABLED
+                                               : AacVarBitRate_2_0::DISABLED;
+  return hidl_aac_config;
+}
+
+inline LdacConfig_2_0 to_hidl_ldac_config(const LdacConfiguration ldac_config) {
+  LdacConfig_2_0 hidl_ldac_config;
+  hidl_ldac_config.sampleRate =
+      to_hidl_sample_rate_2_0(ldac_config.sampleRateHz);
+  hidl_ldac_config.bitsPerSample =
+      to_hidl_bits_per_sample(ldac_config.bitsPerSample);
+  if (ldac_channel_mode_to_hidl_map.find(ldac_config.channelMode) !=
+      ldac_channel_mode_to_hidl_map.end()) {
+    hidl_ldac_config.channelMode =
+        ldac_channel_mode_to_hidl_map.at(ldac_config.channelMode);
+  }
+  if (ldac_qindex_to_hidl_map.find(ldac_config.qualityIndex) !=
+      ldac_qindex_to_hidl_map.end()) {
+    hidl_ldac_config.qualityIndex =
+        ldac_qindex_to_hidl_map.at(ldac_config.qualityIndex);
+  }
+  return hidl_ldac_config;
+}
+
+inline AptxConfig_2_0 to_hidl_aptx_config(const AptxConfiguration aptx_config) {
+  AptxConfig_2_0 hidl_aptx_config;
+  hidl_aptx_config.sampleRate =
+      to_hidl_sample_rate_2_0(aptx_config.sampleRateHz);
+  hidl_aptx_config.bitsPerSample =
+      to_hidl_bits_per_sample(aptx_config.bitsPerSample);
+  hidl_aptx_config.channelMode = to_hidl_channel_mode(aptx_config.channelMode);
+  return hidl_aptx_config;
+}
+
+inline CodecConfig_2_0 to_hidl_codec_config_2_0(
+    const CodecConfiguration& codec_config) {
+  CodecConfig_2_0 hidl_codec_config;
+  hidl_codec_config.codecType = to_hidl_codec_type_2_0(codec_config.codecType);
+  hidl_codec_config.encodedAudioBitrate =
+      static_cast<uint32_t>(codec_config.encodedAudioBitrate);
+  hidl_codec_config.peerMtu = static_cast<uint32_t>(codec_config.peerMtu);
+  hidl_codec_config.isScmstEnabled = codec_config.isScmstEnabled;
+  switch (codec_config.config.getTag()) {
+    case CodecConfiguration::CodecSpecific::sbcConfig:
+      hidl_codec_config.config.sbcConfig(to_hidl_sbc_config(
+          codec_config.config
+              .get<CodecConfiguration::CodecSpecific::sbcConfig>()));
+      break;
+    case CodecConfiguration::CodecSpecific::aacConfig:
+      hidl_codec_config.config.aacConfig(to_hidl_aac_config(
+          codec_config.config
+              .get<CodecConfiguration::CodecSpecific::aacConfig>()));
+      break;
+    case CodecConfiguration::CodecSpecific::ldacConfig:
+      hidl_codec_config.config.ldacConfig(to_hidl_ldac_config(
+          codec_config.config
+              .get<CodecConfiguration::CodecSpecific::ldacConfig>()));
+      break;
+    case CodecConfiguration::CodecSpecific::aptxConfig:
+      hidl_codec_config.config.aptxConfig(to_hidl_aptx_config(
+          codec_config.config
+              .get<CodecConfiguration::CodecSpecific::aptxConfig>()));
+      break;
+    default:
+      break;
+  }
+  return hidl_codec_config;
+}
+
+inline AudioConfig_2_0 to_hidl_audio_config_2_0(
+    const AudioConfiguration& audio_config) {
+  AudioConfig_2_0 hidl_audio_config;
+  if (audio_config.getTag() == AudioConfiguration::pcmConfig) {
+    hidl_audio_config.pcmConfig(to_hidl_pcm_config_2_0(
+        audio_config.get<AudioConfiguration::pcmConfig>()));
+  } else if (audio_config.getTag() == AudioConfiguration::a2dpConfig) {
+    hidl_audio_config.codecConfig(to_hidl_codec_config_2_0(
+        audio_config.get<AudioConfiguration::a2dpConfig>()));
+  }
+  return hidl_audio_config;
+}
+
+inline PcmConfig_2_1 to_hidl_pcm_config_2_1(
+    const PcmConfiguration& pcm_config) {
+  PcmConfig_2_1 hidl_pcm_config;
+  hidl_pcm_config.sampleRate = to_hidl_sample_rate_2_1(pcm_config.sampleRateHz);
+  hidl_pcm_config.channelMode = to_hidl_channel_mode(pcm_config.channelMode);
+  hidl_pcm_config.bitsPerSample =
+      to_hidl_bits_per_sample(pcm_config.bitsPerSample);
+  hidl_pcm_config.dataIntervalUs =
+      static_cast<uint32_t>(pcm_config.dataIntervalUs);
+  return hidl_pcm_config;
+}
+
+inline Lc3Config_2_1 to_hidl_lc3_config_2_1(
+    const Lc3Configuration& lc3_config) {
+  Lc3Config_2_1 hidl_lc3_config;
+  hidl_lc3_config.pcmBitDepth = to_hidl_bits_per_sample(lc3_config.pcmBitDepth);
+  hidl_lc3_config.samplingFrequency =
+      to_hidl_sample_rate_2_1(lc3_config.samplingFrequencyHz);
+  if (lc3_config.frameDurationUs == 10000)
+    hidl_lc3_config.frameDuration = Lc3FrameDuration_2_1::DURATION_10000US;
+  else if (lc3_config.frameDurationUs == 7500)
+    hidl_lc3_config.frameDuration = Lc3FrameDuration_2_1::DURATION_7500US;
+  hidl_lc3_config.octetsPerFrame =
+      static_cast<uint32_t>(lc3_config.octetsPerFrame);
+  hidl_lc3_config.blocksPerSdu = static_cast<uint32_t>(lc3_config.blocksPerSdu);
+  return hidl_lc3_config;
+}
+
+inline Lc3CodecConfig_2_1 to_hidl_leaudio_config_2_1(
+    const LeAudioConfiguration& leaudio_config) {
+  auto& unicast_config =
+      leaudio_config.modeConfig
+          .get<LeAudioConfiguration::LeAudioModeConfig::unicastConfig>();
+
+  auto& le_codec_config = unicast_config.leAudioCodecConfig
+                              .get<LeAudioCodecConfiguration::lc3Config>();
+
+  Lc3CodecConfig_2_1 hidl_lc3_codec_config;
+  hidl_lc3_codec_config.lc3Config = to_hidl_lc3_config_2_1(le_codec_config);
+
+  hidl_lc3_codec_config.audioChannelAllocation = 0;
+  for (const auto& map : unicast_config.streamMap) {
+    hidl_lc3_codec_config.audioChannelAllocation |= map.audioChannelAllocation;
+  }
+
+  return hidl_lc3_codec_config;
+}
+
+inline LeAudioConfig_2_2 to_hidl_leaudio_config_2_2(
+    const LeAudioConfiguration& leaudio_config) {
+  LeAudioConfig_2_2 hidl_leaudio_config;
+  if (leaudio_mode_to_hidl_map.find(leaudio_config.mode) !=
+      leaudio_mode_to_hidl_map.end()) {
+    hidl_leaudio_config.mode = leaudio_mode_to_hidl_map.at(leaudio_config.mode);
+  }
+
+  if (leaudio_config.modeConfig.getTag() ==
+      LeAudioConfiguration::LeAudioModeConfig::unicastConfig) {
+    auto& unicast_config =
+        leaudio_config.modeConfig
+            .get<LeAudioConfiguration::LeAudioModeConfig::unicastConfig>();
+    ::android::hardware::bluetooth::audio::V2_2::UnicastConfig
+        hidl_unicast_config;
+    hidl_unicast_config.peerDelay =
+        static_cast<uint32_t>(unicast_config.peerDelay);
+
+    auto& lc3_config = unicast_config.leAudioCodecConfig
+                           .get<LeAudioCodecConfiguration::lc3Config>();
+    hidl_unicast_config.lc3Config = to_hidl_lc3_config_2_1(lc3_config);
+
+    hidl_unicast_config.streamMap.resize(unicast_config.streamMap.size());
+    for (int i = 0; i < unicast_config.streamMap.size(); i++) {
+      hidl_unicast_config.streamMap[i].audioChannelAllocation =
+          static_cast<uint32_t>(
+              unicast_config.streamMap[i].audioChannelAllocation);
+      hidl_unicast_config.streamMap[i].streamHandle =
+          static_cast<uint16_t>(unicast_config.streamMap[i].streamHandle);
+    }
+  } else if (leaudio_config.modeConfig.getTag() ==
+             LeAudioConfiguration::LeAudioModeConfig::broadcastConfig) {
+    auto bcast_config =
+        leaudio_config.modeConfig
+            .get<LeAudioConfiguration::LeAudioModeConfig::broadcastConfig>();
+    ::android::hardware::bluetooth::audio::V2_2::BroadcastConfig
+        hidl_bcast_config;
+    hidl_bcast_config.streamMap.resize(bcast_config.streamMap.size());
+    for (int i = 0; i < bcast_config.streamMap.size(); i++) {
+      hidl_bcast_config.streamMap[i].audioChannelAllocation =
+          static_cast<uint32_t>(
+              bcast_config.streamMap[i].audioChannelAllocation);
+      hidl_bcast_config.streamMap[i].streamHandle =
+          static_cast<uint16_t>(bcast_config.streamMap[i].streamHandle);
+      hidl_bcast_config.streamMap[i].lc3Config = to_hidl_lc3_config_2_1(
+          bcast_config.streamMap[i]
+              .leAudioCodecConfig.get<LeAudioCodecConfiguration::lc3Config>());
+    }
+  }
+  return hidl_leaudio_config;
+}
+
+inline AudioConfig_2_1 to_hidl_audio_config_2_1(
+    const AudioConfiguration& audio_config) {
+  AudioConfig_2_1 hidl_audio_config;
+  switch (audio_config.getTag()) {
+    case AudioConfiguration::pcmConfig:
+      hidl_audio_config.pcmConfig(to_hidl_pcm_config_2_1(
+          audio_config.get<AudioConfiguration::pcmConfig>()));
+      break;
+    case AudioConfiguration::a2dpConfig:
+      hidl_audio_config.codecConfig(to_hidl_codec_config_2_0(
+          audio_config.get<AudioConfiguration::a2dpConfig>()));
+      break;
+    case AudioConfiguration::leAudioConfig:
+      hidl_audio_config.leAudioCodecConfig(to_hidl_leaudio_config_2_1(
+          audio_config.get<AudioConfiguration::leAudioConfig>()));
+      break;
+  }
+  return hidl_audio_config;
+}
+
+inline AudioConfig_2_2 to_hidl_audio_config_2_2(
+    const AudioConfiguration& audio_config) {
+  AudioConfig_2_2 hidl_audio_config;
+  switch (audio_config.getTag()) {
+    case AudioConfiguration::pcmConfig:
+      hidl_audio_config.pcmConfig(to_hidl_pcm_config_2_1(
+          audio_config.get<AudioConfiguration::pcmConfig>()));
+      break;
+    case AudioConfiguration::a2dpConfig:
+      hidl_audio_config.codecConfig(to_hidl_codec_config_2_0(
+          audio_config.get<AudioConfiguration::a2dpConfig>()));
+      break;
+    case AudioConfiguration::leAudioConfig:
+      hidl_audio_config.leAudioConfig(to_hidl_leaudio_config_2_2(
+          audio_config.get<AudioConfiguration::leAudioConfig>()));
+      break;
+  }
+  return hidl_audio_config;
+}
+
+/***
+ *
+ * 2.0
+ *
+ ***/
+
+bool HidlToAidlMiddleware_2_0::IsSessionReady(
+    const SessionType_2_0& session_type) {
+  return BluetoothAudioSessionControl::IsSessionReady(
+      from_session_type_2_0(session_type));
+}
+
+uint16_t HidlToAidlMiddleware_2_0::RegisterControlResultCback(
+    const SessionType_2_0& session_type,
+    const PortStatusCallbacks_2_0& cbacks) {
+  PortStatusCallbacks_2_2 callback_2_2{
+      .control_result_cb_ = cbacks.control_result_cb_,
+      .session_changed_cb_ = cbacks.session_changed_cb_,
+  };
+  return HidlToAidlMiddleware_2_2::RegisterControlResultCback(
+      static_cast<SessionType_2_1>(session_type), callback_2_2);
+}
+
+void HidlToAidlMiddleware_2_0::UnregisterControlResultCback(
+    const SessionType_2_0& session_type, uint16_t cookie) {
+  HidlToAidlMiddleware_2_2::UnregisterControlResultCback(
+      static_cast<SessionType_2_1>(session_type), cookie);
+}
+
+const AudioConfig_2_0 HidlToAidlMiddleware_2_0::GetAudioConfig(
+    const SessionType_2_0& session_type) {
+  return to_hidl_audio_config_2_0(BluetoothAudioSessionControl::GetAudioConfig(
+      from_session_type_2_0(session_type)));
+}
+
+bool HidlToAidlMiddleware_2_0::StartStream(
+    const SessionType_2_0& session_type) {
+  return BluetoothAudioSessionControl::StartStream(
+      from_session_type_2_0(session_type));
+}
+
+void HidlToAidlMiddleware_2_0::StopStream(const SessionType_2_0& session_type) {
+  return BluetoothAudioSessionControl::StopStream(
+      from_session_type_2_0(session_type));
+}
+
+bool HidlToAidlMiddleware_2_0::SuspendStream(
+    const SessionType_2_0& session_type) {
+  return BluetoothAudioSessionControl::SuspendStream(
+      from_session_type_2_0(session_type));
+}
+
+bool HidlToAidlMiddleware_2_0::GetPresentationPosition(
+    const SessionType_2_0& session_type, uint64_t* remote_delay_report_ns,
+    uint64_t* total_bytes_readed, timespec* data_position) {
+  PresentationPosition presentation_position;
+  auto ret_val = BluetoothAudioSessionControl::GetPresentationPosition(
+      from_session_type_2_0(session_type), presentation_position);
+  if (remote_delay_report_ns)
+    *remote_delay_report_ns = presentation_position.remoteDeviceAudioDelayNanos;
+  if (total_bytes_readed)
+    *total_bytes_readed = presentation_position.transmittedOctets;
+  if (data_position)
+    *data_position = {
+        .tv_sec = static_cast<__kernel_old_time_t>(
+            presentation_position.transmittedOctetsTimestamp.tvSec),
+        .tv_nsec = static_cast<long>(
+            presentation_position.transmittedOctetsTimestamp.tvNSec)};
+  return ret_val;
+}
+
+void HidlToAidlMiddleware_2_0::UpdateTracksMetadata(
+    const SessionType_2_0& session_type,
+    const struct source_metadata* source_metadata) {
+  return BluetoothAudioSessionControl::UpdateSourceMetadata(
+      from_session_type_2_0(session_type), *source_metadata);
+}
+
+size_t HidlToAidlMiddleware_2_0::OutWritePcmData(
+    const SessionType_2_0& session_type, const void* buffer, size_t bytes) {
+  return BluetoothAudioSessionControl::OutWritePcmData(
+      from_session_type_2_0(session_type), buffer, bytes);
+}
+
+bool HidlToAidlMiddleware_2_0::IsAidlAvailable() {
+  return BluetoothAudioSession::IsAidlAvailable();
+}
+
+/***
+ *
+ * 2.1
+ *
+ ***/
+
+const AudioConfig_2_1 HidlToAidlMiddleware_2_1::GetAudioConfig(
+    const SessionType_2_1& session_type) {
+  return to_hidl_audio_config_2_1(BluetoothAudioSessionControl::GetAudioConfig(
+      from_session_type_2_1(session_type)));
+}
+
+/***
+ *
+ * 2.2
+ *
+ ***/
+
+bool HidlToAidlMiddleware_2_2::IsSessionReady(
+    const SessionType_2_1& session_type) {
+  return BluetoothAudioSessionControl::IsSessionReady(
+      from_session_type_2_1(session_type));
+}
+
+uint16_t HidlToAidlMiddleware_2_2::RegisterControlResultCback(
+    const SessionType_2_1& session_type,
+    const PortStatusCallbacks_2_2& cbacks) {
+  LOG(INFO) << __func__ << ": " << toString(session_type);
+  auto aidl_session_type = from_session_type_2_1(session_type);
+  // Pass the exact reference to the lambda
+  auto& session_legacy_callback_table =
+      legacy_callback_table[aidl_session_type];
+  PortStatusCallbacks aidl_callbacks{};
+  if (cbacks.control_result_cb_) {
+    aidl_callbacks.control_result_cb_ =
+        [&session_legacy_callback_table](uint16_t cookie, bool start_resp,
+                                         const BluetoothAudioStatus& status) {
+          if (session_legacy_callback_table.find(cookie) ==
+              session_legacy_callback_table.end()) {
+            LOG(ERROR) << __func__ << ": Unknown callback invoked!";
+            return;
+          }
+          auto& cback = session_legacy_callback_table[cookie];
+          cback->control_result_cb_(cookie, start_resp, to_hidl_status(status));
+        };
+  }
+  if (cbacks.session_changed_cb_) {
+    aidl_callbacks.session_changed_cb_ =
+        [&session_legacy_callback_table](uint16_t cookie) {
+          if (session_legacy_callback_table.find(cookie) ==
+              session_legacy_callback_table.end()) {
+            LOG(ERROR) << __func__ << ": Unknown callback invoked!";
+            return;
+          }
+          auto& cback = session_legacy_callback_table[cookie];
+          cback->session_changed_cb_(cookie);
+        };
+  };
+  if (cbacks.audio_configuration_changed_cb_) {
+    aidl_callbacks.audio_configuration_changed_cb_ =
+        [&session_legacy_callback_table](uint16_t cookie) {
+          if (session_legacy_callback_table.find(cookie) ==
+              session_legacy_callback_table.end()) {
+            LOG(ERROR) << __func__ << ": Unknown callback invoked!";
+            return;
+          }
+          auto& cback = session_legacy_callback_table[cookie];
+          cback->audio_configuration_changed_cb_(cookie);
+        };
+  };
+  auto cookie = BluetoothAudioSessionControl::RegisterControlResultCback(
+      aidl_session_type, aidl_callbacks);
+  {
+    std::lock_guard<std::mutex> guard(legacy_callback_lock);
+    session_legacy_callback_table[cookie] =
+        std::make_shared<PortStatusCallbacks_2_2>(cbacks);
+  }
+  return cookie;
+}
+
+void HidlToAidlMiddleware_2_2::UnregisterControlResultCback(
+    const SessionType_2_1& session_type, uint16_t cookie) {
+  LOG(INFO) << __func__ << ": " << toString(session_type);
+  auto aidl_session_type = from_session_type_2_1(session_type);
+  BluetoothAudioSessionControl::UnregisterControlResultCback(aidl_session_type,
+                                                             cookie);
+  auto& session_callback_table = legacy_callback_table[aidl_session_type];
+  if (session_callback_table.find(cookie) != session_callback_table.end()) {
+    std::lock_guard<std::mutex> guard(legacy_callback_lock);
+    session_callback_table.erase(cookie);
+  }
+}
+
+const AudioConfig_2_2 HidlToAidlMiddleware_2_2::GetAudioConfig(
+    const SessionType_2_1& session_type) {
+  return to_hidl_audio_config_2_2(BluetoothAudioSessionControl::GetAudioConfig(
+      from_session_type_2_1(session_type)));
+}
+
+bool HidlToAidlMiddleware_2_2::StartStream(
+    const SessionType_2_1& session_type) {
+  return BluetoothAudioSessionControl::StartStream(
+      from_session_type_2_1(session_type));
+}
+
+bool HidlToAidlMiddleware_2_2::SuspendStream(
+    const SessionType_2_1& session_type) {
+  return BluetoothAudioSessionControl::SuspendStream(
+      from_session_type_2_1(session_type));
+}
+
+void HidlToAidlMiddleware_2_2::StopStream(const SessionType_2_1& session_type) {
+  return BluetoothAudioSessionControl::StopStream(
+      from_session_type_2_1(session_type));
+}
+
+void HidlToAidlMiddleware_2_2::UpdateSinkMetadata(
+    const SessionType_2_1& session_type,
+    const struct sink_metadata* sink_metadata) {
+  return BluetoothAudioSessionControl::UpdateSinkMetadata(
+      from_session_type_2_1(session_type), *sink_metadata);
+}
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
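All of the to_hidl_* helpers above share one pattern: a lookup table keyed on the AIDL value with an explicit UNKNOWN fallback. A generic sketch of that pattern, not part of the middleware, for illustration:

    #include <unordered_map>

    template <typename K, typename V>
    V LookupOrDefault(const std::unordered_map<K, V>& table, const K& key,
                      V fallback) {
      auto it = table.find(key);
      return it != table.end() ? it->second : fallback;
    }

    // e.g. to_hidl_codec_type_2_0() behaves like
    // LookupOrDefault(codec_type_to_hidl_2_0_map, codec_type,
    //                 CodecType_2_0::UNKNOWN).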
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_0.h b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_0.h
new file mode 100644
index 0000000..d10ee37
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_0.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/bluetooth/audio/2.0/types.h>
+
+#include "../session/BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using SessionType_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::SessionType;
+using PortStatusCallbacks_2_0 =
+    ::android::bluetooth::audio::PortStatusCallbacks;
+using AudioConfig_2_0 =
+    ::android::hardware::bluetooth::audio::V2_0::AudioConfiguration;
+
+class HidlToAidlMiddleware_2_0 {
+ public:
+  static bool IsAidlAvailable();
+
+  static bool IsSessionReady(const SessionType_2_0& session_type);
+
+  static uint16_t RegisterControlResultCback(
+      const SessionType_2_0& session_type,
+      const PortStatusCallbacks_2_0& cbacks);
+
+  static void UnregisterControlResultCback(const SessionType_2_0& session_type,
+                                           uint16_t cookie);
+
+  static const AudioConfig_2_0 GetAudioConfig(
+      const SessionType_2_0& session_type);
+
+  static bool StartStream(const SessionType_2_0& session_type);
+
+  static void StopStream(const SessionType_2_0& session_type);
+
+  static bool SuspendStream(const SessionType_2_0& session_type);
+
+  static bool GetPresentationPosition(const SessionType_2_0& session_type,
+                                      uint64_t* remote_delay_report_ns,
+                                      uint64_t* total_bytes_readed,
+                                      timespec* data_position);
+
+  static void UpdateTracksMetadata(
+      const SessionType_2_0& session_type,
+      const struct source_metadata* source_metadata);
+
+  static size_t OutWritePcmData(const SessionType_2_0& session_type,
+                                const void* buffer, size_t bytes);
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
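A minimal sketch, not part of the patch, of how legacy 2.0 callers can consume the middleware's GetPresentationPosition(); the helper name is hypothetical.

    #include <cstdint>
    #include <time.h>

    #include "HidlToAidlMiddleware_2_0.h"

    namespace bta = aidl::android::hardware::bluetooth::audio;

    uint64_t QueryRemoteDelayNs(const bta::SessionType_2_0& session_type) {
      uint64_t delay_ns = 0;
      uint64_t bytes_read = 0;
      timespec position = {};
      if (!bta::HidlToAidlMiddleware_2_0::GetPresentationPosition(
              session_type, &delay_ns, &bytes_read, &position)) {
        return 0;  // no AIDL session to report against
      }
      return delay_ns;
    }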
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_1.h b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_1.h
new file mode 100644
index 0000000..82dce96
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_1.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/bluetooth/audio/2.1/types.h>
+
+#include "../session/BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using SessionType_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::SessionType;
+using AudioConfig_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::AudioConfiguration;
+
+class HidlToAidlMiddleware_2_1 {
+ public:
+  static const AudioConfig_2_1 GetAudioConfig(
+      const SessionType_2_1& session_type);
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_2.h b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_2.h
new file mode 100644
index 0000000..149e404
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_2.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/bluetooth/audio/2.2/types.h>
+
+#include "../session/BluetoothAudioSession.h"
+#include "../session/BluetoothAudioSession_2_2.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using SessionType_2_1 =
+    ::android::hardware::bluetooth::audio::V2_1::SessionType;
+using PortStatusCallbacks_2_0 =
+    ::android::bluetooth::audio::PortStatusCallbacks;
+using PortStatusCallbacks_2_2 =
+    ::android::bluetooth::audio::PortStatusCallbacks_2_2;
+using AudioConfig_2_2 =
+    ::android::hardware::bluetooth::audio::V2_2::AudioConfiguration;
+
+class HidlToAidlMiddleware_2_2 {
+ public:
+  static bool IsSessionReady(const SessionType_2_1& session_type);
+
+  static uint16_t RegisterControlResultCback(
+      const SessionType_2_1& session_type,
+      const PortStatusCallbacks_2_2& cbacks);
+
+  static void UnregisterControlResultCback(const SessionType_2_1& session_type,
+                                           uint16_t cookie);
+
+  static const AudioConfig_2_2 GetAudioConfig(
+      const SessionType_2_1& session_type);
+
+  static bool StartStream(const SessionType_2_1& session_type);
+
+  static bool SuspendStream(const SessionType_2_1& session_type);
+
+  static void StopStream(const SessionType_2_1& session_type);
+
+  static void UpdateSinkMetadata(const SessionType_2_1& session_type,
+                                 const struct sink_metadata* sink_metadata);
+};
+
+}  // namespace audio
+}  // namespace bluetooth
+}  // namespace hardware
+}  // namespace android
+}  // namespace aidl
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession.cpp b/bluetooth/audio/utils/session/BluetoothAudioSession.cpp
index 2f3ddaf..6d5608b 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession.cpp
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession.cpp
@@ -21,10 +21,13 @@
 #include <android-base/logging.h>
 #include <android-base/stringprintf.h>
 
+#include "../aidl_session/HidlToAidlMiddleware_2_0.h"
+
 namespace android {
 namespace bluetooth {
 namespace audio {
 
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_0;
 using ::android::hardware::audio::common::V5_0::AudioContentType;
 using ::android::hardware::audio::common::V5_0::AudioUsage;
 using ::android::hardware::audio::common::V5_0::PlaybackTrackMetadata;
@@ -149,6 +152,8 @@
 // The function helps to check if this session is ready or not
 // @return: true if the Bluetooth stack has started the specified session
 bool BluetoothAudioSession::IsSessionReady() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::IsSessionReady(session_type_);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   bool dataMQ_valid =
       (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_DATAPATH ||
@@ -200,6 +205,9 @@
 // @return: cookie - the assigned number to this bluetooth_audio output
 uint16_t BluetoothAudioSession::RegisterStatusCback(
     const PortStatusCallbacks& cbacks) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::RegisterControlResultCback(session_type_,
+                                                                cbacks);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   uint16_t cookie = ObserversCookieGetInitValue(session_type_);
   uint16_t cookie_upper_bound = ObserversCookieGetUpperBound(session_type_);
@@ -227,6 +235,9 @@
 // PortStatusCallbacks
 // @param: cookie - indicates which bluetooth_audio output is
 void BluetoothAudioSession::UnregisterStatusCback(uint16_t cookie) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::UnregisterControlResultCback(session_type_,
+                                                                  cookie);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (observers_.erase(cookie) != 1) {
     LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
@@ -238,6 +249,9 @@
 // The control function is for the bluetooth_audio module to get the current
 // AudioConfiguration
 const AudioConfiguration& BluetoothAudioSession::GetAudioConfig() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return (audio_config_ =
+                HidlToAidlMiddleware_2_0::GetAudioConfig(session_type_));
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (IsSessionReady()) {
     return audio_config_;
@@ -251,6 +265,8 @@
 // Those control functions are for the bluetooth_audio module to start, suspend,
 // stop stream, to check position, and to update metadata.
 bool BluetoothAudioSession::StartStream() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::StartStream(session_type_);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -267,6 +283,8 @@
 }
 
 bool BluetoothAudioSession::SuspendStream() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::SuspendStream(session_type_);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -283,6 +301,8 @@
 }
 
 void BluetoothAudioSession::StopStream() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::StopStream(session_type_);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (!IsSessionReady()) {
     return;
@@ -297,6 +317,10 @@
 bool BluetoothAudioSession::GetPresentationPosition(
     uint64_t* remote_delay_report_ns, uint64_t* total_bytes_readed,
     timespec* data_position) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::GetPresentationPosition(
+        session_type_, remote_delay_report_ns, total_bytes_readed,
+        data_position);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -330,6 +354,9 @@
 
 void BluetoothAudioSession::UpdateTracksMetadata(
     const struct source_metadata* source_metadata) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::UpdateTracksMetadata(session_type_,
+                                                          source_metadata);
   std::lock_guard<std::recursive_mutex> guard(mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -374,6 +401,9 @@
 // The control function writes stream to FMQ
 size_t BluetoothAudioSession::OutWritePcmData(const void* buffer,
                                               size_t bytes) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_0::OutWritePcmData(session_type_, buffer,
+                                                     bytes);
   if (buffer == nullptr || !bytes) return 0;
   size_t totalWritten = 0;
   int ms_timeout = kFmqSendTimeoutMs;
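
Each of the hunks above adds the same guard at the top of a control function: when the Bluetooth stack has registered an AIDL provider, the legacy HIDL session forwards the call to HidlToAidlMiddleware_2_0 and skips its own FMQ/observer bookkeeping; otherwise the original path runs unchanged. Below is a self-contained sketch of that pattern only; FakeMiddleware, LegacyStartStream(), and main() are stand-ins for illustration, not the real classes.

    // Sketch of the "forward to AIDL if available, else legacy HIDL path" guard.
    #include <iostream>

    struct FakeMiddleware {
      static bool aidl_available;
      static bool IsAidlAvailable() { return aidl_available; }
      static bool StartStream() {
        std::cout << "forwarded to AIDL session\n";
        return true;
      }
    };
    bool FakeMiddleware::aidl_available = false;

    static bool LegacyStartStream() {
      std::cout << "legacy HIDL FMQ path\n";
      return true;
    }

    bool StartStream() {
      // Same shape as the guards added above: check once, then delegate.
      if (FakeMiddleware::IsAidlAvailable()) return FakeMiddleware::StartStream();
      return LegacyStartStream();
    }

    int main() {
      StartStream();                         // takes the legacy path
      FakeMiddleware::aidl_available = true;
      StartStream();                         // now forwarded
    }
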
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp
index bf1f9b5..276a291 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp
@@ -21,9 +21,14 @@
 #include <android-base/logging.h>
 #include <android-base/stringprintf.h>
 
+#include "../aidl_session/HidlToAidlMiddleware_2_0.h"
+#include "../aidl_session/HidlToAidlMiddleware_2_1.h"
+
 namespace android {
 namespace bluetooth {
 namespace audio {
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_0;
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_1;
 using SessionType_2_1 =
     ::android::hardware::bluetooth::audio::V2_1::SessionType;
 using SessionType_2_0 =
@@ -72,6 +77,7 @@
   } else {
     session_type_2_1_ = (session_type);
   }
+  raw_session_type_ = session_type;
 }
 
 std::shared_ptr<BluetoothAudioSession>
@@ -83,6 +89,8 @@
 // AudioConfiguration
 const ::android::hardware::bluetooth::audio::V2_1::AudioConfiguration
 BluetoothAudioSession_2_1::GetAudioConfig() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_1::GetAudioConfig(raw_session_type_);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (audio_session->IsSessionReady()) {
     // If session is unknown it means it should be 2.0 type
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h
index 5a35153..e634064 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h
@@ -31,6 +31,7 @@
   std::shared_ptr<BluetoothAudioSession> audio_session;
 
   ::android::hardware::bluetooth::audio::V2_1::SessionType session_type_2_1_;
+  ::android::hardware::bluetooth::audio::V2_1::SessionType raw_session_type_;
 
   // audio data configuration for both software and offloading
   ::android::hardware::bluetooth::audio::V2_1::AudioConfiguration
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp
index 60ac4ec..4613ddc 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp
@@ -22,10 +22,15 @@
 #include <android-base/stringprintf.h>
 #include <android/hardware/bluetooth/audio/2.2/IBluetoothAudioPort.h>
 
+#include "../aidl_session/HidlToAidlMiddleware_2_0.h"
+#include "../aidl_session/HidlToAidlMiddleware_2_2.h"
+
 namespace android {
 namespace bluetooth {
 namespace audio {
 
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_0;
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_2;
 using ::android::hardware::audio::common::V5_0::AudioSource;
 using ::android::hardware::audio::common::V5_0::RecordTrackMetadata;
 using ::android::hardware::audio::common::V5_0::SinkMetadata;
@@ -93,6 +98,7 @@
   } else {
     session_type_2_1_ = (session_type);
   }
+  raw_session_type_ = session_type;
   invalidSoftwareAudioConfiguration.pcmConfig(kInvalidPcmParameters);
   invalidOffloadAudioConfiguration.codecConfig(
       audio_session->kInvalidCodecConfiguration);
@@ -100,6 +106,8 @@
 }
 
 bool BluetoothAudioSession_2_2::IsSessionReady() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::IsSessionReady(raw_session_type_);
   if (session_type_2_1_ !=
           SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
       session_type_2_1_ !=
@@ -122,6 +130,9 @@
 
 void BluetoothAudioSession_2_2::UpdateSinkMetadata(
     const struct sink_metadata* sink_metadata) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::UpdateSinkMetadata(raw_session_type_,
+                                                        sink_metadata);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
@@ -172,6 +183,8 @@
 // AudioConfiguration
 const ::android::hardware::bluetooth::audio::V2_2::AudioConfiguration
 BluetoothAudioSession_2_2::GetAudioConfig() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::GetAudioConfig(raw_session_type_);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (IsSessionReady()) {
     auto audio_config_discriminator = audio_config_2_2_.getDiscriminator();
@@ -226,6 +239,8 @@
 // Those control functions are for the bluetooth_audio module to start, suspend,
 // stop stream, to check position, and to update metadata.
 bool BluetoothAudioSession_2_2::StartStream() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::StartStream(raw_session_type_);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
@@ -242,6 +257,8 @@
 }
 
 bool BluetoothAudioSession_2_2::SuspendStream() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::SuspendStream(raw_session_type_);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (!IsSessionReady()) {
     LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
@@ -258,6 +275,8 @@
 }
 
 void BluetoothAudioSession_2_2::StopStream() {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::StopStream(raw_session_type_);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (!IsSessionReady()) {
     return;
@@ -395,6 +414,9 @@
 // @return: cookie - the assigned number to this bluetooth_audio output
 uint16_t BluetoothAudioSession_2_2::RegisterStatusCback(
     const PortStatusCallbacks_2_2& cbacks) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::RegisterControlResultCback(
+        raw_session_type_, cbacks);
   if (session_type_2_1_ !=
           SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
       session_type_2_1_ !=
@@ -432,6 +454,9 @@
 // PortStatusCallbacks_2_2
 // @param: cookie - indicates which bluetooth_audio output is
 void BluetoothAudioSession_2_2::UnregisterStatusCback(uint16_t cookie) {
+  if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+    return HidlToAidlMiddleware_2_2::UnregisterControlResultCback(
+        raw_session_type_, cookie);
   std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
   if (session_type_2_1_ !=
           SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h
index 3673fd8..b6f96ab 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h
@@ -68,6 +68,7 @@
   std::shared_ptr<BluetoothAudioSession_2_1> audio_session_2_1;
 
   ::android::hardware::bluetooth::audio::V2_1::SessionType session_type_2_1_;
+  ::android::hardware::bluetooth::audio::V2_1::SessionType raw_session_type_;
 
   // audio data configuration for both software and offloading
   ::android::hardware::bluetooth::audio::V2_2::AudioConfiguration
diff --git a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
index c89d983..37d2973 100644
--- a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
+++ b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
@@ -935,7 +935,6 @@
             camera_metadata_ro_entry* streamConfigs,
             camera_metadata_ro_entry* maxResolutionStreamConfigs,
             const camera_metadata_t* staticMetadata);
-    static bool isColorCamera(const camera_metadata_t *metadata);
 
     static V3_2::DataspaceFlags getDataspace(PixelFormat format);
 
@@ -6187,141 +6186,6 @@
     }
 }
 
-// Test the multi-camera API requirement for Google Requirement Freeze S
-// Note that this requirement can only be partially tested. If a vendor
-// device doesn't expose a physical camera in any shape or form, there is no way
-// the test can catch it.
-TEST_P(CameraHidlTest, grfSMultiCameraTest) {
-    const int socGrfApi = property_get_int32("ro.board.first_api_level", /*default*/ -1);
-    if (socGrfApi < 31 /*S*/) {
-        // Non-GRF devices, or version < 31 Skip
-        ALOGI("%s: socGrfApi level is %d. Skipping", __FUNCTION__, socGrfApi);
-        return;
-    }
-
-    // Test that if more than one rear-facing color camera is
-    // supported, there must be at least one rear-facing logical camera.
-    hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(mProvider);
-    // Back facing non-logical color cameras
-    std::set<std::string> rearColorCameras;
-    // Back facing logical cameras' physical camera Id sets
-    std::set<std::set<std::string>> rearPhysicalIds;
-    for (const auto& name : cameraDeviceNames) {
-        std::string cameraId;
-        int deviceVersion = getCameraDeviceVersionAndId(name, mProviderType, &cameraId);
-        switch (deviceVersion) {
-            case CAMERA_DEVICE_API_VERSION_3_7:
-            case CAMERA_DEVICE_API_VERSION_3_6:
-            case CAMERA_DEVICE_API_VERSION_3_5:
-            case CAMERA_DEVICE_API_VERSION_3_4:
-            case CAMERA_DEVICE_API_VERSION_3_3:
-            case CAMERA_DEVICE_API_VERSION_3_2: {
-                ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
-                ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
-                Return<void> ret;
-                ret = mProvider->getCameraDeviceInterface_V3_x(
-                        name, [&](auto status, const auto& device) {
-                            ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
-                            ASSERT_EQ(Status::OK, status);
-                            ASSERT_NE(device, nullptr);
-                            device3_x = device;
-                        });
-                ASSERT_TRUE(ret.isOk());
-
-                ret = device3_x->getCameraCharacteristics([&](auto status, const auto& chars) {
-                    ASSERT_EQ(Status::OK, status);
-                    const camera_metadata_t* metadata = (camera_metadata_t*)chars.data();
-
-                    // Skip if this is not a color camera.
-                    if (!CameraHidlTest::isColorCamera(metadata)) {
-                        return;
-                    }
-
-                    // Check camera facing. Skip if facing is not BACK.
-                    // If this is not a logical camera, only note down
-                    // the camera ID, and skip.
-                    camera_metadata_ro_entry entry;
-                    int retcode = find_camera_metadata_ro_entry(
-                            metadata, ANDROID_LENS_FACING, &entry);
-                    ASSERT_EQ(retcode, 0);
-                    ASSERT_GT(entry.count, 0);
-                    uint8_t facing = entry.data.u8[0];
-                    bool isLogicalCamera = (isLogicalMultiCamera(metadata) == Status::OK);
-                    if (facing != ANDROID_LENS_FACING_BACK) {
-                        // Not BACK facing. Skip.
-                        return;
-                    }
-                    if (!isLogicalCamera) {
-                        rearColorCameras.insert(cameraId);
-                        return;
-                    }
-
-                    // Check logical camera's physical camera IDs for color
-                    // cameras.
-                    std::unordered_set<std::string> physicalCameraIds;
-                    Status s = getPhysicalCameraIds(metadata, &physicalCameraIds);
-                    ASSERT_EQ(Status::OK, s);
-                    rearPhysicalIds.emplace(physicalCameraIds.begin(), physicalCameraIds.end());
-                    for (const auto& physicalId : physicalCameraIds) {
-                        // Skip if the physicalId is publicly available
-                        for (auto& deviceName : cameraDeviceNames) {
-                            std::string publicVersion, publicId;
-                            ASSERT_TRUE(::matchDeviceName(deviceName, mProviderType,
-                                                          &publicVersion, &publicId));
-                            if (physicalId == publicId) {
-                                // Skip because public Ids will be iterated in outer loop.
-                                return;
-                            }
-                        }
-
-                        auto castResult = device::V3_5::ICameraDevice::castFrom(device3_x);
-                        ASSERT_TRUE(castResult.isOk());
-                        ::android::sp<::android::hardware::camera::device::V3_5::ICameraDevice>
-                                device3_5 = castResult;
-                        ASSERT_NE(device3_5, nullptr);
-
-                        // Check camera characteristics for hidden camera id
-                        Return<void> ret = device3_5->getPhysicalCameraCharacteristics(
-                                physicalId, [&](auto status, const auto& chars) {
-                            ASSERT_EQ(Status::OK, status);
-                            const camera_metadata_t* physicalMetadata =
-                                    (camera_metadata_t*)chars.data();
-
-                            if (CameraHidlTest::isColorCamera(physicalMetadata)) {
-                                rearColorCameras.insert(physicalId);
-                            }
-                        });
-                        ASSERT_TRUE(ret.isOk());
-                    }
-                });
-                ASSERT_TRUE(ret.isOk());
-            } break;
-            case CAMERA_DEVICE_API_VERSION_1_0: {
-                // Not applicable
-            } break;
-            default: {
-                ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
-                ADD_FAILURE();
-            } break;
-        }
-    }
-
-    // If there are more than one rear-facing color camera, a logical
-    // multi-camera must be defined consisting of all rear-facing color
-    // cameras.
-    if (rearColorCameras.size() > 1) {
-        bool hasRearLogical = false;
-        for (const auto& physicalIds : rearPhysicalIds) {
-            if (std::includes(physicalIds.begin(), physicalIds.end(),
-                    rearColorCameras.begin(), rearColorCameras.end())) {
-                hasRearLogical = true;
-                break;
-            }
-        }
-        ASSERT_TRUE(hasRearLogical);
-    }
-}
-
 // Retrieve all valid output stream resolutions from the camera
 // static characteristics.
 Status CameraHidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
@@ -6794,23 +6658,6 @@
     return ret;
 }
 
-bool CameraHidlTest::isColorCamera(const camera_metadata_t *metadata) {
-    camera_metadata_ro_entry entry;
-    int retcode = find_camera_metadata_ro_entry(
-            metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
-    if ((0 == retcode) && (entry.count > 0)) {
-        bool isBackwardCompatible = (std::find(entry.data.u8, entry.data.u8 + entry.count,
-                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) !=
-                entry.data.u8 + entry.count);
-        bool isMonochrome = (std::find(entry.data.u8, entry.data.u8 + entry.count,
-                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) !=
-                entry.data.u8 + entry.count);
-        bool isColor = isBackwardCompatible && !isMonochrome;
-        return isColor;
-    }
-    return false;
-}
-
 // Retrieve the reprocess input-output format map from the static
 // camera characteristics.
 Status CameraHidlTest::getZSLInputOutputMap(camera_metadata_t *staticMeta,
diff --git a/common/support/Android.bp b/common/support/Android.bp
index b24893b..718901e 100644
--- a/common/support/Android.bp
+++ b/common/support/Android.bp
@@ -11,7 +11,11 @@
     name: "libaidlcommonsupport",
     vendor_available: true,
     host_supported: true,
-    defaults: ["libbinder_ndk_host_user"],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
     srcs: ["NativeHandle.cpp"],
     export_include_dirs: ["include"],
     shared_libs: [
@@ -28,7 +32,11 @@
 cc_test {
     name: "libaidlcommonsupport_test",
     host_supported: true,
-    defaults: ["libbinder_ndk_host_user"],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
     srcs: ["test.cpp"],
     static_libs: [
         "android.hardware.common-V2-ndk",
diff --git a/compatibility_matrices/compatibility_matrix.current.xml b/compatibility_matrices/compatibility_matrix.current.xml
index ffbcb36..2d8a82e 100644
--- a/compatibility_matrices/compatibility_matrix.current.xml
+++ b/compatibility_matrices/compatibility_matrix.current.xml
@@ -292,7 +292,7 @@
     </hal>
     <hal format="aidl" optional="true">
         <name>android.hardware.identity</name>
-        <version>1-3</version>
+        <version>1-4</version>
         <interface>
             <name>IIdentityCredentialStore</name>
             <instance>default</instance>
@@ -411,7 +411,7 @@
     </hal>
     <hal format="aidl" optional="true">
         <name>android.hardware.neuralnetworks</name>
-        <version>1-3</version>
+        <version>1-4</version>
         <interface>
             <name>IDevice</name>
             <regex-instance>.*</regex-instance>
@@ -628,7 +628,7 @@
             <instance>default</instance>
         </interface>
     </hal>
-    <hal format="hidl" optional="true">
+    <hal format="hidl" optional="false">
         <name>android.hardware.thermal</name>
         <version>2.0</version>
         <interface>
diff --git a/current.txt b/current.txt
index 21ee123..7718b98 100644
--- a/current.txt
+++ b/current.txt
@@ -903,6 +903,7 @@
 
 # ABI preserving changes to HALs during Android T
 62ace52d9c3ff1f60f94118557a2aaf0b953513e59dcd34d5f94ae28d4c7e780 android.hardware.fastboot@1.0::IFastboot
+d0fb32f3ddeb9af7115ab32905225ea69b930d2472be8e9610f0cf136c15aefb android.hardware.keymaster@4.0::IKeymasterDevice # b/210424594
 ca62a2a95d173ed323309e5e00f653ad3cceec82a6e5e4976a249cb5aafe2515 android.hardware.neuralnetworks@1.2::types
 fa76bced6b1b71c40fc706c508a9011284c57f57831cd0cf5f45653ed4ea463e android.hardware.neuralnetworks@1.3::types
 
diff --git a/graphics/composer/2.1/default/OWNERS b/graphics/composer/2.1/default/OWNERS
index 709c4d1..331c80d 100644
--- a/graphics/composer/2.1/default/OWNERS
+++ b/graphics/composer/2.1/default/OWNERS
@@ -1,3 +1,4 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.1/utils/OWNERS b/graphics/composer/2.1/utils/OWNERS
index 7af69b4..83c4f5f 100644
--- a/graphics/composer/2.1/utils/OWNERS
+++ b/graphics/composer/2.1/utils/OWNERS
@@ -1,2 +1,3 @@
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.1/vts/OWNERS b/graphics/composer/2.1/vts/OWNERS
index ea06752..a643bbd 100644
--- a/graphics/composer/2.1/vts/OWNERS
+++ b/graphics/composer/2.1/vts/OWNERS
@@ -1,5 +1,6 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
 
 # VTS team
diff --git a/graphics/composer/2.1/vts/functional/OWNERS b/graphics/composer/2.1/vts/functional/OWNERS
index a2ed8c8..3d970d1 100644
--- a/graphics/composer/2.1/vts/functional/OWNERS
+++ b/graphics/composer/2.1/vts/functional/OWNERS
@@ -1,2 +1,4 @@
 # Bug component: 25423
+adyabr@google.com
+alecmouri@google.com
 sumir@google.com
diff --git a/graphics/composer/2.2/default/OWNERS b/graphics/composer/2.2/default/OWNERS
index 709c4d1..e8f584d 100644
--- a/graphics/composer/2.2/default/OWNERS
+++ b/graphics/composer/2.2/default/OWNERS
@@ -1,3 +1,5 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
+
diff --git a/graphics/composer/2.2/utils/OWNERS b/graphics/composer/2.2/utils/OWNERS
index 709c4d1..331c80d 100644
--- a/graphics/composer/2.2/utils/OWNERS
+++ b/graphics/composer/2.2/utils/OWNERS
@@ -1,3 +1,4 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.2/vts/functional/OWNERS b/graphics/composer/2.2/vts/functional/OWNERS
index 31b0dc7..a4eb0ca 100644
--- a/graphics/composer/2.2/vts/functional/OWNERS
+++ b/graphics/composer/2.2/vts/functional/OWNERS
@@ -1,5 +1,6 @@
 # Bug component: 25423
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
 sumir@google.com
diff --git a/graphics/composer/2.3/default/OWNERS b/graphics/composer/2.3/default/OWNERS
index 709c4d1..331c80d 100644
--- a/graphics/composer/2.3/default/OWNERS
+++ b/graphics/composer/2.3/default/OWNERS
@@ -1,3 +1,4 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.3/utils/OWNERS b/graphics/composer/2.3/utils/OWNERS
index 709c4d1..331c80d 100644
--- a/graphics/composer/2.3/utils/OWNERS
+++ b/graphics/composer/2.3/utils/OWNERS
@@ -1,3 +1,4 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.3/vts/functional/OWNERS b/graphics/composer/2.3/vts/functional/OWNERS
index 31b0dc7..a4eb0ca 100644
--- a/graphics/composer/2.3/vts/functional/OWNERS
+++ b/graphics/composer/2.3/vts/functional/OWNERS
@@ -1,5 +1,6 @@
 # Bug component: 25423
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
 sumir@google.com
diff --git a/graphics/composer/2.4/default/OWNERS b/graphics/composer/2.4/default/OWNERS
index 709c4d1..331c80d 100644
--- a/graphics/composer/2.4/default/OWNERS
+++ b/graphics/composer/2.4/default/OWNERS
@@ -1,3 +1,4 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.4/utils/OWNERS b/graphics/composer/2.4/utils/OWNERS
index 709c4d1..331c80d 100644
--- a/graphics/composer/2.4/utils/OWNERS
+++ b/graphics/composer/2.4/utils/OWNERS
@@ -1,3 +1,4 @@
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
diff --git a/graphics/composer/2.4/vts/functional/OWNERS b/graphics/composer/2.4/vts/functional/OWNERS
index 31b0dc7..a4eb0ca 100644
--- a/graphics/composer/2.4/vts/functional/OWNERS
+++ b/graphics/composer/2.4/vts/functional/OWNERS
@@ -1,5 +1,6 @@
 # Bug component: 25423
 # Graphics team
 adyabr@google.com
+alecmouri@google.com
 lpy@google.com
 sumir@google.com
diff --git a/health/aidl/Android.bp b/health/aidl/Android.bp
index 22bb4fa..86bca69 100644
--- a/health/aidl/Android.bp
+++ b/health/aidl/Android.bp
@@ -62,9 +62,11 @@
         "android.hardware.health@2.0",
         "android.hardware.health@2.1",
     ],
-    defaults: [
-        "libbinder_ndk_host_user",
-    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 java_library {
diff --git a/health/utils/libhealthshim/Android.bp b/health/utils/libhealthshim/Android.bp
index 311e951..42e4ea7 100644
--- a/health/utils/libhealthshim/Android.bp
+++ b/health/utils/libhealthshim/Android.bp
@@ -24,9 +24,11 @@
 cc_defaults {
     name: "libhealthshim_defaults",
     host_supported: true, // for testing
-    defaults: [
-        "libbinder_ndk_host_user",
-    ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/Certificate.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/Certificate.aidl
index d8a8128..83e1797 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/Certificate.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/Certificate.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/CipherSuite.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/CipherSuite.aidl
index 2685525..e6ec04e 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/CipherSuite.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/CipherSuite.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/HardwareInformation.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/HardwareInformation.aidl
index f8d5a9e..cd8d56b 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/HardwareInformation.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/HardwareInformation.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredential.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredential.aidl
index 3224e4b..5065641 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredential.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredential.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredentialStore.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredentialStore.aidl
index c6fb3c8..c912c52 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredentialStore.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IIdentityCredentialStore.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
@@ -36,6 +37,7 @@
   android.hardware.identity.HardwareInformation getHardwareInformation();
   android.hardware.identity.IWritableIdentityCredential createCredential(in @utf8InCpp String docType, in boolean testCredential);
   android.hardware.identity.IIdentityCredential getCredential(in android.hardware.identity.CipherSuite cipherSuite, in byte[] credentialData);
+  android.hardware.identity.IPresentationSession createPresentationSession(in android.hardware.identity.CipherSuite cipherSuite);
   const int STATUS_OK = 0;
   const int STATUS_FAILED = 1;
   const int STATUS_CIPHER_SUITE_NOT_SUPPORTED = 2;
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IPresentationSession.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IPresentationSession.aidl
new file mode 100644
index 0000000..705dc29
--- /dev/null
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IPresentationSession.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.identity;
+@VintfStability
+interface IPresentationSession {
+  byte[] getEphemeralKeyPair();
+  long getAuthChallenge();
+  void setReaderEphemeralPublicKey(in byte[] publicKey);
+  void setSessionTranscript(in byte[] sessionTranscript);
+  android.hardware.identity.IIdentityCredential getCredential(in byte[] credentialData);
+}
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IWritableIdentityCredential.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IWritableIdentityCredential.aidl
index 19a29ec..9a0fa9e 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IWritableIdentityCredential.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/IWritableIdentityCredential.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestDataItem.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestDataItem.aidl
index c9c2b9f..cec8e0c 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestDataItem.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestDataItem.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestNamespace.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestNamespace.aidl
index aaf1e20..05b9ec2 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestNamespace.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/RequestNamespace.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/SecureAccessControlProfile.aidl b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/SecureAccessControlProfile.aidl
index 695fb3f..2003594 100644
--- a/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/SecureAccessControlProfile.aidl
+++ b/identity/aidl/aidl_api/android.hardware.identity/current/android/hardware/identity/SecureAccessControlProfile.aidl
@@ -12,7 +12,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *////////////////////////////////////////////////////////////////////////////////
+ */
+///////////////////////////////////////////////////////////////////////////////
 // THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
 ///////////////////////////////////////////////////////////////////////////////
 
diff --git a/identity/aidl/android/hardware/identity/IIdentityCredential.aidl b/identity/aidl/android/hardware/identity/IIdentityCredential.aidl
index 8ae293b..84d6ed0 100644
--- a/identity/aidl/android/hardware/identity/IIdentityCredential.aidl
+++ b/identity/aidl/android/hardware/identity/IIdentityCredential.aidl
@@ -17,9 +17,9 @@
 package android.hardware.identity;
 
 import android.hardware.identity.Certificate;
+import android.hardware.identity.IWritableIdentityCredential;
 import android.hardware.identity.RequestNamespace;
 import android.hardware.identity.SecureAccessControlProfile;
-import android.hardware.identity.IWritableIdentityCredential;
 import android.hardware.keymaster.HardwareAuthToken;
 import android.hardware.keymaster.VerificationToken;
 
@@ -44,6 +44,9 @@
      * This method was deprecated in API version 3 because there's no challenge so freshness
      * can't be checked. Use deleteCredentalWithChallenge() instead.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @return a COSE_Sign1 signature described above
      * @deprecated use deleteCredentalWithChallenge() instead.
      */
@@ -60,6 +63,9 @@
      * This method may only be called once per instance. If called more than once, STATUS_FAILED
      * will be returned.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @return the private key, in DER format as specified in RFC 5915.
      */
     byte[] createEphemeralKeyPair();
@@ -70,6 +76,9 @@
      * This method may only be called once per instance. If called more than once, STATUS_FAILED
      * will be returned.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @param publicKey contains the reader's ephemeral public key, in uncompressed
      *        form (e.g. 0x04 || X || Y).
      */
@@ -83,6 +92,9 @@
      * This method may only be called once per instance. If called more than once, STATUS_FAILED
      * will be returned. If user authentication is not needed, this method may not be called.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @return challenge, a non-zero number.
      */
     long createAuthChallenge();
@@ -371,6 +383,9 @@
      * This CBOR enables an issuer to determine the exact state of the credential it
      * returns issuer-signed data for.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @param out signingKeyBlob contains an AES-GCM-ENC(storageKey, R, signingKey, docType)
      *     where signingKey is an EC private key in uncompressed form. That is, the returned
      *     blob is an encrypted copy of the newly-generated private signing key.
@@ -420,6 +435,9 @@
      *
      * This method was introduced in API version 3.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @param challenge a challenge set by the issuer to ensure freshness. Maximum size is 32 bytes
      *     and it may be empty. Fails with STATUS_INVALID_DATA if bigger than 32 bytes.
      * @return a COSE_Sign1 signature described above.
@@ -442,6 +460,9 @@
      *
      * This method was introduced in API version 3.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @param challenge a challenge set by the issuer to ensure freshness. Maximum size is 32 bytes
      *     and it may be empty. Fails with STATUS_INVALID_DATA if bigger than 32 bytes.
      * @return a COSE_Sign1 signature described above.
@@ -456,6 +477,9 @@
      *
      * This method was introduced in API version 3.
      *
+     * If the method is called on an instance obtained via IPresentationSession.getCredential(),
+     * STATUS_FAILED must be returned.
+     *
      * @return an IWritableIdentityCredential
      */
     IWritableIdentityCredential updateCredential();
diff --git a/identity/aidl/android/hardware/identity/IIdentityCredentialStore.aidl b/identity/aidl/android/hardware/identity/IIdentityCredentialStore.aidl
index 638be79..86be7f5 100644
--- a/identity/aidl/android/hardware/identity/IIdentityCredentialStore.aidl
+++ b/identity/aidl/android/hardware/identity/IIdentityCredentialStore.aidl
@@ -16,10 +16,11 @@
 
 package android.hardware.identity;
 
-import android.hardware.identity.IIdentityCredential;
-import android.hardware.identity.IWritableIdentityCredential;
-import android.hardware.identity.HardwareInformation;
 import android.hardware.identity.CipherSuite;
+import android.hardware.identity.HardwareInformation;
+import android.hardware.identity.IIdentityCredential;
+import android.hardware.identity.IPresentationSession;
+import android.hardware.identity.IWritableIdentityCredential;
 
 /**
  * IIdentityCredentialStore provides an interface to a secure store for user identity documents.
@@ -105,7 +106,7 @@
  * STATUS_* integers defined in this interface. Each method states which status can be returned
  * and under which circumstances.
  *
- * The API described here is API version 3 which corresponds to feature version 202101
+ * The API described here is API version 4 which corresponds to feature version 202201
  * of the android.security.identity Framework API. An XML file declaring the feature
  * android.hardware.identity_credential (or android.hardware.identity_credential.direct_access
  * if implementing the Direct Access HAL) should be included declaring this feature version.
@@ -241,4 +242,25 @@
      * @return an IIdentityCredential interface that provides operations on the Credential.
      */
     IIdentityCredential getCredential(in CipherSuite cipherSuite, in byte[] credentialData);
+
+    /**
+     * createPresentationSession creates IPresentationSession interface which can be used to
+     * present one or more credentials to a remote verifier device.
+     *
+     * The cipher suite used to communicate with the remote verifier must be specified. Currently
+     * only a single cipher suite is supported. Support for other cipher suites may be added in a
+     * future version of this HAL. If the requested cipher suite is not supported, the call fails
+     * with STATUS_CIPHER_SUITE_NOT_SUPPORTED.
+     *
+     * In this version of the HAL, implementations are only required to support a single session
+     * being active. In a future version, implementations may be required to support multiple
+     * presentation sessions being active at the same time.
+     *
+     * This method was introduced in API version 4.
+     *
+     * @param cipherSuite The cipher suite to use.
+     *
+     * @return an IPresentationSession interface.
+     */
+    IPresentationSession createPresentationSession(in CipherSuite cipherSuite);
 }
diff --git a/identity/aidl/android/hardware/identity/IPresentationSession.aidl b/identity/aidl/android/hardware/identity/IPresentationSession.aidl
new file mode 100644
index 0000000..b0449f0
--- /dev/null
+++ b/identity/aidl/android/hardware/identity/IPresentationSession.aidl
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.identity;
+
+import android.hardware.identity.CipherSuite;
+import android.hardware.identity.IIdentityCredential;
+
+/**
+ * An interface to present multiple credentials in the same session.
+ *
+ * This interface was introduced in API version 4.
+ *
+ */
+@VintfStability
+interface IPresentationSession {
+    /**
+     * Gets the ephemeral EC key pair to be used in establishing a secure session with a reader.
+     * This method returns the private key so the caller can perform an ECDH key agreement operation
+     * with the reader.  The reason for generating the key pair in the secure environment is so that
+     * the secure environment knows what public key to expect to find in the session transcript
+     * when presenting credentials.
+     *
+     * The generated key matches the selected cipher suite of the presentation session (e.g. EC
+     * key using the P-256 curve).
+     *
+     * @return the private key, in DER format as specified in RFC 5915.
+     */
+    byte[] getEphemeralKeyPair();
+
+    /**
+     * Gets the challenge value to be used for proving successful user authentication. This
+     * is to be included in the authToken passed to the IIdentityCredential.startRetrieval()
+     * method and the verificationToken passed to the IIdentityCredential.setVerificationToken()
+     * method.
+     *
+     * @return challenge, a non-zero number.
+     */
+    long getAuthChallenge();
+
+    /**
+     * Sets the public part of the reader's ephemeral key pair to be used to complete
+     * an ECDH key agreement for the session.
+     *
+     * The curve of the key must match the curve for the key returned by getEphemeralKeyPair().
+     *
+     * This method may only be called once per instance. If called more than once, STATUS_FAILED
+     * must be returned.
+     *
+     * @param publicKey contains the reader's ephemeral public key, in uncompressed
+     *        form (e.g. 0x04 || X || Y).
+     */
+    void setReaderEphemeralPublicKey(in byte[] publicKey);
+
+    /**
+     * Sets the session transcript for the session.
+     *
+     * This can be empty but if it's non-empty it must be valid CBOR.
+     *
+     * This method may only be called once per instance. If called more than once, STATUS_FAILED
+     * must be returned.
+     *
+     * @param sessionTranscript the session transcript.
+     */
+    void setSessionTranscript(in byte[] sessionTranscript);
+
+    /**
+     * getCredential() retrieves an IIdentityCredential interface for presentation in the
+     * current presentation session.
+     *
+     * On the returned instance only the methods startRetrieval(), startRetrieveEntryValue(),
+     * retrieveEntryValue(), finishRetrieval(), setRequestedNamespaces(), setVerificationToken()
+     * may be called. Other methods will fail with STATUS_FAILED.
+     *
+     * The implementation is expected to get the session transcript, ephemeral key, reader
+     * ephemeral key, and auth challenge from this instance.
+     *
+     * @param credentialData is a CBOR-encoded structure containing metadata about the credential
+     *     and an encrypted byte array that contains data used to secure the credential.  See the
+     *     return argument of the same name in IWritableIdentityCredential.finishAddingEntries().
+     *
+     *     Note that the format of credentialData may depend on the feature version.
+     *     Implementations must support credentialData created by an earlier feature version.
+     *
+     * @return an IIdentityCredential interface that provides operations on the Credential.
+     */
+    IIdentityCredential getCredential(in byte[] credentialData);
+}
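
For orientation, a rough verifier-side flow over the new presentation-session API might look like the sketch below, written against the NDK backend. The service instance name, the CipherSuite constant, and the elided status checks are assumptions for illustration, not part of this change; the call order (create the session, exchange ephemeral keys, set the transcript, then fetch credentials) follows the interface comments above.

    // Sketch only: instance string and cipher-suite constant are assumed.
    #include <cstdint>
    #include <memory>
    #include <vector>

    #include <aidl/android/hardware/identity/IIdentityCredential.h>
    #include <aidl/android/hardware/identity/IIdentityCredentialStore.h>
    #include <aidl/android/hardware/identity/IPresentationSession.h>
    #include <android/binder_auto_utils.h>
    #include <android/binder_manager.h>

    using aidl::android::hardware::identity::CipherSuite;
    using aidl::android::hardware::identity::IIdentityCredential;
    using aidl::android::hardware::identity::IIdentityCredentialStore;
    using aidl::android::hardware::identity::IPresentationSession;

    bool PresentCredential(const std::vector<uint8_t>& credentialData,
                           const std::vector<uint8_t>& readerPublicKey,
                           const std::vector<uint8_t>& sessionTranscript) {
      const char* instance =
          "android.hardware.identity.IIdentityCredentialStore/default";  // assumption
      auto store = IIdentityCredentialStore::fromBinder(
          ndk::SpAIBinder(AServiceManager_waitForService(instance)));
      if (store == nullptr) return false;

      std::shared_ptr<IPresentationSession> session;
      if (!store->createPresentationSession(
                     CipherSuite::CIPHERSUITE_ECDHE_HKDF_ECDH_P256_AES_128_GCM_SHA256,
                     &session)
               .isOk()) {
        return false;
      }

      // Status checks elided for brevity in this sketch.
      std::vector<uint8_t> ephemeralKey;  // holder's ephemeral EC private key (DER)
      int64_t challenge = 0;              // to embed in auth/verification tokens
      session->getEphemeralKeyPair(&ephemeralKey);
      session->getAuthChallenge(&challenge);
      session->setReaderEphemeralPublicKey(readerPublicKey);
      session->setSessionTranscript(sessionTranscript);

      // Only the retrieval-related methods may be used on credentials obtained here.
      std::shared_ptr<IIdentityCredential> credential;
      return session->getCredential(credentialData, &credential).isOk() &&
             credential != nullptr;
    }
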
diff --git a/identity/aidl/default/Android.bp b/identity/aidl/default/Android.bp
index 3de8d30..ca24afa 100644
--- a/identity/aidl/default/Android.bp
+++ b/identity/aidl/default/Android.bp
@@ -13,6 +13,7 @@
     srcs: [
         "common/IdentityCredential.cpp",
         "common/IdentityCredentialStore.cpp",
+        "common/PresentationSession.cpp",
         "common/WritableIdentityCredential.cpp",
     ],
     export_include_dirs: [
@@ -39,8 +40,8 @@
         "libsoft_attestation_cert",
         "libpuresoftkeymasterdevice",
         "android.hardware.identity-support-lib",
-        "android.hardware.identity-V3-ndk",
-        "android.hardware.keymaster-V3-ndk",
+        "android.hardware.identity-V4-ndk",
+        "android.hardware.keymaster-V4-ndk",
     ],
 }
 
@@ -49,6 +50,7 @@
     vendor_available: true,
     srcs: [
         "libeic/EicCbor.c",
+        "libeic/EicSession.c",
         "libeic/EicPresentation.c",
         "libeic/EicProvisioning.c",
         "EicOpsImpl.cc",
@@ -100,8 +102,8 @@
         "libsoft_attestation_cert",
         "libpuresoftkeymasterdevice",
         "android.hardware.identity-support-lib",
-        "android.hardware.identity-V3-ndk",
-        "android.hardware.keymaster-V3-ndk",
+        "android.hardware.identity-V4-ndk",
+        "android.hardware.keymaster-V4-ndk",
         "android.hardware.identity-libeic-hal-common",
         "android.hardware.identity-libeic-library",
     ],
diff --git a/identity/aidl/default/EicOpsImpl.cc b/identity/aidl/default/EicOpsImpl.cc
index 8ec4cc9..c98a91e 100644
--- a/identity/aidl/default/EicOpsImpl.cc
+++ b/identity/aidl/default/EicOpsImpl.cc
@@ -20,9 +20,13 @@
 #include <tuple>
 #include <vector>
 
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <string.h>
+
 #include <android-base/logging.h>
 #include <android-base/stringprintf.h>
-#include <string.h>
 
 #include <android/hardware/identity/support/IdentityCredentialSupport.h>
 
@@ -63,6 +67,11 @@
     return strlen(s);
 }
 
+void* eicMemMem(const uint8_t* haystack, size_t haystackLen, const uint8_t* needle,
+                size_t needleLen) {
+    return memmem(haystack, haystackLen, needle, needleLen);
+}
+
 int eicCryptoMemCmp(const void* s1, const void* s2, size_t n) {
     return CRYPTO_memcmp(s1, s2, n);
 }
@@ -117,6 +126,25 @@
     return true;
 }
 
+bool eicNextId(uint32_t* id) {
+    uint32_t oldId = *id;
+    uint32_t newId = 0;
+
+    do {
+        union {
+            uint8_t value8;
+            uint32_t value32;
+        } value;
+        if (!eicOpsRandom(&value.value8, sizeof(value))) {
+            return false;
+        }
+        newId = value.value32;
+    } while (newId == oldId || newId == 0);
+
+    *id = newId;
+    return true;
+}
+
 bool eicOpsEncryptAes128Gcm(
         const uint8_t* key,    // Must be 16 bytes
         const uint8_t* nonce,  // Must be 12 bytes
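
The new eicNextId() helper stamps each provisioning/presentation context with a fresh identifier, with 0 reserved to mean "no active context" (see the id_ checks in FakeSecureHardwareProxy.cpp below). A small self-contained illustration of that contract follows; it uses std::rand() in place of eicOpsRandom(), which is an assumption made purely for the sketch.

    // Illustration of the id contract: the next id is non-zero and differs from
    // the previous value. Not the real eicOpsRandom()-backed implementation.
    #include <cassert>
    #include <cstdint>
    #include <cstdlib>

    static bool nextId(uint32_t* id) {
      uint32_t oldId = *id;
      uint32_t newId = 0;
      do {
        newId = static_cast<uint32_t>(std::rand());
      } while (newId == oldId || newId == 0);  // keep drawing until usable
      *id = newId;
      return true;
    }

    int main() {
      uint32_t id = 0;  // 0 == "no active context"
      assert(nextId(&id) && id != 0);
      uint32_t previous = id;
      assert(nextId(&id) && id != previous);
    }
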
diff --git a/identity/aidl/default/EicTests.cpp b/identity/aidl/default/EicTests.cpp
index a28080d..7b69b75 100644
--- a/identity/aidl/default/EicTests.cpp
+++ b/identity/aidl/default/EicTests.cpp
@@ -66,7 +66,8 @@
     // Then present data from it...
     //
     FakeSecureHardwarePresentationProxy presentationProxy;
-    ASSERT_TRUE(presentationProxy.initialize(isTestCredential, docType, credData.value()));
+    ASSERT_TRUE(presentationProxy.initialize(0 /* sessionId */, isTestCredential, docType,
+                                             credData.value()));
     AccessCheckResult res =
             presentationProxy.startRetrieveEntryValue(nameSpace, name, 1, content.size(), acpIds);
     ASSERT_EQ(res, AccessCheckResult::kNoAccessControlProfiles);
diff --git a/identity/aidl/default/FakeSecureHardwareProxy.cpp b/identity/aidl/default/FakeSecureHardwareProxy.cpp
index f0307dc..91e634c 100644
--- a/identity/aidl/default/FakeSecureHardwareProxy.cpp
+++ b/identity/aidl/default/FakeSecureHardwareProxy.cpp
@@ -23,6 +23,7 @@
 #include <android-base/logging.h>
 #include <android-base/stringprintf.h>
 #include <string.h>
+#include <map>
 
 #include <openssl/sha.h>
 
@@ -52,38 +53,110 @@
 
 // ----------------------------------------------------------------------
 
-FakeSecureHardwareProvisioningProxy::FakeSecureHardwareProvisioningProxy() {}
+// The singleton EicProvisioning object used everywhere.
+//
+EicProvisioning FakeSecureHardwareProvisioningProxy::ctx_;
 
-FakeSecureHardwareProvisioningProxy::~FakeSecureHardwareProvisioningProxy() {}
-
-bool FakeSecureHardwareProvisioningProxy::shutdown() {
-    LOG(INFO) << "FakeSecureHardwarePresentationProxy shutdown";
-    return true;
+FakeSecureHardwareProvisioningProxy::~FakeSecureHardwareProvisioningProxy() {
+    if (id_ != 0) {
+        shutdown();
+    }
 }
 
 bool FakeSecureHardwareProvisioningProxy::initialize(bool testCredential) {
-    LOG(INFO) << "FakeSecureHardwareProvisioningProxy created, sizeof(EicProvisioning): "
-              << sizeof(EicProvisioning);
-    return eicProvisioningInit(&ctx_, testCredential);
+    if (id_ != 0) {
+        LOG(WARNING) << "Proxy is already initialized";
+        return false;
+    }
+    bool initialized = eicProvisioningInit(&ctx_, testCredential);
+    if (!initialized) {
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "Error getting id";
+        return false;
+    }
+    id_ = id.value();
+    return true;
 }
 
 bool FakeSecureHardwareProvisioningProxy::initializeForUpdate(
-        bool testCredential, string docType, vector<uint8_t> encryptedCredentialKeys) {
-    return eicProvisioningInitForUpdate(&ctx_, testCredential, docType.c_str(),
-                                        docType.size(),
-                                        encryptedCredentialKeys.data(),
-                                        encryptedCredentialKeys.size());
+        bool testCredential, const string& docType,
+        const vector<uint8_t>& encryptedCredentialKeys) {
+    if (id_ != 0) {
+        LOG(WARNING) << "Proxy is already initialized";
+        return false;
+    }
+    bool initialized = eicProvisioningInitForUpdate(&ctx_, testCredential, docType.c_str(),
+                                                    docType.size(), encryptedCredentialKeys.data(),
+                                                    encryptedCredentialKeys.size());
+    if (!initialized) {
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "Error getting id";
+        return false;
+    }
+    id_ = id.value();
+    return true;
+}
+
+optional<uint32_t> FakeSecureHardwareProvisioningProxy::getId() {
+    uint32_t id;
+    if (!eicProvisioningGetId(&ctx_, &id)) {
+        return std::nullopt;
+    }
+    return id;
+}
+
+bool FakeSecureHardwareProvisioningProxy::validateId(const string& callerName) {
+    if (id_ == 0) {
+        LOG(WARNING) << "FakeSecureHardwareProvisioningProxy::" << callerName
+                     << ": While validating expected id is 0";
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "FakeSecureHardwareProvisioningProxy::" << callerName
+                     << ": Error getting id for validating";
+        return false;
+    }
+    if (id.value() != id_) {
+        LOG(WARNING) << "FakeSecureHardwareProvisioningProxy::" << callerName
+                     << ": While validating expected id " << id_ << " but got " << id.value();
+        return false;
+    }
+    return true;
+}
+
+bool FakeSecureHardwareProvisioningProxy::shutdown() {
+    bool validated = validateId(__func__);
+    id_ = 0;
+    if (!validated) {
+        return false;
+    }
+    if (!eicProvisioningShutdown(&ctx_)) {
+        LOG(INFO) << "Error shutting down provisioning";
+        return false;
+    }
+    return true;
 }
 
 // Returns public key certificate.
 optional<vector<uint8_t>> FakeSecureHardwareProvisioningProxy::createCredentialKey(
         const vector<uint8_t>& challenge, const vector<uint8_t>& applicationId) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     uint8_t publicKeyCert[4096];
     size_t publicKeyCertSize = sizeof publicKeyCert;
     if (!eicProvisioningCreateCredentialKey(&ctx_, challenge.data(), challenge.size(),
                                             applicationId.data(), applicationId.size(),
                                             publicKeyCert, &publicKeyCertSize)) {
-        return {};
+        return std::nullopt;
     }
     vector<uint8_t> pubKeyCert(publicKeyCertSize);
     memcpy(pubKeyCert.data(), publicKeyCert, publicKeyCertSize);
@@ -91,8 +164,11 @@
 }
 
 bool FakeSecureHardwareProvisioningProxy::startPersonalization(
-        int accessControlProfileCount, vector<int> entryCounts, const string& docType,
+        int accessControlProfileCount, const vector<int>& entryCounts, const string& docType,
         size_t expectedProofOfProvisioningSize) {
+    if (!validateId(__func__)) {
+        return false;
+    }
 
     if (!eicProvisioningStartPersonalization(&ctx_, accessControlProfileCount,
                                              entryCounts.data(),
@@ -108,13 +184,17 @@
 optional<vector<uint8_t>> FakeSecureHardwareProvisioningProxy::addAccessControlProfile(
         int id, const vector<uint8_t>& readerCertificate, bool userAuthenticationRequired,
         uint64_t timeoutMillis, uint64_t secureUserId) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> mac(28);
     uint8_t scratchSpace[512];
     if (!eicProvisioningAddAccessControlProfile(
                 &ctx_, id, readerCertificate.data(), readerCertificate.size(),
                 userAuthenticationRequired, timeoutMillis, secureUserId, mac.data(),
                 scratchSpace, sizeof(scratchSpace))) {
-        return {};
+        return std::nullopt;
     }
     return mac;
 }
@@ -122,6 +202,10 @@
 bool FakeSecureHardwareProvisioningProxy::beginAddEntry(const vector<int>& accessControlProfileIds,
                                                         const string& nameSpace, const string& name,
                                                         uint64_t entrySize) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
     uint8_t scratchSpace[512];
     vector<uint8_t> uint8AccessControlProfileIds;
     for (size_t i = 0; i < accessControlProfileIds.size(); i++) {
@@ -138,6 +222,10 @@
 optional<vector<uint8_t>> FakeSecureHardwareProvisioningProxy::addEntryValue(
         const vector<int>& accessControlProfileIds, const string& nameSpace, const string& name,
         const vector<uint8_t>& content) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> eicEncryptedContent;
     uint8_t scratchSpace[512];
     vector<uint8_t> uint8AccessControlProfileIds;
@@ -150,16 +238,20 @@
                 &ctx_, uint8AccessControlProfileIds.data(), uint8AccessControlProfileIds.size(),
                 nameSpace.c_str(), nameSpace.size(), name.c_str(), name.size(), content.data(),
                 content.size(), eicEncryptedContent.data(), scratchSpace, sizeof(scratchSpace))) {
-        return {};
+        return std::nullopt;
     }
     return eicEncryptedContent;
 }
 
 // Returns signatureOfToBeSigned (EIC_ECDSA_P256_SIGNATURE_SIZE bytes).
 optional<vector<uint8_t>> FakeSecureHardwareProvisioningProxy::finishAddingEntries() {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> signatureOfToBeSigned(EIC_ECDSA_P256_SIGNATURE_SIZE);
     if (!eicProvisioningFinishAddingEntries(&ctx_, signatureOfToBeSigned.data())) {
-        return {};
+        return std::nullopt;
     }
     return signatureOfToBeSigned;
 }
@@ -167,11 +259,15 @@
 // Returns encryptedCredentialKeys.
 optional<vector<uint8_t>> FakeSecureHardwareProvisioningProxy::finishGetCredentialData(
         const string& docType) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> encryptedCredentialKeys(116);
     size_t size = encryptedCredentialKeys.size();
     if (!eicProvisioningFinishGetCredentialData(&ctx_, docType.c_str(), docType.size(),
                                                 encryptedCredentialKeys.data(), &size)) {
-        return {};
+        return std::nullopt;
     }
     encryptedCredentialKeys.resize(size);
     return encryptedCredentialKeys;
@@ -179,21 +275,200 @@
 
 // ----------------------------------------------------------------------
 
-FakeSecureHardwarePresentationProxy::FakeSecureHardwarePresentationProxy() {}
+// The singleton EicSession object used everywhere.
+//
+EicSession FakeSecureHardwareSessionProxy::ctx_;
 
-FakeSecureHardwarePresentationProxy::~FakeSecureHardwarePresentationProxy() {}
+FakeSecureHardwareSessionProxy::~FakeSecureHardwareSessionProxy() {
+    if (id_ != 0) {
+        shutdown();
+    }
+}
 
-bool FakeSecureHardwarePresentationProxy::initialize(bool testCredential, string docType,
-                                                     vector<uint8_t> encryptedCredentialKeys) {
-    LOG(INFO) << "FakeSecureHardwarePresentationProxy created, sizeof(EicPresentation): "
-              << sizeof(EicPresentation);
-    return eicPresentationInit(&ctx_, testCredential, docType.c_str(), docType.size(),
-                               encryptedCredentialKeys.data(), encryptedCredentialKeys.size());
+bool FakeSecureHardwareSessionProxy::initialize() {
+    if (id_ != 0) {
+        LOG(WARNING) << "Proxy is already initialized";
+        return false;
+    }
+    bool initialized = eicSessionInit(&ctx_);
+    if (!initialized) {
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "Error getting id";
+        return false;
+    }
+    id_ = id.value();
+    return true;
+}
+
+optional<uint32_t> FakeSecureHardwareSessionProxy::getId() {
+    uint32_t id;
+    if (!eicSessionGetId(&ctx_, &id)) {
+        return std::nullopt;
+    }
+    return id;
+}
+
+bool FakeSecureHardwareSessionProxy::shutdown() {
+    bool validated = validateId(__func__);
+    id_ = 0;
+    if (!validated) {
+        return false;
+    }
+    if (!eicSessionShutdown(&ctx_)) {
+        LOG(INFO) << "Error shutting down session";
+        return false;
+    }
+    return true;
+}
+
+bool FakeSecureHardwareSessionProxy::validateId(const string& callerName) {
+    if (id_ == 0) {
+        LOG(WARNING) << "FakeSecureHardwareSessionProxy::" << callerName
+                     << ": While validating expected id is 0";
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "FakeSecureHardwareSessionProxy::" << callerName
+                     << ": Error getting id for validating";
+        return false;
+    }
+    if (id.value() != id_) {
+        LOG(WARNING) << "FakeSecureHardwareSessionProxy::" << callerName
+                     << ": While validating expected id " << id_ << " but got " << id.value();
+        return false;
+    }
+    return true;
+}
+
+optional<uint64_t> FakeSecureHardwareSessionProxy::getAuthChallenge() {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
+    uint64_t authChallenge;
+    if (!eicSessionGetAuthChallenge(&ctx_, &authChallenge)) {
+        return std::nullopt;
+    }
+    return authChallenge;
+}
+
+optional<vector<uint8_t>> FakeSecureHardwareSessionProxy::getEphemeralKeyPair() {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
+    vector<uint8_t> priv(EIC_P256_PRIV_KEY_SIZE);
+    if (!eicSessionGetEphemeralKeyPair(&ctx_, priv.data())) {
+        return std::nullopt;
+    }
+    return priv;
+}
+
+bool FakeSecureHardwareSessionProxy::setReaderEphemeralPublicKey(
+        const vector<uint8_t>& readerEphemeralPublicKey) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
+    return eicSessionSetReaderEphemeralPublicKey(&ctx_, readerEphemeralPublicKey.data());
+}
+
+bool FakeSecureHardwareSessionProxy::setSessionTranscript(
+        const vector<uint8_t>& sessionTranscript) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
+    return eicSessionSetSessionTranscript(&ctx_, sessionTranscript.data(),
+                                          sessionTranscript.size());
+}
+
+// ----------------------------------------------------------------------
+
+// The singleton EicPresentation object used everywhere.
+//
+EicPresentation FakeSecureHardwarePresentationProxy::ctx_;
+
+FakeSecureHardwarePresentationProxy::~FakeSecureHardwarePresentationProxy() {
+    if (id_ != 0) {
+        shutdown();
+    }
+}
+
+bool FakeSecureHardwarePresentationProxy::initialize(
+        uint32_t sessionId, bool testCredential, const string& docType,
+        const vector<uint8_t>& encryptedCredentialKeys) {
+    if (id_ != 0) {
+        LOG(WARNING) << "Proxy is already initialized";
+        return false;
+    }
+    bool initialized =
+            eicPresentationInit(&ctx_, sessionId, testCredential, docType.c_str(), docType.size(),
+                                encryptedCredentialKeys.data(), encryptedCredentialKeys.size());
+    if (!initialized) {
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "Error getting id";
+        return false;
+    }
+    id_ = id.value();
+    return true;
+}
+
+optional<uint32_t> FakeSecureHardwarePresentationProxy::getId() {
+    uint32_t id;
+    if (!eicPresentationGetId(&ctx_, &id)) {
+        return std::nullopt;
+    }
+    return id;
+}
+
+bool FakeSecureHardwarePresentationProxy::validateId(const string& callerName) {
+    if (id_ == 0) {
+        LOG(WARNING) << "FakeSecureHardwarePresentationProxy::" << callerName
+                     << ": While validating expected id is 0";
+        return false;
+    }
+    optional<uint32_t> id = getId();
+    if (!id) {
+        LOG(WARNING) << "FakeSecureHardwarePresentationProxy::" << callerName
+                     << ": Error getting id for validating";
+        return false;
+    }
+    if (id.value() != id_) {
+        LOG(WARNING) << "FakeSecureHardwarePresentationProxy::" << callerName
+                     << ": While validating expected id " << id_ << " but got " << id.value();
+        return false;
+    }
+    return true;
+}
+
+bool FakeSecureHardwarePresentationProxy::shutdown() {
+    bool validated = validateId(__func__);
+    id_ = 0;
+    if (!validated) {
+        return false;
+    }
+    if (!eicPresentationShutdown(&ctx_)) {
+        LOG(INFO) << "Error shutting down presentation";
+        return false;
+    }
+    return true;
 }
 
 // Returns publicKeyCert (1st component) and signingKeyBlob (2nd component)
 optional<pair<vector<uint8_t>, vector<uint8_t>>>
-FakeSecureHardwarePresentationProxy::generateSigningKeyPair(string docType, time_t now) {
+FakeSecureHardwarePresentationProxy::generateSigningKeyPair(const string& docType, time_t now) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     uint8_t publicKeyCert[512];
     size_t publicKeyCertSize = sizeof(publicKeyCert);
     vector<uint8_t> signingKeyBlob(60);
@@ -201,7 +476,7 @@
     if (!eicPresentationGenerateSigningKeyPair(&ctx_, docType.c_str(), docType.size(), now,
                                                publicKeyCert, &publicKeyCertSize,
                                                signingKeyBlob.data())) {
-        return {};
+        return std::nullopt;
     }
 
     vector<uint8_t> cert;
@@ -213,33 +488,44 @@
 
 // Returns private key
 optional<vector<uint8_t>> FakeSecureHardwarePresentationProxy::createEphemeralKeyPair() {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> priv(EIC_P256_PRIV_KEY_SIZE);
     if (!eicPresentationCreateEphemeralKeyPair(&ctx_, priv.data())) {
-        return {};
+        return std::nullopt;
     }
     return priv;
 }
 
 optional<uint64_t> FakeSecureHardwarePresentationProxy::createAuthChallenge() {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     uint64_t challenge;
     if (!eicPresentationCreateAuthChallenge(&ctx_, &challenge)) {
-        return {};
+        return std::nullopt;
     }
     return challenge;
 }
 
-bool FakeSecureHardwarePresentationProxy::shutdown() {
-    LOG(INFO) << "FakeSecureHardwarePresentationProxy shutdown";
-    return true;
-}
-
 bool FakeSecureHardwarePresentationProxy::pushReaderCert(const vector<uint8_t>& certX509) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
     return eicPresentationPushReaderCert(&ctx_, certX509.data(), certX509.size());
 }
 
 bool FakeSecureHardwarePresentationProxy::validateRequestMessage(
         const vector<uint8_t>& sessionTranscript, const vector<uint8_t>& requestMessage,
         int coseSignAlg, const vector<uint8_t>& readerSignatureOfToBeSigned) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
     return eicPresentationValidateRequestMessage(
             &ctx_, sessionTranscript.data(), sessionTranscript.size(), requestMessage.data(),
             requestMessage.size(), coseSignAlg, readerSignatureOfToBeSigned.data(),
@@ -251,6 +537,10 @@
         int hardwareAuthenticatorType, uint64_t timeStamp, const vector<uint8_t>& mac,
         uint64_t verificationTokenChallenge, uint64_t verificationTokenTimestamp,
         int verificationTokenSecurityLevel, const vector<uint8_t>& verificationTokenMac) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
     return eicPresentationSetAuthToken(&ctx_, challenge, secureUserId, authenticatorId,
                                        hardwareAuthenticatorType, timeStamp, mac.data(), mac.size(),
                                        verificationTokenChallenge, verificationTokenTimestamp,
@@ -261,6 +551,10 @@
 optional<bool> FakeSecureHardwarePresentationProxy::validateAccessControlProfile(
         int id, const vector<uint8_t>& readerCertificate, bool userAuthenticationRequired,
         int timeoutMillis, uint64_t secureUserId, const vector<uint8_t>& mac) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     bool accessGranted = false;
     uint8_t scratchSpace[512];
     if (!eicPresentationValidateAccessControlProfile(&ctx_, id, readerCertificate.data(),
@@ -268,12 +562,16 @@
                                                      userAuthenticationRequired, timeoutMillis,
                                                      secureUserId, mac.data(), &accessGranted,
                                                      scratchSpace, sizeof(scratchSpace))) {
-        return {};
+        return std::nullopt;
     }
     return accessGranted;
 }
 
 bool FakeSecureHardwarePresentationProxy::startRetrieveEntries() {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
     return eicPresentationStartRetrieveEntries(&ctx_);
 }
 
@@ -281,6 +579,10 @@
         const vector<uint8_t>& sessionTranscript, const vector<uint8_t>& readerEphemeralPublicKey,
         const vector<uint8_t>& signingKeyBlob, const string& docType,
         unsigned int numNamespacesWithValues, size_t expectedProofOfProvisioningSize) {
+    if (!validateId(__func__)) {
+        return false;
+    }
+
     if (signingKeyBlob.size() != 60) {
         eicDebug("Unexpected size %zd of signingKeyBlob, expected 60", signingKeyBlob.size());
         return false;
@@ -294,6 +596,10 @@
 AccessCheckResult FakeSecureHardwarePresentationProxy::startRetrieveEntryValue(
         const string& nameSpace, const string& name, unsigned int newNamespaceNumEntries,
         int32_t entrySize, const vector<int32_t>& accessControlProfileIds) {
+    if (!validateId(__func__)) {
+        return AccessCheckResult::kFailed;
+    }
+
     uint8_t scratchSpace[512];
     vector<uint8_t> uint8AccessControlProfileIds;
     for (size_t i = 0; i < accessControlProfileIds.size(); i++) {
@@ -324,6 +630,10 @@
 optional<vector<uint8_t>> FakeSecureHardwarePresentationProxy::retrieveEntryValue(
         const vector<uint8_t>& encryptedContent, const string& nameSpace, const string& name,
         const vector<int32_t>& accessControlProfileIds) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     uint8_t scratchSpace[512];
     vector<uint8_t> uint8AccessControlProfileIds;
     for (size_t i = 0; i < accessControlProfileIds.size(); i++) {
@@ -337,16 +647,20 @@
                 nameSpace.c_str(), nameSpace.size(), name.c_str(), name.size(),
                 uint8AccessControlProfileIds.data(), uint8AccessControlProfileIds.size(),
                 scratchSpace, sizeof(scratchSpace))) {
-        return {};
+        return std::nullopt;
     }
     return content;
 }
 
 optional<vector<uint8_t>> FakeSecureHardwarePresentationProxy::finishRetrieval() {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> mac(32);
     size_t macSize = 32;
     if (!eicPresentationFinishRetrieval(&ctx_, mac.data(), &macSize)) {
-        return {};
+        return std::nullopt;
     }
     mac.resize(macSize);
     return mac;
@@ -355,11 +669,15 @@
 optional<vector<uint8_t>> FakeSecureHardwarePresentationProxy::deleteCredential(
         const string& docType, const vector<uint8_t>& challenge, bool includeChallenge,
         size_t proofOfDeletionCborSize) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> signatureOfToBeSigned(EIC_ECDSA_P256_SIGNATURE_SIZE);
     if (!eicPresentationDeleteCredential(&ctx_, docType.c_str(), docType.size(), challenge.data(),
                                          challenge.size(), includeChallenge,
                                          proofOfDeletionCborSize, signatureOfToBeSigned.data())) {
-        return {};
+        return std::nullopt;
     }
     return signatureOfToBeSigned;
 }
@@ -367,11 +685,15 @@
 optional<vector<uint8_t>> FakeSecureHardwarePresentationProxy::proveOwnership(
         const string& docType, bool testCredential, const vector<uint8_t>& challenge,
         size_t proofOfOwnershipCborSize) {
+    if (!validateId(__func__)) {
+        return std::nullopt;
+    }
+
     vector<uint8_t> signatureOfToBeSigned(EIC_ECDSA_P256_SIGNATURE_SIZE);
     if (!eicPresentationProveOwnership(&ctx_, docType.c_str(), docType.size(), testCredential,
                                        challenge.data(), challenge.size(), proofOfOwnershipCborSize,
                                        signatureOfToBeSigned.data())) {
-        return {};
+        return std::nullopt;
     }
     return signatureOfToBeSigned;
 }
diff --git a/identity/aidl/default/FakeSecureHardwareProxy.h b/identity/aidl/default/FakeSecureHardwareProxy.h
index 6852c1a..df98c7a 100644
--- a/identity/aidl/default/FakeSecureHardwareProxy.h
+++ b/identity/aidl/default/FakeSecureHardwareProxy.h
@@ -27,21 +27,23 @@
 //
 class FakeSecureHardwareProvisioningProxy : public SecureHardwareProvisioningProxy {
   public:
-    FakeSecureHardwareProvisioningProxy();
+    FakeSecureHardwareProvisioningProxy() = default;
     virtual ~FakeSecureHardwareProvisioningProxy();
 
     bool initialize(bool testCredential) override;
 
-    bool initializeForUpdate(bool testCredential, string docType,
-                             vector<uint8_t> encryptedCredentialKeys) override;
+    bool initializeForUpdate(bool testCredential, const string& docType,
+                             const vector<uint8_t>& encryptedCredentialKeys) override;
 
     bool shutdown() override;
 
+    optional<uint32_t> getId() override;
+
     // Returns public key certificate.
     optional<vector<uint8_t>> createCredentialKey(const vector<uint8_t>& challenge,
                                                   const vector<uint8_t>& applicationId) override;
 
-    bool startPersonalization(int accessControlProfileCount, vector<int> entryCounts,
+    bool startPersonalization(int accessControlProfileCount, const vector<int>& entryCounts,
                               const string& docType,
                               size_t expectedProofOfProvisioningSize) override;
 
@@ -67,21 +69,81 @@
     optional<vector<uint8_t>> finishGetCredentialData(const string& docType) override;
 
   protected:
-    EicProvisioning ctx_;
+    // See docs for id_.
+    //
+    bool validateId(const string& callerName);
+
+    // We use a singleton libeic object, shared by all proxy instances.  This is to
+    // properly simulate a situation where libeic is used on constrained hardware
+    // with only enough RAM for a single instance of the libeic object.
+    //
+    static EicProvisioning ctx_;
+
+    // On the HAL side we keep track of the ID that was assigned to the libeic object
+    // created in secure hardware. For every call into libeic we validate that this
+    // identifier matches what is on the secure side. This is what the validateId()
+    // method does.
+    //
+    uint32_t id_ = 0;
+};
+
+// This implementation uses libEmbeddedIC in-process.
+//
+class FakeSecureHardwareSessionProxy : public SecureHardwareSessionProxy {
+  public:
+    FakeSecureHardwareSessionProxy() = default;
+    virtual ~FakeSecureHardwareSessionProxy();
+
+    bool initialize() override;
+
+    bool shutdown() override;
+
+    optional<uint32_t> getId() override;
+
+    optional<uint64_t> getAuthChallenge() override;
+
+    // Returns private key
+    optional<vector<uint8_t>> getEphemeralKeyPair() override;
+
+    bool setReaderEphemeralPublicKey(const vector<uint8_t>& readerEphemeralPublicKey) override;
+
+    bool setSessionTranscript(const vector<uint8_t>& sessionTranscript) override;
+
+  protected:
+    // See docs for id_.
+    //
+    bool validateId(const string& callerName);
+
+    // We use a singleton libeic object, shared by all proxy instances.  This is to
+    // properly simulate a situation where libeic is used on constrained hardware
+    // with only enough RAM for a single instance of the libeic object.
+    //
+    static EicSession ctx_;
+
+    // On the HAL side we keep track of the ID that was assigned to the libeic object
+    // created in secure hardware. For every call into libeic we validate that this
+    // identifier matches what is on the secure side. This is what the validateId()
+    // method does.
+    //
+    uint32_t id_ = 0;
 };
 
 // This implementation uses libEmbeddedIC in-process.
 //
 class FakeSecureHardwarePresentationProxy : public SecureHardwarePresentationProxy {
   public:
-    FakeSecureHardwarePresentationProxy();
+    FakeSecureHardwarePresentationProxy() = default;
     virtual ~FakeSecureHardwarePresentationProxy();
 
-    bool initialize(bool testCredential, string docType,
-                    vector<uint8_t> encryptedCredentialKeys) override;
+    bool initialize(uint32_t sessionId, bool testCredential, const string& docType,
+                    const vector<uint8_t>& encryptedCredentialKeys) override;
+
+    bool shutdown() override;
+
+    optional<uint32_t> getId() override;
 
     // Returns publicKeyCert (1st component) and signingKeyBlob (2nd component)
-    optional<pair<vector<uint8_t>, vector<uint8_t>>> generateSigningKeyPair(string docType,
+    optional<pair<vector<uint8_t>, vector<uint8_t>>> generateSigningKeyPair(const string& docType,
                                                                             time_t now) override;
 
     // Returns private key
@@ -133,10 +195,23 @@
                                              const vector<uint8_t>& challenge,
                                              size_t proofOfOwnershipCborSize) override;
 
-    bool shutdown() override;
-
   protected:
-    EicPresentation ctx_;
+    // See docs for id_.
+    //
+    bool validateId(const string& callerName);
+
+    // We use a singleton libeic object, shared by all proxy instances.  This is to
+    // properly simulate a situation where libeic is used on constrained hardware
+    // with only enough RAM for a single instance of the libeic object.
+    //
+    static EicPresentation ctx_;
+
+    // On the HAL side we keep track of the ID that was assigned to the libeic object
+    // created in secure hardware. For every call into libeic we validate that this
+    // identifier matches what is on the secure side. This is what the validateId()
+    // method does.
+    //
+    uint32_t id_ = 0;
 };
 
 // Factory implementation.
@@ -150,6 +225,10 @@
         return new FakeSecureHardwareProvisioningProxy();
     }
 
+    sp<SecureHardwareSessionProxy> createSessionProxy() override {
+        return new FakeSecureHardwareSessionProxy();
+    }
+
     sp<SecureHardwarePresentationProxy> createPresentationProxy() override {
         return new FakeSecureHardwarePresentationProxy();
     }
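The id_ bookkeeping documented above exists because all proxy instances of a given kind share one static libeic context, so a later initialize() can take that context over. A rough illustration of the scenario the comments describe, with challenge and appId as hypothetical placeholder values (not taken from the patch):

std::vector<uint8_t> challenge = {0x01, 0x02, 0x03};
std::vector<uint8_t> appId = {0x0a, 0x0b};

FakeSecureHardwareProvisioningProxy a;
a.initialize(/* testCredential= */ true);   // shared ctx_ now carries a's id

FakeSecureHardwareProvisioningProxy b;
b.initialize(/* testCredential= */ true);   // re-initializes ctx_ under a new id

// a's cached id_ no longer matches the id stored in the shared context, so
// validateId() fails and the call returns std::nullopt instead of corrupting
// b's in-progress provisioning.
auto cert = a.createCredentialKey(challenge, appId);
// expect: !cert.has_value()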
diff --git a/identity/aidl/default/android.hardware.identity_credential.xml b/identity/aidl/default/android.hardware.identity_credential.xml
index 5149792..20b2710 100644
--- a/identity/aidl/default/android.hardware.identity_credential.xml
+++ b/identity/aidl/default/android.hardware.identity_credential.xml
@@ -14,5 +14,5 @@
      limitations under the License.
 -->
 <permissions>
-  <feature name="android.hardware.identity_credential" version="202101" />
+  <feature name="android.hardware.identity_credential" version="202201" />
 </permissions>
diff --git a/identity/aidl/default/common/IdentityCredential.cpp b/identity/aidl/default/common/IdentityCredential.cpp
index 95557b5..7678ecb 100644
--- a/identity/aidl/default/common/IdentityCredential.cpp
+++ b/identity/aidl/default/common/IdentityCredential.cpp
@@ -72,14 +72,38 @@
     testCredential_ = testCredentialItem->value();
 
     encryptedCredentialKeys_ = encryptedCredentialKeysItem->value();
-    if (!hwProxy_->initialize(testCredential_, docType_, encryptedCredentialKeys_)) {
-        LOG(ERROR) << "hwProxy->initialize failed";
-        return false;
+
+    // If in a session, delay the initialization of the proxy.
+    //
+    if (!session_) {
+        ndk::ScopedAStatus status = ensureHwProxy();
+        if (!status.isOk()) {
+            LOG(ERROR) << "Error initializing hw proxy";
+            return IIdentityCredentialStore::STATUS_FAILED;
+        }
     }
 
     return IIdentityCredentialStore::STATUS_OK;
 }
 
+ndk::ScopedAStatus IdentityCredential::ensureHwProxy() {
+    if (hwProxy_) {
+        return ndk::ScopedAStatus::ok();
+    }
+    hwProxy_ = hwProxyFactory_->createPresentationProxy();
+    if (!hwProxy_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Error creating hw proxy"));
+    }
+    uint64_t sessionId = session_ ? session_->getSessionId() : EIC_PRESENTATION_ID_UNSET;
+    if (!hwProxy_->initialize(sessionId, testCredential_, docType_, encryptedCredentialKeys_)) {
+        hwProxy_.clear();
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Error initializing hw proxy"));
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
 ndk::ScopedAStatus IdentityCredential::deleteCredential(
         vector<uint8_t>* outProofOfDeletionSignature) {
     return deleteCredentialCommon({}, false, outProofOfDeletionSignature);
@@ -93,6 +117,14 @@
 ndk::ScopedAStatus IdentityCredential::deleteCredentialCommon(
         const vector<uint8_t>& challenge, bool includeChallenge,
         vector<uint8_t>* outProofOfDeletionSignature) {
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
     if (challenge.size() > 32) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
                 IIdentityCredentialStore::STATUS_INVALID_DATA, "Challenge too big"));
@@ -128,6 +160,14 @@
 
 ndk::ScopedAStatus IdentityCredential::proveOwnership(
         const vector<uint8_t>& challenge, vector<uint8_t>* outProofOfOwnershipSignature) {
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
     if (challenge.size() > 32) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
                 IIdentityCredentialStore::STATUS_INVALID_DATA, "Challenge too big"));
@@ -159,6 +199,14 @@
 }
 
 ndk::ScopedAStatus IdentityCredential::createEphemeralKeyPair(vector<uint8_t>* outKeyPair) {
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
     optional<vector<uint8_t>> ephemeralPriv = hwProxy_->createEphemeralKeyPair();
     if (!ephemeralPriv) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
@@ -186,11 +234,23 @@
 
 ndk::ScopedAStatus IdentityCredential::setReaderEphemeralPublicKey(
         const vector<uint8_t>& publicKey) {
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
     readerPublicKey_ = publicKey;
     return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus IdentityCredential::createAuthChallenge(int64_t* outChallenge) {
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
     optional<uint64_t> challenge = hwProxy_->createAuthChallenge();
     if (!challenge) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
@@ -217,16 +277,22 @@
         const HardwareAuthToken& authToken, const vector<uint8_t>& itemsRequest,
         const vector<uint8_t>& signingKeyBlob, const vector<uint8_t>& sessionTranscript,
         const vector<uint8_t>& readerSignature, const vector<int32_t>& requestCounts) {
-    std::unique_ptr<cppbor::Item> sessionTranscriptItem;
-    if (sessionTranscript.size() > 0) {
-        auto [item, _, message] = cppbor::parse(sessionTranscript);
-        if (item == nullptr) {
-            return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
-                    IIdentityCredentialStore::STATUS_INVALID_DATA,
-                    "SessionTranscript contains invalid CBOR"));
-        }
-        sessionTranscriptItem = std::move(item);
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
     }
+
+    // If in a session, ensure the passed-in session transcript matches the
+    // session transcript from the session.
+    if (session_) {
+        if (sessionTranscript != session_->getSessionTranscript()) {
+            return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                    IIdentityCredentialStore::STATUS_SESSION_TRANSCRIPT_MISMATCH,
+                    "In a session and passed-in SessionTranscript doesn't match the one "
+                    "from the session"));
+        }
+    }
+
     if (numStartRetrievalCalls_ > 0) {
         if (sessionTranscript_ != sessionTranscript) {
             LOG(ERROR) << "Session Transcript changed";
@@ -390,32 +456,36 @@
         }
     }
 
-    // TODO: move this check to the TA
-#if 1
-    // To prevent replay-attacks, we check that the public part of the ephemeral
-    // key we previously created, is present in the DeviceEngagement part of
-    // SessionTranscript as a COSE_Key, in uncompressed form.
-    //
-    // We do this by just searching for the X and Y coordinates.
-    if (sessionTranscript.size() > 0) {
-        auto [getXYSuccess, ePubX, ePubY] = support::ecPublicKeyGetXandY(ephemeralPublicKey_);
-        if (!getXYSuccess) {
-            return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
-                    IIdentityCredentialStore::STATUS_EPHEMERAL_PUBLIC_KEY_NOT_FOUND,
-                    "Error extracting X and Y from ePub"));
-        }
-        if (sessionTranscript.size() > 0 &&
-            !(memmem(sessionTranscript.data(), sessionTranscript.size(), ePubX.data(),
-                     ePubX.size()) != nullptr &&
-              memmem(sessionTranscript.data(), sessionTranscript.size(), ePubY.data(),
-                     ePubY.size()) != nullptr)) {
-            return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
-                    IIdentityCredentialStore::STATUS_EPHEMERAL_PUBLIC_KEY_NOT_FOUND,
-                    "Did not find ephemeral public key's X and Y coordinates in "
-                    "SessionTranscript (make sure leading zeroes are not used)"));
+    if (session_) {
+        // If presenting in a session, the TA has already done this check.
+
+    } else {
+        // To prevent replay attacks, we check that the public part of the ephemeral
+        // key we previously created is present in the DeviceEngagement part of
+        // SessionTranscript as a COSE_Key, in uncompressed form.
+        //
+        // We do this by just searching for the X and Y coordinates.
+        //
+        // Would be nice to move this check to the TA.
+        if (sessionTranscript.size() > 0) {
+            auto [getXYSuccess, ePubX, ePubY] = support::ecPublicKeyGetXandY(ephemeralPublicKey_);
+            if (!getXYSuccess) {
+                return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                        IIdentityCredentialStore::STATUS_EPHEMERAL_PUBLIC_KEY_NOT_FOUND,
+                        "Error extracting X and Y from ePub"));
+            }
+            if (sessionTranscript.size() > 0 &&
+                !(memmem(sessionTranscript.data(), sessionTranscript.size(), ePubX.data(),
+                         ePubX.size()) != nullptr &&
+                  memmem(sessionTranscript.data(), sessionTranscript.size(), ePubY.data(),
+                         ePubY.size()) != nullptr)) {
+                return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                        IIdentityCredentialStore::STATUS_EPHEMERAL_PUBLIC_KEY_NOT_FOUND,
+                        "Did not find ephemeral public key's X and Y coordinates in "
+                        "SessionTranscript (make sure leading zeroes are not used)"));
+            }
         }
     }
-#endif
 
     // itemsRequest: If non-empty, contains request data that may be signed by the
     // reader.  The content can be defined in the way appropriate for the
@@ -537,21 +607,38 @@
 
     // Finally, pass info so the HMAC key can be derived and the TA can start
     // creating the DeviceNameSpaces CBOR...
-    if (sessionTranscript_.size() > 0 && readerPublicKey_.size() > 0 && signingKeyBlob.size() > 0) {
-        // We expect the reader ephemeral public key to be same size and curve
-        // as the ephemeral key we generated (e.g. P-256 key), otherwise ECDH
-        // won't work. So its length should be 65 bytes and it should be
-        // starting with 0x04.
-        if (readerPublicKey_.size() != 65 || readerPublicKey_[0] != 0x04) {
-            return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
-                    IIdentityCredentialStore::STATUS_FAILED,
-                    "Reader public key is not in expected format"));
+    if (!session_) {
+        if (sessionTranscript_.size() > 0 && readerPublicKey_.size() > 0 &&
+            signingKeyBlob.size() > 0) {
+            // We expect the reader ephemeral public key to be same size and curve
+            // as the ephemeral key we generated (e.g. P-256 key), otherwise ECDH
+            // won't work. So its length should be 65 bytes and it should be
+            // starting with 0x04.
+            if (readerPublicKey_.size() != 65 || readerPublicKey_[0] != 0x04) {
+                return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                        IIdentityCredentialStore::STATUS_FAILED,
+                        "Reader public key is not in expected format"));
+            }
+            vector<uint8_t> pubKeyP256(readerPublicKey_.begin() + 1, readerPublicKey_.end());
+            if (!hwProxy_->calcMacKey(sessionTranscript_, pubKeyP256, signingKeyBlob, docType_,
+                                      numNamespacesWithValues, expectedDeviceNameSpacesSize_)) {
+                return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                        IIdentityCredentialStore::STATUS_FAILED,
+                        "Error starting retrieving entries"));
+            }
         }
-        vector<uint8_t> pubKeyP256(readerPublicKey_.begin() + 1, readerPublicKey_.end());
-        if (!hwProxy_->calcMacKey(sessionTranscript_, pubKeyP256, signingKeyBlob, docType_,
-                                  numNamespacesWithValues, expectedDeviceNameSpacesSize_)) {
-            return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
-                    IIdentityCredentialStore::STATUS_FAILED, "Error starting retrieving entries"));
+    } else {
+        if (session_->getSessionTranscript().size() > 0 &&
+            session_->getReaderEphemeralPublicKey().size() > 0 && signingKeyBlob.size() > 0) {
+            // Don't actually pass the reader ephemeral public key in; the TA will get
+            // it from the session object.
+            //
+            if (!hwProxy_->calcMacKey(sessionTranscript_, {}, signingKeyBlob, docType_,
+                                      numNamespacesWithValues, expectedDeviceNameSpacesSize_)) {
+                return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                        IIdentityCredentialStore::STATUS_FAILED,
+                        "Error starting retrieving entries"));
+            }
         }
     }
 
@@ -665,6 +752,11 @@
 ndk::ScopedAStatus IdentityCredential::startRetrieveEntryValue(
         const string& nameSpace, const string& name, int32_t entrySize,
         const vector<int32_t>& accessControlProfileIds) {
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
+
     if (name.empty()) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
                 IIdentityCredentialStore::STATUS_INVALID_DATA, "Name cannot be empty"));
@@ -785,6 +877,11 @@
 
 ndk::ScopedAStatus IdentityCredential::retrieveEntryValue(const vector<uint8_t>& encryptedContent,
                                                           vector<uint8_t>* outContent) {
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
+
     optional<vector<uint8_t>> content = hwProxy_->retrieveEntryValue(
             encryptedContent, currentNameSpace_, currentName_, currentAccessControlProfileIds_);
     if (!content) {
@@ -829,6 +926,11 @@
 
 ndk::ScopedAStatus IdentityCredential::finishRetrieval(vector<uint8_t>* outMac,
                                                        vector<uint8_t>* outDeviceNameSpaces) {
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
+
     if (currentNameSpaceDeviceNameSpacesMap_.size() > 0) {
         deviceNameSpacesMap_.add(currentNameSpace_,
                                  std::move(currentNameSpaceDeviceNameSpacesMap_));
@@ -846,18 +948,23 @@
                         .c_str()));
     }
 
-    // If there's no signing key or no sessionTranscript or no reader ephemeral
-    // public key, we return the empty MAC.
+    // If the TA calculated a MAC (it might not have), format it as a COSE_Mac0
+    //
     optional<vector<uint8_t>> mac;
-    if (signingKeyBlob_.size() > 0 && sessionTranscript_.size() > 0 &&
-        readerPublicKey_.size() > 0) {
-        optional<vector<uint8_t>> digestToBeMaced = hwProxy_->finishRetrieval();
-        if (!digestToBeMaced || digestToBeMaced.value().size() != 32) {
+    optional<vector<uint8_t>> digestToBeMaced = hwProxy_->finishRetrieval();
+
+    // The MAC not being set means an error occurred.
+    if (!digestToBeMaced) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_INVALID_DATA, "Error generating digestToBeMaced"));
+    }
+    // Size 0 means that the MAC isn't set. If it's set, it has to be 32 bytes.
+    if (digestToBeMaced.value().size() != 0) {
+        if (digestToBeMaced.value().size() != 32) {
             return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
                     IIdentityCredentialStore::STATUS_INVALID_DATA,
-                    "Error generating digestToBeMaced"));
+                    "Unexpected size for digestToBeMaced"));
         }
-        // Now construct COSE_Mac0 from the returned MAC...
         mac = support::coseMacWithDigest(digestToBeMaced.value(), {} /* data */);
     }
 
@@ -868,6 +975,15 @@
 
 ndk::ScopedAStatus IdentityCredential::generateSigningKeyPair(
         vector<uint8_t>* outSigningKeyBlob, Certificate* outSigningKeyCertificate) {
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
+    ndk::ScopedAStatus status = ensureHwProxy();
+    if (!status.isOk()) {
+        return status;
+    }
+
     time_t now = time(NULL);
     optional<pair<vector<uint8_t>, vector<uint8_t>>> pair =
             hwProxy_->generateSigningKeyPair(docType_, now);
@@ -885,9 +1001,18 @@
 
 ndk::ScopedAStatus IdentityCredential::updateCredential(
         shared_ptr<IWritableIdentityCredential>* outWritableCredential) {
-    sp<SecureHardwareProvisioningProxy> hwProxy = hwProxyFactory_->createProvisioningProxy();
+    if (session_) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Cannot be called in a session"));
+    }
+    sp<SecureHardwareProvisioningProxy> provisioningHwProxy =
+            hwProxyFactory_->createProvisioningProxy();
+    if (!provisioningHwProxy) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED, "Error creating provisioning proxy"));
+    }
     shared_ptr<WritableIdentityCredential> wc =
-            ndk::SharedRefBase::make<WritableIdentityCredential>(hwProxy, docType_,
+            ndk::SharedRefBase::make<WritableIdentityCredential>(provisioningHwProxy, docType_,
                                                                  testCredential_);
     if (!wc->initializeForUpdate(encryptedCredentialKeys_)) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
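With the finishRetrieval() change above, the HAL always asks the TA for digestToBeMaced and only wraps it when it is non-empty; the wrapping done by support::coseMacWithDigest() yields a COSE_Mac0 with a detached payload. A minimal sketch of that structure using the cppbor helpers this file already includes, assuming HMAC 256/256 (COSE algorithm 5) and a precomputed 32-byte tag:

#include <cppbor.h>
#include <vector>

// Sketch only: COSE_Mac0 = [ protected, unprotected, payload, tag ] with the
// payload detached (nil) and the tag being the 32-byte digest from the TA.
std::vector<uint8_t> sketchCoseMac0(const std::vector<uint8_t>& digestToBeMaced) {
    // Protected header is the serialized map {1 /* alg */: 5 /* HMAC 256/256 */}.
    std::vector<uint8_t> protectedHeader = cppbor::Map().add(1, 5).encode();
    return cppbor::Array()
            .add(cppbor::Bstr(protectedHeader))
            .add(cppbor::Map())             // no unprotected headers
            .add(cppbor::Null())            // detached payload
            .add(cppbor::Bstr(digestToBeMaced))
            .encode();
}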
diff --git a/identity/aidl/default/common/IdentityCredential.h b/identity/aidl/default/common/IdentityCredential.h
index ef9d133..2935fb8 100644
--- a/identity/aidl/default/common/IdentityCredential.h
+++ b/identity/aidl/default/common/IdentityCredential.h
@@ -30,6 +30,7 @@
 #include <cppbor.h>
 
 #include "IdentityCredentialStore.h"
+#include "PresentationSession.h"
 #include "SecureHardwareProxy.h"
 
 namespace aidl::android::hardware::identity {
@@ -46,11 +47,11 @@
 class IdentityCredential : public BnIdentityCredential {
   public:
     IdentityCredential(sp<SecureHardwareProxyFactory> hwProxyFactory,
-                       sp<SecureHardwarePresentationProxy> hwProxy,
-                       const vector<uint8_t>& credentialData)
+                       const vector<uint8_t>& credentialData,
+                       std::shared_ptr<PresentationSession> session)
         : hwProxyFactory_(hwProxyFactory),
-          hwProxy_(hwProxy),
           credentialData_(credentialData),
+          session_(std::move(session)),
           numStartRetrievalCalls_(0),
           expectedDeviceNameSpacesSize_(0) {}
 
@@ -94,10 +95,13 @@
                                               bool includeChallenge,
                                               vector<uint8_t>* outProofOfDeletionSignature);
 
+    // Creates and initializes hwProxy_.
+    ndk::ScopedAStatus ensureHwProxy();
+
     // Set by constructor
     sp<SecureHardwareProxyFactory> hwProxyFactory_;
-    sp<SecureHardwarePresentationProxy> hwProxy_;
     vector<uint8_t> credentialData_;
+    shared_ptr<PresentationSession> session_;
     int numStartRetrievalCalls_;
 
     // Set by initialize()
@@ -105,6 +109,9 @@
     bool testCredential_;
     vector<uint8_t> encryptedCredentialKeys_;
 
+    // Set by ensureHwProxy()
+    sp<SecureHardwarePresentationProxy> hwProxy_;
+
     // Set by createEphemeralKeyPair()
     vector<uint8_t> ephemeralPublicKey_;
 
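With the constructor change above, the presentation proxy is no longer injected; it is created on demand by ensureHwProxy(), either eagerly from initialize() when there is no session or lazily with the session's id when there is one. A short sketch of the two construction paths, mirroring the call sites in IdentityCredentialStore.cpp and PresentationSession.cpp:

// Store path: no session, so initialize() sets up the hardware proxy right away.
auto credential = ndk::SharedRefBase::make<IdentityCredential>(
        hwProxyFactory, credentialData, nullptr /* session */);

// Session path: PresentationSession hands itself in; ensureHwProxy() later
// initializes the proxy with session->getSessionId().
auto sessionCredential = ndk::SharedRefBase::make<IdentityCredential>(
        hwProxyFactory, credentialData, session);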
diff --git a/identity/aidl/default/common/IdentityCredentialStore.cpp b/identity/aidl/default/common/IdentityCredentialStore.cpp
index e6b5466..4703ffe 100644
--- a/identity/aidl/default/common/IdentityCredentialStore.cpp
+++ b/identity/aidl/default/common/IdentityCredentialStore.cpp
@@ -20,6 +20,7 @@
 
 #include "IdentityCredential.h"
 #include "IdentityCredentialStore.h"
+#include "PresentationSession.h"
 #include "WritableIdentityCredential.h"
 
 namespace aidl::android::hardware::identity {
@@ -61,9 +62,8 @@
                 "Unsupported cipher suite"));
     }
 
-    sp<SecureHardwarePresentationProxy> hwProxy = hwProxyFactory_->createPresentationProxy();
-    shared_ptr<IdentityCredential> credential =
-            ndk::SharedRefBase::make<IdentityCredential>(hwProxyFactory_, hwProxy, credentialData);
+    shared_ptr<IdentityCredential> credential = ndk::SharedRefBase::make<IdentityCredential>(
+            hwProxyFactory_, credentialData, nullptr /* session */);
     auto ret = credential->initialize();
     if (ret != IIdentityCredentialStore::STATUS_OK) {
         return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
@@ -73,4 +73,25 @@
     return ndk::ScopedAStatus::ok();
 }
 
+ndk::ScopedAStatus IdentityCredentialStore::createPresentationSession(
+        CipherSuite cipherSuite, shared_ptr<IPresentationSession>* outSession) {
+    // We only support CIPHERSUITE_ECDHE_HKDF_ECDSA_WITH_AES_256_GCM_SHA256 right now.
+    if (cipherSuite != CipherSuite::CIPHERSUITE_ECDHE_HKDF_ECDSA_WITH_AES_256_GCM_SHA256) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_CIPHER_SUITE_NOT_SUPPORTED,
+                "Unsupported cipher suite"));
+    }
+
+    sp<SecureHardwareSessionProxy> hwProxy = hwProxyFactory_->createSessionProxy();
+    shared_ptr<PresentationSession> session =
+            ndk::SharedRefBase::make<PresentationSession>(hwProxyFactory_, hwProxy);
+    auto ret = session->initialize();
+    if (ret != IIdentityCredentialStore::STATUS_OK) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                int(ret), "Error initializing PresentationSession"));
+    }
+    *outSession = session;
+    return ndk::ScopedAStatus::ok();
+}
+
 }  // namespace aidl::android::hardware::identity
diff --git a/identity/aidl/default/common/IdentityCredentialStore.h b/identity/aidl/default/common/IdentityCredentialStore.h
index d35e632..77b894d 100644
--- a/identity/aidl/default/common/IdentityCredentialStore.h
+++ b/identity/aidl/default/common/IdentityCredentialStore.h
@@ -47,6 +47,9 @@
     ndk::ScopedAStatus getCredential(CipherSuite cipherSuite, const vector<uint8_t>& credentialData,
                                      shared_ptr<IIdentityCredential>* outCredential) override;
 
+    ndk::ScopedAStatus createPresentationSession(
+            CipherSuite cipherSuite, shared_ptr<IPresentationSession>* outSession) override;
+
   private:
     sp<SecureHardwareProxyFactory> hwProxyFactory_;
 };
diff --git a/identity/aidl/default/common/PresentationSession.cpp b/identity/aidl/default/common/PresentationSession.cpp
new file mode 100644
index 0000000..fbd8972
--- /dev/null
+++ b/identity/aidl/default/common/PresentationSession.cpp
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "PresentationSession"
+
+#include "PresentationSession.h"
+#include "IdentityCredentialStore.h"
+
+#include <android/hardware/identity/support/IdentityCredentialSupport.h>
+
+#include <string.h>
+
+#include <android-base/logging.h>
+#include <android-base/stringprintf.h>
+
+#include <cppbor.h>
+#include <cppbor_parse.h>
+
+#include "FakeSecureHardwareProxy.h"
+#include "IdentityCredential.h"
+#include "PresentationSession.h"
+
+namespace aidl::android::hardware::identity {
+
+using ::std::optional;
+
+using namespace ::android::hardware::identity;
+
+PresentationSession::~PresentationSession() {}
+
+int PresentationSession::initialize() {
+    if (!hwProxy_->initialize()) {
+        LOG(ERROR) << "hwProxy->initialize failed";
+        return IIdentityCredentialStore::STATUS_FAILED;
+    }
+
+    optional<uint64_t> id = hwProxy_->getId();
+    if (!id) {
+        LOG(ERROR) << "Error getting id for session";
+        return IIdentityCredentialStore::STATUS_FAILED;
+    }
+    id_ = id.value();
+
+    optional<vector<uint8_t>> ephemeralKeyPriv = hwProxy_->getEphemeralKeyPair();
+    if (!ephemeralKeyPriv) {
+        LOG(ERROR) << "Error getting ephemeral private key for session";
+        return IIdentityCredentialStore::STATUS_FAILED;
+    }
+    optional<vector<uint8_t>> ephemeralKeyPair =
+            support::ecPrivateKeyToKeyPair(ephemeralKeyPriv.value());
+    if (!ephemeralKeyPair) {
+        LOG(ERROR) << "Error creating ephemeral key-pair";
+        return IIdentityCredentialStore::STATUS_FAILED;
+    }
+    ephemeralKeyPair_ = ephemeralKeyPair.value();
+
+    optional<uint64_t> authChallenge = hwProxy_->getAuthChallenge();
+    if (!authChallenge) {
+        LOG(ERROR) << "Error getting authChallenge for session";
+        return IIdentityCredentialStore::STATUS_FAILED;
+    }
+    authChallenge_ = authChallenge.value();
+
+    return IIdentityCredentialStore::STATUS_OK;
+}
+
+ndk::ScopedAStatus PresentationSession::getEphemeralKeyPair(vector<uint8_t>* outKeyPair) {
+    *outKeyPair = ephemeralKeyPair_;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PresentationSession::getAuthChallenge(int64_t* outChallenge) {
+    *outChallenge = authChallenge_;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PresentationSession::setReaderEphemeralPublicKey(
+        const vector<uint8_t>& publicKey) {
+    // We expect the reader ephemeral public key to be same size and curve
+    // as the ephemeral key we generated (e.g. P-256 key), otherwise ECDH
+    // won't work. So its length should be 65 bytes and it should be
+    // starting with 0x04.
+    if (publicKey.size() != 65 || publicKey[0] != 0x04) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED,
+                "Reader public key is not in expected format"));
+    }
+    readerPublicKey_ = publicKey;
+    vector<uint8_t> pubKeyP256(publicKey.begin() + 1, publicKey.end());
+    if (!hwProxy_->setReaderEphemeralPublicKey(pubKeyP256)) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED,
+                "Error setting readerEphemeralPublicKey for session"));
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PresentationSession::setSessionTranscript(
+        const vector<uint8_t>& sessionTranscript) {
+    sessionTranscript_ = sessionTranscript;
+    if (!hwProxy_->setSessionTranscript(sessionTranscript)) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                IIdentityCredentialStore::STATUS_FAILED,
+                "Error setting SessionTranscript for session"));
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PresentationSession::getCredential(
+        const vector<uint8_t>& credentialData, shared_ptr<IIdentityCredential>* outCredential) {
+    shared_ptr<PresentationSession> p = ref<PresentationSession>();
+    shared_ptr<IdentityCredential> credential =
+            ndk::SharedRefBase::make<IdentityCredential>(hwProxyFactory_, credentialData, p);
+    int ret = credential->initialize();
+    if (ret != IIdentityCredentialStore::STATUS_OK) {
+        return ndk::ScopedAStatus(AStatus_fromServiceSpecificErrorWithMessage(
+                ret, "Error initializing IdentityCredential"));
+    }
+    *outCredential = std::move(credential);
+
+    return ndk::ScopedAStatus::ok();
+}
+
+uint64_t PresentationSession::getSessionId() {
+    return id_;
+}
+
+vector<uint8_t> PresentationSession::getSessionTranscript() {
+    return sessionTranscript_;
+}
+
+vector<uint8_t> PresentationSession::getReaderEphemeralPublicKey() {
+    return readerPublicKey_;
+}
+
+}  // namespace aidl::android::hardware::identity
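The 65-byte, leading-0x04 check above corresponds to the SEC1 uncompressed point encoding for P-256: 0x04 || X (32 bytes) || Y (32 bytes). A tiny sketch of the same check and of the prefix stripping done before the key is handed to the TA (helper names are illustrative only):

#include <cstdint>
#include <vector>

// An uncompressed SEC1 P-256 point is exactly 0x04 || X[32] || Y[32].
bool looksLikeUncompressedP256Point(const std::vector<uint8_t>& pub) {
    return pub.size() == 65 && pub[0] == 0x04;
}

// Drop the 0x04 marker, leaving the raw 64-byte X||Y that the TA expects.
std::vector<uint8_t> stripSec1Prefix(const std::vector<uint8_t>& pub) {
    return std::vector<uint8_t>(pub.begin() + 1, pub.end());
}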
diff --git a/identity/aidl/default/common/PresentationSession.h b/identity/aidl/default/common/PresentationSession.h
new file mode 100644
index 0000000..76ca67b
--- /dev/null
+++ b/identity/aidl/default/common/PresentationSession.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_IDENTITY_PRESENTATIONSESSION_H
+#define ANDROID_HARDWARE_IDENTITY_PRESENTATIONSESSION_H
+
+#include <aidl/android/hardware/identity/BnPresentationSession.h>
+#include <android/hardware/identity/support/IdentityCredentialSupport.h>
+
+#include <vector>
+
+#include <cppbor.h>
+
+#include "IdentityCredentialStore.h"
+#include "SecureHardwareProxy.h"
+
+namespace aidl::android::hardware::identity {
+
+using ::aidl::android::hardware::keymaster::HardwareAuthToken;
+using ::aidl::android::hardware::keymaster::VerificationToken;
+using ::android::sp;
+using ::android::hardware::identity::SecureHardwareSessionProxy;
+using ::std::vector;
+
+class PresentationSession : public BnPresentationSession {
+  public:
+    PresentationSession(sp<SecureHardwareProxyFactory> hwProxyFactory,
+                        sp<SecureHardwareSessionProxy> hwProxy)
+        : hwProxyFactory_(std::move(hwProxyFactory)), hwProxy_(std::move(hwProxy)) {}
+
+    virtual ~PresentationSession();
+
+    // Creates ephemeral key and auth-challenge in TA. Returns a status code from
+    // IIdentityCredentialStore. Must be called right after construction.
+    int initialize();
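+
+    // Typical construction (illustrative sketch only, not prescribed by the HAL):
+    //
+    //   sp<SecureHardwareSessionProxy> proxy = hwProxyFactory->createSessionProxy();
+    //   auto session = ndk::SharedRefBase::make<PresentationSession>(hwProxyFactory, proxy);
+    //   if (session->initialize() != IIdentityCredentialStore::STATUS_OK) { /* handle error */ }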
+
+    uint64_t getSessionId();
+
+    vector<uint8_t> getSessionTranscript();
+    vector<uint8_t> getReaderEphemeralPublicKey();
+
+    // Methods from IPresentationSession follow.
+    ndk::ScopedAStatus getEphemeralKeyPair(vector<uint8_t>* outKeyPair) override;
+    ndk::ScopedAStatus getAuthChallenge(int64_t* outChallenge) override;
+    ndk::ScopedAStatus setReaderEphemeralPublicKey(const vector<uint8_t>& publicKey) override;
+    ndk::ScopedAStatus setSessionTranscript(const vector<uint8_t>& sessionTranscript) override;
+
+    ndk::ScopedAStatus getCredential(const vector<uint8_t>& credentialData,
+                                     shared_ptr<IIdentityCredential>* outCredential) override;
+
+  private:
+    // Set by constructor
+    sp<SecureHardwareProxyFactory> hwProxyFactory_;
+    sp<SecureHardwareSessionProxy> hwProxy_;
+
+    // Set by initialize()
+    uint64_t id_;
+    vector<uint8_t> ephemeralKeyPair_;
+    uint64_t authChallenge_;
+
+    // Set by setReaderEphemeralPublicKey()
+    vector<uint8_t> readerPublicKey_;
+
+    // Set by setSessionTranscript()
+    vector<uint8_t> sessionTranscript_;
+};
+
+}  // namespace aidl::android::hardware::identity
+
+#endif  // ANDROID_HARDWARE_IDENTITY_PRESENTATIONSESSION_H
diff --git a/identity/aidl/default/common/SecureHardwareProxy.h b/identity/aidl/default/common/SecureHardwareProxy.h
index a1ed1ef..a580444 100644
--- a/identity/aidl/default/common/SecureHardwareProxy.h
+++ b/identity/aidl/default/common/SecureHardwareProxy.h
@@ -42,6 +42,7 @@
 // Forward declare.
 //
 class SecureHardwareProvisioningProxy;
+class SecureHardwareSessionProxy;
 class SecureHardwarePresentationProxy;
 
 // This is a class used to create proxies.
@@ -52,6 +53,7 @@
     virtual ~SecureHardwareProxyFactory() {}
 
     virtual sp<SecureHardwareProvisioningProxy> createProvisioningProxy() = 0;
+    virtual sp<SecureHardwareSessionProxy> createSessionProxy() = 0;
     virtual sp<SecureHardwarePresentationProxy> createPresentationProxy() = 0;
 };
 
@@ -64,8 +66,12 @@
 
     virtual bool initialize(bool testCredential) = 0;
 
-    virtual bool initializeForUpdate(bool testCredential, string docType,
-                                     vector<uint8_t> encryptedCredentialKeys) = 0;
+    virtual bool initializeForUpdate(bool testCredential, const string& docType,
+                                     const vector<uint8_t>& encryptedCredentialKeys) = 0;
+
+    virtual optional<uint32_t> getId() = 0;
+
+    virtual bool shutdown() = 0;
 
     // Returns public key certificate chain with attestation.
     //
@@ -76,7 +82,7 @@
     virtual optional<vector<uint8_t>> createCredentialKey(const vector<uint8_t>& challenge,
                                                           const vector<uint8_t>& applicationId) = 0;
 
-    virtual bool startPersonalization(int accessControlProfileCount, vector<int> entryCounts,
+    virtual bool startPersonalization(int accessControlProfileCount, const vector<int>& entryCounts,
                                       const string& docType,
                                       size_t expectedProofOfProvisioningSize) = 0;
 
@@ -98,8 +104,6 @@
 
     // Returns encryptedCredentialKeys (80 bytes).
     virtual optional<vector<uint8_t>> finishGetCredentialData(const string& docType) = 0;
-
-    virtual bool shutdown() = 0;
 };
 
 enum AccessCheckResult {
@@ -110,6 +114,30 @@
     kReaderAuthenticationFailed,
 };
 
+// The proxy used for sessions.
+//
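+// A session proxy is initialized once, handed the reader's ephemeral public key and the
+// SessionTranscript as device engagement progresses, and shut down when the session ends.
+//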
+class SecureHardwareSessionProxy : public RefBase {
+  public:
+    SecureHardwareSessionProxy() {}
+
+    virtual ~SecureHardwareSessionProxy() {}
+
+    virtual bool initialize() = 0;
+
+    virtual optional<uint32_t> getId() = 0;
+
+    virtual bool shutdown() = 0;
+
+    virtual optional<uint64_t> getAuthChallenge() = 0;
+
+    // Returns private key
+    virtual optional<vector<uint8_t>> getEphemeralKeyPair() = 0;
+
+    virtual bool setReaderEphemeralPublicKey(const vector<uint8_t>& readerEphemeralPublicKey) = 0;
+
+    virtual bool setSessionTranscript(const vector<uint8_t>& sessionTranscript) = 0;
+};
+
 // The proxy used for presentation.
 //
 class SecureHardwarePresentationProxy : public RefBase {
@@ -117,12 +145,16 @@
     SecureHardwarePresentationProxy() {}
     virtual ~SecureHardwarePresentationProxy() {}
 
-    virtual bool initialize(bool testCredential, string docType,
-                            vector<uint8_t> encryptedCredentialKeys) = 0;
+    virtual bool initialize(uint32_t sessionId, bool testCredential, const string& docType,
+                            const vector<uint8_t>& encryptedCredentialKeys) = 0;
+
+    virtual optional<uint32_t> getId() = 0;
+
+    virtual bool shutdown() = 0;
 
     // Returns publicKeyCert (1st component) and signingKeyBlob (2nd component)
-    virtual optional<pair<vector<uint8_t>, vector<uint8_t>>> generateSigningKeyPair(string docType,
-                                                                                    time_t now) = 0;
+    virtual optional<pair<vector<uint8_t>, vector<uint8_t>>> generateSigningKeyPair(
+            const string& docType, time_t now) = 0;
 
     // Returns private key
     virtual optional<vector<uint8_t>> createEphemeralKeyPair() = 0;
@@ -174,8 +206,6 @@
     virtual optional<vector<uint8_t>> proveOwnership(const string& docType, bool testCredential,
                                                      const vector<uint8_t>& challenge,
                                                      size_t proofOfOwnershipCborSize) = 0;
-
-    virtual bool shutdown() = 0;
 };
 
 }  // namespace android::hardware::identity
diff --git a/identity/aidl/default/identity-default.xml b/identity/aidl/default/identity-default.xml
index a074250..cc0ddc7 100644
--- a/identity/aidl/default/identity-default.xml
+++ b/identity/aidl/default/identity-default.xml
@@ -1,7 +1,7 @@
 <manifest version="1.0" type="device">
     <hal format="aidl">
         <name>android.hardware.identity</name>
-        <version>3</version>
+        <version>4</version>
         <interface>
             <name>IIdentityCredentialStore</name>
             <instance>default</instance>
diff --git a/identity/aidl/default/libeic/EicCommon.h b/identity/aidl/default/libeic/EicCommon.h
index 476276e..2a08a35 100644
--- a/identity/aidl/default/libeic/EicCommon.h
+++ b/identity/aidl/default/libeic/EicCommon.h
@@ -21,6 +21,9 @@
 #ifndef ANDROID_HARDWARE_IDENTITY_EIC_COMMON_H
 #define ANDROID_HARDWARE_IDENTITY_EIC_COMMON_H
 
+// KeyMint auth-challenges are 64-bit numbers and 0 typically means unset.
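+// Challenge generation loops until a value different from this is produced (see
+// eicPresentationCreateAuthChallenge() and eicSessionInit()).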
+#define EIC_KM_AUTH_CHALLENGE_UNSET 0
+
 // Feature version 202009:
 //
 //         CredentialKeys = [
diff --git a/identity/aidl/default/libeic/EicOps.h b/identity/aidl/default/libeic/EicOps.h
index d4fcf0e..aa26e62 100644
--- a/identity/aidl/default/libeic/EicOps.h
+++ b/identity/aidl/default/libeic/EicOps.h
@@ -141,6 +141,10 @@
 // String length, see strlen(3).
 size_t eicStrLen(const char* s);
 
+// Locate a substring, see memmem(3)
+void* eicMemMem(const uint8_t* haystack, size_t haystackLen, const uint8_t* needle,
+                size_t needleLen);
+
 // Memory compare, see CRYPTO_memcmp(3SSL)
 //
 // It takes an amount of time dependent on len, but independent of the contents of the
@@ -151,6 +155,12 @@
 // Random number generation.
 bool eicOpsRandom(uint8_t* buf, size_t numBytes);
 
+// Creates a new non-zero identifier in |id|.
+//
+// The new value is guaranteed to be non-zero and different from the value already in |id|.
+//
+bool eicNextId(uint32_t* id);
+
 // If |testCredential| is true, returns the 128-bit AES Hardware-Bound Key (16 bytes).
 //
 // Otherwise returns all zeroes (16 bytes).
@@ -295,6 +305,8 @@
                              int verificationTokenSecurityLevel,
                              const uint8_t* verificationTokenMac, size_t verificationTokenMacSize);
 
+// Also see eicSessionGetForId() declared in EicSession.h.
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/identity/aidl/default/libeic/EicPresentation.c b/identity/aidl/default/libeic/EicPresentation.c
index 0d03ae9..104a559 100644
--- a/identity/aidl/default/libeic/EicPresentation.c
+++ b/identity/aidl/default/libeic/EicPresentation.c
@@ -16,11 +16,17 @@
 
 #include "EicPresentation.h"
 #include "EicCommon.h"
+#include "EicSession.h"
 
 #include <inttypes.h>
 
-bool eicPresentationInit(EicPresentation* ctx, bool testCredential, const char* docType,
-                         size_t docTypeLength, const uint8_t* encryptedCredentialKeys,
+// Global used for assigning ids for presentation objects.
+//
+static uint32_t gPresentationLastIdAssigned = 0;
+
+bool eicPresentationInit(EicPresentation* ctx, uint32_t sessionId, bool testCredential,
+                         const char* docType, size_t docTypeLength,
+                         const uint8_t* encryptedCredentialKeys,
                          size_t encryptedCredentialKeysSize) {
     uint8_t credentialKeys[EIC_CREDENTIAL_KEYS_CBOR_SIZE_FEATURE_VERSION_202101];
     bool expectPopSha256 = false;
@@ -39,6 +45,13 @@
     }
 
     eicMemSet(ctx, '\0', sizeof(EicPresentation));
+    ctx->sessionId = sessionId;
+
+    if (!eicNextId(&gPresentationLastIdAssigned)) {
+        eicDebug("Error getting id for object");
+        return false;
+    }
+    ctx->id = gPresentationLastIdAssigned;
 
     if (!eicOpsDecryptAes128Gcm(eicOpsGetHardwareBoundKey(testCredential), encryptedCredentialKeys,
                                 encryptedCredentialKeysSize,
@@ -86,6 +99,23 @@
     if (expectPopSha256) {
         eicMemCpy(ctx->proofOfProvisioningSha256, credentialKeys + 54, EIC_SHA256_DIGEST_SIZE);
     }
+
+    eicDebug("Initialized presentation with id %" PRIu32, ctx->id);
+    return true;
+}
+
+bool eicPresentationShutdown(EicPresentation* ctx) {
+    if (ctx->id == 0) {
+        eicDebug("Trying to shut down presentation with id 0");
+        return false;
+    }
+    eicDebug("Shut down presentation with id %" PRIu32, ctx->id);
+    eicMemSet(ctx, '\0', sizeof(EicPresentation));
+    return true;
+}
+
+bool eicPresentationGetId(EicPresentation* ctx, uint32_t* outId) {
+    *outId = ctx->id;
     return true;
 }
 
@@ -174,7 +204,7 @@
             eicDebug("Failed generating random challenge");
             return false;
         }
-    } while (ctx->authChallenge == 0);
+    } while (ctx->authChallenge == EIC_KM_AUTH_CHALLENGE_UNSET);
     eicDebug("Created auth challenge %" PRIu64, ctx->authChallenge);
     *authChallenge = ctx->authChallenge;
     return true;
@@ -190,6 +220,24 @@
                                            int coseSignAlg,
                                            const uint8_t* readerSignatureOfToBeSigned,
                                            size_t readerSignatureOfToBeSignedSize) {
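+    // For session-bound presentations, the SessionTranscript passed here must match
+    // (by SHA-256) the transcript previously set on the session.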
+    if (ctx->sessionId != 0) {
+        EicSession* session = eicSessionGetForId(ctx->sessionId);
+        if (session == NULL) {
+            eicDebug("Error looking up session for sessionId %" PRIu32, ctx->sessionId);
+            return false;
+        }
+        EicSha256Ctx sha256;
+        uint8_t sessionTranscriptSha256[EIC_SHA256_DIGEST_SIZE];
+        eicOpsSha256Init(&sha256);
+        eicOpsSha256Update(&sha256, sessionTranscript, sessionTranscriptSize);
+        eicOpsSha256Final(&sha256, sessionTranscriptSha256);
+        if (eicCryptoMemCmp(sessionTranscriptSha256, session->sessionTranscriptSha256,
+                            EIC_SHA256_DIGEST_SIZE) != 0) {
+            eicDebug("SessionTranscript mismatch");
+            return false;
+        }
+    }
+
     if (ctx->readerPublicKeySize == 0) {
         eicDebug("No public key for reader");
         return false;
@@ -330,6 +378,20 @@
     return true;
 }
 
+static bool getChallenge(EicPresentation* ctx, uint64_t* outAuthChallenge) {
+    // Use authChallenge from session if applicable.
+    *outAuthChallenge = ctx->authChallenge;
+    if (ctx->sessionId != 0) {
+        EicSession* session = eicSessionGetForId(ctx->sessionId);
+        if (session == NULL) {
+            eicDebug("Error looking up session for sessionId %" PRIu32, ctx->sessionId);
+            return false;
+        }
+        *outAuthChallenge = session->authChallenge;
+    }
+    return true;
+}
+
 bool eicPresentationSetAuthToken(EicPresentation* ctx, uint64_t challenge, uint64_t secureUserId,
                                  uint64_t authenticatorId, int hardwareAuthenticatorType,
                                  uint64_t timeStamp, const uint8_t* mac, size_t macSize,
@@ -338,14 +400,19 @@
                                  int verificationTokenSecurityLevel,
                                  const uint8_t* verificationTokenMac,
                                  size_t verificationTokenMacSize) {
+    uint64_t authChallenge;
+    if (!getChallenge(ctx, &authChallenge)) {
+        return false;
+    }
+
     // It doesn't make sense to accept any tokens if eicPresentationCreateAuthChallenge()
     // was never called.
-    if (ctx->authChallenge == 0) {
-        eicDebug("Trying validate tokens when no auth-challenge was previously generated");
+    if (authChallenge == EIC_KM_AUTH_CHALLENGE_UNSET) {
+        eicDebug("Trying to validate tokens when no auth-challenge was previously generated");
         return false;
     }
     // At least the verification-token must have the same challenge as what was generated.
-    if (verificationTokenChallenge != ctx->authChallenge) {
+    if (verificationTokenChallenge != authChallenge) {
         eicDebug("Challenge in verification token does not match the challenge "
                  "previously generated");
         return false;
@@ -354,6 +421,7 @@
                 challenge, secureUserId, authenticatorId, hardwareAuthenticatorType, timeStamp, mac,
                 macSize, verificationTokenChallenge, verificationTokenTimestamp,
                 verificationTokenSecurityLevel, verificationTokenMac, verificationTokenMacSize)) {
+        eicDebug("Error validating authToken");
         return false;
     }
     ctx->authTokenChallenge = challenge;
@@ -377,11 +445,16 @@
     // Only ACP with auth-on-every-presentation - those with timeout == 0 - need the
     // challenge to match...
     if (timeoutMillis == 0) {
-        if (ctx->authTokenChallenge != ctx->authChallenge) {
+        uint64_t authChallenge;
+        if (!getChallenge(ctx, &authChallenge)) {
+            return false;
+        }
+
+        if (ctx->authTokenChallenge != authChallenge) {
             eicDebug("Challenge in authToken (%" PRIu64
                      ") doesn't match the challenge "
                      "that was created (%" PRIu64 ") for this session",
-                     ctx->authTokenChallenge, ctx->authChallenge);
+                     ctx->authTokenChallenge, authChallenge);
             return false;
         }
     }
@@ -490,6 +563,25 @@
                                const uint8_t signingKeyBlob[60], const char* docType,
                                size_t docTypeLength, unsigned int numNamespacesWithValues,
                                size_t expectedDeviceNamespacesSize) {
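+    // For session-bound presentations, verify the SessionTranscript against the hash stored
+    // on the session and use the session's readerEphemeralPublicKey instead of the one
+    // passed by the caller.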
+    if (ctx->sessionId != 0) {
+        EicSession* session = eicSessionGetForId(ctx->sessionId);
+        if (session == NULL) {
+            eicDebug("Error looking up session for sessionId %" PRIu32, ctx->sessionId);
+            return false;
+        }
+        EicSha256Ctx sha256;
+        uint8_t sessionTranscriptSha256[EIC_SHA256_DIGEST_SIZE];
+        eicOpsSha256Init(&sha256);
+        eicOpsSha256Update(&sha256, sessionTranscript, sessionTranscriptSize);
+        eicOpsSha256Final(&sha256, sessionTranscriptSha256);
+        if (eicCryptoMemCmp(sessionTranscriptSha256, session->sessionTranscriptSha256,
+                            EIC_SHA256_DIGEST_SIZE) != 0) {
+            eicDebug("SessionTranscript mismatch");
+            return false;
+        }
+        readerEphemeralPublicKey = session->readerEphemeralPublicKey;
+    }
+
     uint8_t signingKeyPriv[EIC_P256_PRIV_KEY_SIZE];
     if (!eicOpsDecryptAes128Gcm(ctx->storageKey, signingKeyBlob, 60, (const uint8_t*)docType,
                                 docTypeLength, signingKeyPriv)) {
diff --git a/identity/aidl/default/libeic/EicPresentation.h b/identity/aidl/default/libeic/EicPresentation.h
index 6f7f432..a031890 100644
--- a/identity/aidl/default/libeic/EicPresentation.h
+++ b/identity/aidl/default/libeic/EicPresentation.h
@@ -30,7 +30,13 @@
 // The maximum size we support for public keys in reader certificates.
 #define EIC_PRESENTATION_MAX_READER_PUBLIC_KEY_SIZE 65
 
+// Constant used to convey that no session is associated with a presentation.
+#define EIC_PRESENTATION_ID_UNSET 0
+
 typedef struct {
+    // A non-zero number unique for this EicPresentation instance
+    uint32_t id;
+
     int featureLevel;
 
     uint8_t storageKey[EIC_AES_128_KEY_SIZE];
@@ -38,6 +44,10 @@
 
     uint8_t ephemeralPrivateKey[EIC_P256_PRIV_KEY_SIZE];
 
+    // If non-zero (not EIC_PRESENTATION_ID_UNSET), the id of the EicSession object this
+    // presentation object is associated with.
+    uint32_t sessionId;
+
     // The challenge generated with eicPresentationCreateAuthChallenge()
     uint64_t authChallenge;
 
@@ -93,10 +103,18 @@
     EicCbor cbor;
 } EicPresentation;
 
-bool eicPresentationInit(EicPresentation* ctx, bool testCredential, const char* docType,
-                         size_t docTypeLength, const uint8_t* encryptedCredentialKeys,
+// If sessionId is zero (EIC_PRESENTATION_ID_UNSET), the presentation object is not associated
+// with a session object. Otherwise it's the id of the session object.
+//
+bool eicPresentationInit(EicPresentation* ctx, uint32_t sessionId, bool testCredential,
+                         const char* docType, size_t docTypeLength,
+                         const uint8_t* encryptedCredentialKeys,
                          size_t encryptedCredentialKeysSize);
 
+bool eicPresentationShutdown(EicPresentation* ctx);
+
+bool eicPresentationGetId(EicPresentation* ctx, uint32_t* outId);
+
 bool eicPresentationGenerateSigningKeyPair(EicPresentation* ctx, const char* docType,
                                            size_t docTypeLength, time_t now,
                                            uint8_t* publicKeyCert, size_t* publicKeyCertSize,
diff --git a/identity/aidl/default/libeic/EicProvisioning.c b/identity/aidl/default/libeic/EicProvisioning.c
index c9df4fd..a241b71 100644
--- a/identity/aidl/default/libeic/EicProvisioning.c
+++ b/identity/aidl/default/libeic/EicProvisioning.c
@@ -17,8 +17,21 @@
 #include "EicProvisioning.h"
 #include "EicCommon.h"
 
+#include <inttypes.h>
+
+// Global used for assigning ids for provisioning objects.
+//
+static uint32_t gProvisioningLastIdAssigned = 0;
+
 bool eicProvisioningInit(EicProvisioning* ctx, bool testCredential) {
     eicMemSet(ctx, '\0', sizeof(EicProvisioning));
+
+    if (!eicNextId(&gProvisioningLastIdAssigned)) {
+        eicDebug("Error getting id for object");
+        return false;
+    }
+    ctx->id = gProvisioningLastIdAssigned;
+
     ctx->testCredential = testCredential;
     if (!eicOpsRandom(ctx->storageKey, EIC_AES_128_KEY_SIZE)) {
         return false;
@@ -47,6 +60,13 @@
     }
 
     eicMemSet(ctx, '\0', sizeof(EicProvisioning));
+
+    if (!eicNextId(&gProvisioningLastIdAssigned)) {
+        eicDebug("Error getting id for object");
+        return false;
+    }
+    ctx->id = gProvisioningLastIdAssigned;
+
     ctx->testCredential = testCredential;
 
     if (!eicOpsDecryptAes128Gcm(eicOpsGetHardwareBoundKey(testCredential), encryptedCredentialKeys,
@@ -96,6 +116,21 @@
     return true;
 }
 
+bool eicProvisioningShutdown(EicProvisioning* ctx) {
+    if (ctx->id == 0) {
+        eicDebug("Trying to shut down provsioning with id 0");
+        return false;
+    }
+    eicDebug("Shut down provsioning with id %" PRIu32, ctx->id);
+    eicMemSet(ctx, '\0', sizeof(EicProvisioning));
+    return true;
+}
+
+bool eicProvisioningGetId(EicProvisioning* ctx, uint32_t* outId) {
+    *outId = ctx->id;
+    return true;
+}
+
 bool eicProvisioningCreateCredentialKey(EicProvisioning* ctx, const uint8_t* challenge,
                                         size_t challengeSize, const uint8_t* applicationId,
                                         size_t applicationIdSize, uint8_t* publicKeyCert,
diff --git a/identity/aidl/default/libeic/EicProvisioning.h b/identity/aidl/default/libeic/EicProvisioning.h
index 92f1e4a..d94f8f1 100644
--- a/identity/aidl/default/libeic/EicProvisioning.h
+++ b/identity/aidl/default/libeic/EicProvisioning.h
@@ -31,6 +31,9 @@
 #define EIC_MAX_NUM_ACCESS_CONTROL_PROFILE_IDS 32
 
 typedef struct {
+    // A non-zero number unique for this EicProvisioning instance
+    uint32_t id;
+
     // Set by eicCreateCredentialKey() OR eicProvisioningInitForUpdate()
     uint8_t credentialPrivateKey[EIC_P256_PRIV_KEY_SIZE];
 
@@ -68,6 +71,10 @@
                                   size_t docTypeLength, const uint8_t* encryptedCredentialKeys,
                                   size_t encryptedCredentialKeysSize);
 
+bool eicProvisioningShutdown(EicProvisioning* ctx);
+
+bool eicProvisioningGetId(EicProvisioning* ctx, uint32_t* outId);
+
 bool eicProvisioningCreateCredentialKey(EicProvisioning* ctx, const uint8_t* challenge,
                                         size_t challengeSize, const uint8_t* applicationId,
                                         size_t applicationIdSize, uint8_t* publicKeyCert,
diff --git a/identity/aidl/default/libeic/EicSession.c b/identity/aidl/default/libeic/EicSession.c
new file mode 100644
index 0000000..d0c7a0d
--- /dev/null
+++ b/identity/aidl/default/libeic/EicSession.c
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2020, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#include "EicCommon.h"
+#include "EicSession.h"
+
+// Global used for assigning ids for session objects.
+//
+static uint32_t gSessionLastIdAssigned = 0;
+
+// The current session object or NULL if never initialized or if it has been shut down.
+//
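+// Note that this reference implementation keeps at most one session active at a time;
+// eicSessionGetForId() only matches the most recently initialized session.
+//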
+static EicSession* gSessionCurrent = NULL;
+
+EicSession* eicSessionGetForId(uint32_t sessionId) {
+    if (gSessionCurrent != NULL && gSessionCurrent->id == sessionId) {
+        return gSessionCurrent;
+    }
+    return NULL;
+}
+
+bool eicSessionInit(EicSession* ctx) {
+    eicMemSet(ctx, '\0', sizeof(EicSession));
+
+    if (!eicNextId(&gSessionLastIdAssigned)) {
+        eicDebug("Error getting id for object");
+        return false;
+    }
+    ctx->id = gSessionLastIdAssigned;
+
+    do {
+        if (!eicOpsRandom((uint8_t*)&(ctx->authChallenge), sizeof(ctx->authChallenge))) {
+            eicDebug("Failed generating random challenge");
+            return false;
+        }
+    } while (ctx->authChallenge == EIC_KM_AUTH_CHALLENGE_UNSET);
+
+    if (!eicOpsCreateEcKey(ctx->ephemeralPrivateKey, ctx->ephemeralPublicKey)) {
+        eicDebug("Error creating ephemeral key-pair");
+        return false;
+    }
+
+    gSessionCurrent = ctx;
+    eicDebug("Initialized session with id %" PRIu32, ctx->id);
+    return true;
+}
+
+bool eicSessionShutdown(EicSession* ctx) {
+    if (ctx->id == 0) {
+        eicDebug("Trying to shut down session with id 0");
+        return false;
+    }
+    eicDebug("Shut down session with id %" PRIu32, ctx->id);
+    eicMemSet(ctx, '\0', sizeof(EicSession));
+    gSessionCurrent = NULL;
+    return true;
+}
+
+bool eicSessionGetId(EicSession* ctx, uint32_t* outId) {
+    *outId = ctx->id;
+    return true;
+}
+
+bool eicSessionGetAuthChallenge(EicSession* ctx, uint64_t* outAuthChallenge) {
+    *outAuthChallenge = ctx->authChallenge;
+    return true;
+}
+
+bool eicSessionGetEphemeralKeyPair(EicSession* ctx,
+                                   uint8_t ephemeralPrivateKey[EIC_P256_PRIV_KEY_SIZE]) {
+    eicMemCpy(ephemeralPrivateKey, ctx->ephemeralPrivateKey, EIC_P256_PRIV_KEY_SIZE);
+    return true;
+}
+
+bool eicSessionSetReaderEphemeralPublicKey(
+        EicSession* ctx, const uint8_t readerEphemeralPublicKey[EIC_P256_PUB_KEY_SIZE]) {
+    eicMemCpy(ctx->readerEphemeralPublicKey, readerEphemeralPublicKey, EIC_P256_PUB_KEY_SIZE);
+    return true;
+}
+
+bool eicSessionSetSessionTranscript(EicSession* ctx, const uint8_t* sessionTranscript,
+                                    size_t sessionTranscriptSize) {
+    // Only accept the SessionTranscript if the X and Y coordinates of the ephemeral
+    // key we created appear somewhere in it...
+    //
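+    // ephemeralPublicKey holds the raw X || Y concatenation, so each coordinate is
+    // EIC_P256_PUB_KEY_SIZE / 2 bytes.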
+    if (eicMemMem(sessionTranscript, sessionTranscriptSize, ctx->ephemeralPublicKey,
+                  EIC_P256_PUB_KEY_SIZE / 2) == NULL) {
+        eicDebug("Error finding X from ephemeralPublicKey in sessionTranscript");
+        return false;
+    }
+    if (eicMemMem(sessionTranscript, sessionTranscriptSize,
+                  ctx->ephemeralPublicKey + EIC_P256_PUB_KEY_SIZE / 2,
+                  EIC_P256_PUB_KEY_SIZE / 2) == NULL) {
+        eicDebug("Error finding Y from ephemeralPublicKey in sessionTranscript");
+        return false;
+    }
+
+    // To save space we only store the SHA-256 of SessionTranscript
+    //
+    EicSha256Ctx shaCtx;
+    eicOpsSha256Init(&shaCtx);
+    eicOpsSha256Update(&shaCtx, sessionTranscript, sessionTranscriptSize);
+    eicOpsSha256Final(&shaCtx, ctx->sessionTranscriptSha256);
+    return true;
+}
diff --git a/identity/aidl/default/libeic/EicSession.h b/identity/aidl/default/libeic/EicSession.h
new file mode 100644
index 0000000..0303dae
--- /dev/null
+++ b/identity/aidl/default/libeic/EicSession.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if !defined(EIC_INSIDE_LIBEIC_H) && !defined(EIC_COMPILATION)
+#error "Never include this file directly, include libeic.h instead."
+#endif
+
+#ifndef ANDROID_HARDWARE_IDENTITY_EIC_SESSION_H
+#define ANDROID_HARDWARE_IDENTITY_EIC_SESSION_H
+
+#include "EicOps.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    // A non-zero number unique for this EicSession instance
+    uint32_t id;
+
+    // The challenge generated at construction time by eicSessionInit().
+    uint64_t authChallenge;
+
+    uint8_t ephemeralPrivateKey[EIC_P256_PRIV_KEY_SIZE];
+    uint8_t ephemeralPublicKey[EIC_P256_PUB_KEY_SIZE];
+
+    uint8_t readerEphemeralPublicKey[EIC_P256_PUB_KEY_SIZE];
+
+    uint8_t sessionTranscriptSha256[EIC_SHA256_DIGEST_SIZE];
+
+} EicSession;
+
+bool eicSessionInit(EicSession* ctx);
+
+bool eicSessionShutdown(EicSession* ctx);
+
+bool eicSessionGetId(EicSession* ctx, uint32_t* outId);
+
+bool eicSessionGetAuthChallenge(EicSession* ctx, uint64_t* outAuthChallenge);
+
+bool eicSessionGetEphemeralKeyPair(EicSession* ctx,
+                                   uint8_t ephemeralPrivateKey[EIC_P256_PRIV_KEY_SIZE]);
+
+bool eicSessionSetReaderEphemeralPublicKey(
+        EicSession* ctx, const uint8_t readerEphemeralPublicKey[EIC_P256_PUB_KEY_SIZE]);
+
+bool eicSessionSetSessionTranscript(EicSession* ctx, const uint8_t* sessionTranscript,
+                                    size_t sessionTranscriptSize);
+
+// Looks up an active session with the given id.
+//
+// Returns NULL if no active session with the given id is found.
+//
+EicSession* eicSessionGetForId(uint32_t sessionId);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ANDROID_HARDWARE_IDENTITY_EIC_SESSION_H
diff --git a/identity/aidl/default/libeic/libeic.h b/identity/aidl/default/libeic/libeic.h
index 20c8896..d89fc9a 100644
--- a/identity/aidl/default/libeic/libeic.h
+++ b/identity/aidl/default/libeic/libeic.h
@@ -27,10 +27,11 @@
  */
 #define EIC_INSIDE_LIBEIC_H
 #include "EicCbor.h"
+#include "EicCommon.h"
 #include "EicOps.h"
 #include "EicPresentation.h"
 #include "EicProvisioning.h"
-#include "EicCommon.h"
+#include "EicSession.h"
 #undef EIC_INSIDE_LIBEIC_H
 
 #ifdef __cplusplus
diff --git a/identity/aidl/vts/Android.bp b/identity/aidl/vts/Android.bp
index e5de91e..7b6f2c8 100644
--- a/identity/aidl/vts/Android.bp
+++ b/identity/aidl/vts/Android.bp
@@ -28,6 +28,7 @@
         "EndToEndTests.cpp",
         "TestCredentialTests.cpp",
         "AuthenticationKeyTests.cpp",
+        "PresentationSessionTests.cpp",
     ],
     shared_libs: [
         "libbinder",
@@ -40,9 +41,9 @@
         "libpuresoftkeymasterdevice",
         "android.hardware.keymaster@4.0",
         "android.hardware.identity-support-lib",
-        "android.hardware.identity-V3-cpp",
-        "android.hardware.keymaster-V3-cpp",
-        "android.hardware.keymaster-V3-ndk",
+        "android.hardware.identity-V4-cpp",
+        "android.hardware.keymaster-V4-cpp",
+        "android.hardware.keymaster-V4-ndk",
         "libkeymaster4support",
         "libkeymaster4_1support",
     ],
diff --git a/identity/aidl/vts/PresentationSessionTests.cpp b/identity/aidl/vts/PresentationSessionTests.cpp
new file mode 100644
index 0000000..88acb26
--- /dev/null
+++ b/identity/aidl/vts/PresentationSessionTests.cpp
@@ -0,0 +1,197 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "PresentationSessionTests"
+
+#include <aidl/Gtest.h>
+#include <aidl/Vintf.h>
+#include <aidl/android/hardware/keymaster/HardwareAuthToken.h>
+#include <aidl/android/hardware/keymaster/VerificationToken.h>
+#include <android-base/logging.h>
+#include <android/hardware/identity/IIdentityCredentialStore.h>
+#include <android/hardware/identity/support/IdentityCredentialSupport.h>
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <cppbor.h>
+#include <cppbor_parse.h>
+#include <gtest/gtest.h>
+#include <future>
+#include <map>
+#include <utility>
+
+#include "Util.h"
+
+namespace android::hardware::identity {
+
+using std::endl;
+using std::make_pair;
+using std::map;
+using std::optional;
+using std::pair;
+using std::string;
+using std::tie;
+using std::vector;
+
+using ::android::sp;
+using ::android::String16;
+using ::android::binder::Status;
+
+using ::android::hardware::keymaster::HardwareAuthToken;
+using ::android::hardware::keymaster::VerificationToken;
+
+class PresentationSessionTests : public testing::TestWithParam<string> {
+  public:
+    virtual void SetUp() override {
+        credentialStore_ = android::waitForDeclaredService<IIdentityCredentialStore>(
+                String16(GetParam().c_str()));
+        ASSERT_NE(credentialStore_, nullptr);
+        halApiVersion_ = credentialStore_->getInterfaceVersion();
+    }
+
+    void provisionData();
+
+    void provisionSingleDocument(const string& docType, vector<uint8_t>* outCredentialData,
+                                 vector<uint8_t>* outCredentialPubKey);
+
+    // Set by provisionData
+    vector<uint8_t> credential1Data_;
+    vector<uint8_t> credential1PubKey_;
+    vector<uint8_t> credential2Data_;
+    vector<uint8_t> credential2PubKey_;
+
+    sp<IIdentityCredentialStore> credentialStore_;
+    int halApiVersion_;
+};
+
+void PresentationSessionTests::provisionData() {
+    provisionSingleDocument("org.iso.18013-5.2019.mdl", &credential1Data_, &credential1PubKey_);
+    provisionSingleDocument("org.blah.OtherhDocTypeXX", &credential2Data_, &credential2PubKey_);
+}
+
+void PresentationSessionTests::provisionSingleDocument(const string& docType,
+                                                       vector<uint8_t>* outCredentialData,
+                                                       vector<uint8_t>* outCredentialPubKey) {
+    bool testCredential = true;
+    sp<IWritableIdentityCredential> wc;
+    ASSERT_TRUE(credentialStore_->createCredential(docType, testCredential, &wc).isOk());
+
+    vector<uint8_t> attestationApplicationId;
+    vector<uint8_t> attestationChallenge = {1};
+    vector<Certificate> certChain;
+    ASSERT_TRUE(wc->getAttestationCertificate(attestationApplicationId, attestationChallenge,
+                                              &certChain)
+                        .isOk());
+
+    optional<vector<uint8_t>> optCredentialPubKey =
+            support::certificateChainGetTopMostKey(certChain[0].encodedCertificate);
+    ASSERT_TRUE(optCredentialPubKey);
+    *outCredentialPubKey = optCredentialPubKey.value();
+
+    size_t proofOfProvisioningSize = 106;
+    // Not in v1 HAL, may fail
+    wc->setExpectedProofOfProvisioningSize(proofOfProvisioningSize);
+
+    ASSERT_TRUE(wc->startPersonalization(1 /* numAccessControlProfiles */,
+                                         {1} /* numDataElementsPerNamespace */)
+                        .isOk());
+
+    // Access control profile 1: open access - don't care about the returned SACP
+    SecureAccessControlProfile sacp;
+    ASSERT_TRUE(wc->addAccessControlProfile(1, {}, false, 0, 0, &sacp).isOk());
+
+    // Single entry - don't care about the returned encrypted data
+    ASSERT_TRUE(wc->beginAddEntry({1}, "ns", "Some Data", 1).isOk());
+    vector<uint8_t> encryptedData;
+    ASSERT_TRUE(wc->addEntryValue({9}, &encryptedData).isOk());
+
+    vector<uint8_t> proofOfProvisioningSignature;
+    Status status = wc->finishAddingEntries(outCredentialData, &proofOfProvisioningSignature);
+    EXPECT_TRUE(status.isOk()) << status.exceptionCode() << ": " << status.exceptionMessage();
+}
+
+// This checks that all methods called on an IIdentityCredential obtained via a session
+// return STATUS_FAILED, except for startRetrieval(), startRetrieveEntryValue(),
+// retrieveEntryValue(), finishRetrieval(), setRequestedNamespaces(), and setVerificationToken().
+//
+TEST_P(PresentationSessionTests, returnsFailureOnUnsupportedMethods) {
+    if (halApiVersion_ < 4) {
+        GTEST_SKIP() << "Need HAL API version 4, have " << halApiVersion_;
+    }
+
+    provisionData();
+
+    sp<IPresentationSession> session;
+    ASSERT_TRUE(credentialStore_
+                        ->createPresentationSession(
+                                CipherSuite::CIPHERSUITE_ECDHE_HKDF_ECDSA_WITH_AES_256_GCM_SHA256,
+                                &session)
+                        .isOk());
+
+    sp<IIdentityCredential> credential;
+    ASSERT_TRUE(session->getCredential(credential1Data_, &credential).isOk());
+
+    Status result;
+
+    vector<uint8_t> signatureProofOfDeletion;
+    result = credential->deleteCredential(&signatureProofOfDeletion);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    vector<uint8_t> ephemeralKeyPair;
+    result = credential->createEphemeralKeyPair(&ephemeralKeyPair);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    result = credential->setReaderEphemeralPublicKey({});
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    int64_t authChallenge;
+    result = credential->createAuthChallenge(&authChallenge);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    Certificate certificate;
+    vector<uint8_t> signingKeyBlob;
+    result = credential->generateSigningKeyPair(&signingKeyBlob, &certificate);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    result = credential->deleteCredentialWithChallenge({}, &signatureProofOfDeletion);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    vector<uint8_t> signatureProofOfOwnership;
+    result = credential->proveOwnership({}, &signatureProofOfOwnership);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+
+    sp<IWritableIdentityCredential> writableCredential;
+    result = credential->updateCredential(&writableCredential);
+    EXPECT_EQ(binder::Status::EX_SERVICE_SPECIFIC, result.exceptionCode());
+    EXPECT_EQ(IIdentityCredentialStore::STATUS_FAILED, result.serviceSpecificErrorCode());
+}
+
+// TODO: need to add tests to check that the returned IIdentityCredential works
+//       as intended.
+
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(PresentationSessionTests);
+INSTANTIATE_TEST_SUITE_P(
+        Identity, PresentationSessionTests,
+        testing::ValuesIn(android::getAidlHalInstanceNames(IIdentityCredentialStore::descriptor)),
+        android::PrintInstanceNameToString);
+
+}  // namespace android::hardware::identity
diff --git a/keymaster/4.0/IKeymasterDevice.hal b/keymaster/4.0/IKeymasterDevice.hal
index dfde060..1c6ae47 100644
--- a/keymaster/4.0/IKeymasterDevice.hal
+++ b/keymaster/4.0/IKeymasterDevice.hal
@@ -1254,7 +1254,8 @@
      * o PaddingMode::RSA_PSS.  For PSS-padded signature operations, the PSS salt length must match
      *   the size of the PSS digest selected.  The digest specified with Tag::DIGEST in inputParams
      *   on begin() must be used as the PSS digest algorithm, MGF1 must be used as the mask
-     *   generation function and SHA1 must be used as the MGF1 digest algorithm.
+     *   generation function and the digest specified with Tag::DIGEST in inputParams must also be
+     *   used as the MGF1 digest algorithm.
      *
      * o PaddingMode::RSA_OAEP.  The digest specified with Tag::DIGEST in inputParams on begin is
      *   used as the OAEP digest algorithm, MGF1 must be used as the mask generation function and
diff --git a/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp b/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp
index 2449268..2ff33b0 100644
--- a/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp
+++ b/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp
@@ -1712,6 +1712,7 @@
                     case PaddingMode::RSA_PSS:
                         EXPECT_GT(EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, RSA_PKCS1_PSS_PADDING), 0);
                         EXPECT_GT(EVP_PKEY_CTX_set_rsa_pss_saltlen(pkey_ctx, EVP_MD_size(md)), 0);
+                        EXPECT_GT(EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, md), 0);
                         break;
                     case PaddingMode::RSA_PKCS1_1_5_SIGN:
                         // PKCS1 is the default; don't need to set anything.
diff --git a/neuralnetworks/1.0/utils/Android.bp b/neuralnetworks/1.0/utils/Android.bp
index 31cdded..ad30e30 100644
--- a/neuralnetworks/1.0/utils/Android.bp
+++ b/neuralnetworks/1.0/utils/Android.bp
@@ -31,16 +31,11 @@
     export_include_dirs: ["include"],
     cflags: ["-Wthread-safety"],
     static_libs: [
+        "android.hardware.neuralnetworks@1.0",
         "libarect",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
     ],
-    shared_libs: [
-        "android.hardware.neuralnetworks@1.0",
-    ],
-    export_static_lib_headers: [
-        "neuralnetworks_utils_hal_common",
-    ],
     target: {
         android: {
             shared_libs: ["libnativewindow"],
@@ -55,19 +50,14 @@
     static_libs: [
         "android.hardware.neuralnetworks@1.0",
         "libgmock",
-        "libneuralnetworks_common",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
         "neuralnetworks_utils_hal_1_0",
     ],
     shared_libs: [
-        "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
         "libbase",
         "libcutils",
-        "libfmq",
         "libhidlbase",
-        "libhidlmemory",
         "liblog",
         "libutils",
     ],
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
index 8bd2fbe..cef76c6 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
@@ -45,12 +45,15 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
   private:
     const nn::SharedPreparedModel kPreparedModel;
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h
index 0a6ca3e..d7c43ef 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h
@@ -65,8 +65,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
index bdb5b54..337c132 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
@@ -49,18 +49,23 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
             const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
             const nn::OptionalDuration& loopTimeoutDuration,
-            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+            const nn::OptionalDuration& timeoutDurationAfterFence,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
 
diff --git a/neuralnetworks/1.0/utils/src/Burst.cpp b/neuralnetworks/1.0/utils/src/Burst.cpp
index 1284721..3642bc6 100644
--- a/neuralnetworks/1.0/utils/src/Burst.cpp
+++ b/neuralnetworks/1.0/utils/src/Burst.cpp
@@ -50,15 +50,20 @@
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalTimePoint& deadline,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
-    return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+    return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration, hints,
+                                   extensionNameToPrefix);
 }
 
 nn::GeneralResult<nn::SharedExecution> Burst::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
-    return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration);
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+    return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration, hints,
+                                                   extensionNameToPrefix);
 }
 
 }  // namespace android::hardware::neuralnetworks::V1_0::utils
diff --git a/neuralnetworks/1.0/utils/src/Device.cpp b/neuralnetworks/1.0/utils/src/Device.cpp
index b0c236e..620d040 100644
--- a/neuralnetworks/1.0/utils/src/Device.cpp
+++ b/neuralnetworks/1.0/utils/src/Device.cpp
@@ -143,7 +143,9 @@
 nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
         const nn::Model& model, nn::ExecutionPreference /*preference*/, nn::Priority /*priority*/,
         nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
-        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
+        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that model is ready for IPC.
     std::optional<nn::Model> maybeModelInShared;
     const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.0/utils/src/PreparedModel.cpp b/neuralnetworks/1.0/utils/src/PreparedModel.cpp
index 00e7d22..b8055fc 100644
--- a/neuralnetworks/1.0/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.0/utils/src/PreparedModel.cpp
@@ -59,7 +59,9 @@
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
         const nn::Request& request, nn::MeasureTiming /*measure*/,
         const nn::OptionalTimePoint& /*deadline*/,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
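+    // The 1.0 HAL cannot convey execution hints or extension name/prefix mappings, so these
+    // parameters exist only for interface compatibility and are ignored here.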
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -94,19 +96,22 @@
 }
 
 nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& /*request*/,
-                             const std::vector<nn::SyncFence>& /*waitFor*/,
-                             nn::MeasureTiming /*measure*/,
-                             const nn::OptionalTimePoint& /*deadline*/,
-                             const nn::OptionalDuration& /*loopTimeoutDuration*/,
-                             const nn::OptionalDuration& /*timeoutDurationAfterFence*/) const {
+PreparedModel::executeFenced(
+        const nn::Request& /*request*/, const std::vector<nn::SyncFence>& /*waitFor*/,
+        nn::MeasureTiming /*measure*/, const nn::OptionalTimePoint& /*deadline*/,
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const nn::OptionalDuration& /*timeoutDurationAfterFence*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
            << "IPreparedModel::executeFenced is not supported on 1.0 HAL service";
 }
 
 nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming /*measure*/,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
diff --git a/neuralnetworks/1.0/utils/test/DeviceTest.cpp b/neuralnetworks/1.0/utils/test/DeviceTest.cpp
index 83e555f..9e9db16 100644
--- a/neuralnetworks/1.0/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.0/utils/test/DeviceTest.cpp
@@ -380,7 +380,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -399,7 +399,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -417,7 +417,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -435,7 +435,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -452,7 +452,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -469,7 +469,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -488,7 +488,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp
index 7820c06..e03a98d 100644
--- a/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp
@@ -121,7 +121,7 @@
             .WillOnce(Invoke(makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::NONE)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
@@ -138,7 +138,7 @@
                                          V1_0::ErrorStatus::GENERAL_FAILURE)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -155,7 +155,7 @@
                     makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -171,7 +171,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -187,7 +187,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -205,7 +205,7 @@
     EXPECT_CALL(*mockPreparedModel, execute(_, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -218,7 +218,7 @@
     const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -235,7 +235,7 @@
             .WillRepeatedly(Invoke(makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::NONE)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -258,7 +258,7 @@
                                          V1_0::ErrorStatus::GENERAL_FAILURE)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -279,7 +279,7 @@
                     makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -299,7 +299,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -319,7 +319,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -341,7 +341,7 @@
     EXPECT_CALL(*mockPreparedModel, execute(_, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -358,7 +358,7 @@
     const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
diff --git a/neuralnetworks/1.1/utils/Android.bp b/neuralnetworks/1.1/utils/Android.bp
index 737ff58..4b8999f 100644
--- a/neuralnetworks/1.1/utils/Android.bp
+++ b/neuralnetworks/1.1/utils/Android.bp
@@ -31,17 +31,12 @@
     export_include_dirs: ["include"],
     cflags: ["-Wthread-safety"],
     static_libs: [
+        "android.hardware.neuralnetworks@1.0",
+        "android.hardware.neuralnetworks@1.1",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
         "neuralnetworks_utils_hal_1_0",
     ],
-    shared_libs: [
-        "android.hardware.neuralnetworks@1.0",
-        "android.hardware.neuralnetworks@1.1",
-    ],
-    export_static_lib_headers: [
-        "neuralnetworks_utils_hal_common",
-    ],
 }
 
 cc_test {
@@ -52,20 +47,15 @@
         "android.hardware.neuralnetworks@1.0",
         "android.hardware.neuralnetworks@1.1",
         "libgmock",
-        "libneuralnetworks_common",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
         "neuralnetworks_utils_hal_1_0",
         "neuralnetworks_utils_hal_1_1",
     ],
     shared_libs: [
-        "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
         "libbase",
         "libcutils",
-        "libfmq",
         "libhidlbase",
-        "libhidlmemory",
         "liblog",
         "libutils",
     ],
diff --git a/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h b/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h
index d6bd36a..38ca138 100644
--- a/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h
+++ b/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h
@@ -64,8 +64,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.1/utils/src/Device.cpp b/neuralnetworks/1.1/utils/src/Device.cpp
index 3effa84..28f3276 100644
--- a/neuralnetworks/1.1/utils/src/Device.cpp
+++ b/neuralnetworks/1.1/utils/src/Device.cpp
@@ -143,7 +143,9 @@
 nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
         const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
         nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
-        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
+        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that model is ready for IPC.
     std::optional<nn::Model> maybeModelInShared;
     const nn::Model& modelInShared =
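For context on the call-site churn that follows: the widened 1.1 prepareModel() only appends two trailing parameters (hints and extensionNameToPrefix), and the 1.1 utils layer ignores both, so existing callers change mechanically. A minimal sketch of a call under that assumption — device and model are illustrative locals, not names from this patch:

    // The two new trailing arguments are discarded by the 1.1 backend, so callers
    // with nothing to pass simply supply empty vectors, as the tests below do.
    const auto prepared = device->prepareModel(model,
                                               nn::ExecutionPreference::DEFAULT,
                                               nn::Priority::DEFAULT,
                                               /*deadline=*/{}, /*modelCache=*/{},
                                               /*dataCache=*/{}, /*token=*/{},
                                               /*hints=*/{},
                                               /*extensionNameToPrefix=*/{});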
diff --git a/neuralnetworks/1.1/utils/test/DeviceTest.cpp b/neuralnetworks/1.1/utils/test/DeviceTest.cpp
index 2248da6..8ab87bc 100644
--- a/neuralnetworks/1.1/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.1/utils/test/DeviceTest.cpp
@@ -390,7 +390,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -409,7 +409,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -427,7 +427,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -445,7 +445,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -462,7 +462,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -479,7 +479,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -498,7 +498,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.2/utils/Android.bp b/neuralnetworks/1.2/utils/Android.bp
index 4eefb0f..4c5f065 100644
--- a/neuralnetworks/1.2/utils/Android.bp
+++ b/neuralnetworks/1.2/utils/Android.bp
@@ -31,19 +31,14 @@
     export_include_dirs: ["include"],
     cflags: ["-Wthread-safety"],
     static_libs: [
-        "neuralnetworks_types",
-        "neuralnetworks_utils_hal_common",
-        "neuralnetworks_utils_hal_1_0",
-        "neuralnetworks_utils_hal_1_1",
-    ],
-    shared_libs: [
         "android.hardware.neuralnetworks@1.0",
         "android.hardware.neuralnetworks@1.1",
         "android.hardware.neuralnetworks@1.2",
         "libfmq",
-    ],
-    export_static_lib_headers: [
+        "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
+        "neuralnetworks_utils_hal_1_0",
+        "neuralnetworks_utils_hal_1_1",
     ],
     product_variables: {
         debuggable: { // eng and userdebug builds
@@ -71,7 +66,6 @@
         "android.hardware.neuralnetworks@1.1",
         "android.hardware.neuralnetworks@1.2",
         "libgmock",
-        "libneuralnetworks_common",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
         "neuralnetworks_utils_hal_1_0",
@@ -79,13 +73,10 @@
         "neuralnetworks_utils_hal_1_2",
     ],
     shared_libs: [
-        "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
         "libbase",
         "libcutils",
         "libfmq",
         "libhidlbase",
-        "libhidlmemory",
         "liblog",
         "libutils",
     ],
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h
index ac9411c..1b28476 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h
@@ -170,13 +170,16 @@
     // See IBurst::execute for information on this method.
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     // See IBurst::createReusableExecution for information on this method.
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     // If fallback is not nullptr, this method will invoke the fallback function to try another
     // execution path if the packet could not be sent. Otherwise, failing to send the packet will
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h
index c3348aa..4f13adc 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h
@@ -37,7 +37,7 @@
 GeneralResult<Operand::ExtraParams> unvalidatedConvert(
         const hal::V1_2::Operand::ExtraParams& extraParams);
 GeneralResult<Model> unvalidatedConvert(const hal::V1_2::Model& model);
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
         const hal::V1_2::Model::ExtensionNameAndPrefix& extensionNameAndPrefix);
 GeneralResult<OutputShape> unvalidatedConvert(const hal::V1_2::OutputShape& outputShape);
 GeneralResult<MeasureTiming> unvalidatedConvert(const hal::V1_2::MeasureTiming& measureTiming);
@@ -78,7 +78,7 @@
         const nn::Operand::ExtraParams& extraParams);
 nn::GeneralResult<Model> unvalidatedConvert(const nn::Model& model);
 nn::GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
-        const nn::Model::ExtensionNameAndPrefix& extensionNameAndPrefix);
+        const nn::ExtensionNameAndPrefix& extensionNameAndPrefix);
 nn::GeneralResult<OutputShape> unvalidatedConvert(const nn::OutputShape& outputShape);
 nn::GeneralResult<MeasureTiming> unvalidatedConvert(const nn::MeasureTiming& measureTiming);
 nn::GeneralResult<Timing> unvalidatedConvert(const nn::Timing& timing);
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h
index e7ac172..d92cf50 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h
@@ -83,8 +83,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
index 1150e5e..72a5b2f 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
@@ -49,18 +49,23 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
             const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
             const nn::OptionalDuration& loopTimeoutDuration,
-            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+            const nn::OptionalDuration& timeoutDurationAfterFence,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
 
diff --git a/neuralnetworks/1.2/utils/src/Burst.cpp b/neuralnetworks/1.2/utils/src/Burst.cpp
index 911fbfa..23e8070 100644
--- a/neuralnetworks/1.2/utils/src/Burst.cpp
+++ b/neuralnetworks/1.2/utils/src/Burst.cpp
@@ -305,8 +305,9 @@
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalTimePoint& deadline,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // This is the first point when we know an execution is occurring, so begin to collect
     // systraces. Note that the first point we can begin collecting systraces in
     // ExecutionBurstServer is when the RequestChannelReceiver realizes there is data in the FMQ, so
@@ -317,7 +318,7 @@
     // fall back to another execution path
     if (!compliantVersion(request).ok()) {
         // fallback to another execution path if the packet could not be sent
-        return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
+        return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration, {}, {});
     }
 
     // ensure that request is ready for IPC
@@ -346,7 +347,7 @@
     // send request packet
     const auto requestPacket = serialize(hidlRequest, hidlMeasure, slots);
     const auto fallback = [this, &request, measure, &deadline, &loopTimeoutDuration] {
-        return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
+        return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration, {}, {});
     };
     return executeInternal(requestPacket, relocation, fallback);
 }
@@ -354,14 +355,17 @@
 // See IBurst::createReusableExecution for information on this method.
 nn::GeneralResult<nn::SharedExecution> Burst::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "Burst::createReusableExecution");
 
     // if the request is valid but of a higher version than what's supported in burst execution,
     // fall back to another execution path
     if (!compliantVersion(request).ok()) {
         // fallback to another execution path if the packet could not be sent
-        return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration);
+        return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration, {},
+                                                       {});
     }
 
     // ensure that request is ready for IPC
diff --git a/neuralnetworks/1.2/utils/src/Conversions.cpp b/neuralnetworks/1.2/utils/src/Conversions.cpp
index 838d9c4..62ec2ed 100644
--- a/neuralnetworks/1.2/utils/src/Conversions.cpp
+++ b/neuralnetworks/1.2/utils/src/Conversions.cpp
@@ -212,9 +212,9 @@
     };
 }
 
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
         const hal::V1_2::Model::ExtensionNameAndPrefix& extensionNameAndPrefix) {
-    return Model::ExtensionNameAndPrefix{
+    return ExtensionNameAndPrefix{
             .name = extensionNameAndPrefix.name,
             .prefix = extensionNameAndPrefix.prefix,
     };
@@ -495,7 +495,7 @@
 }
 
 nn::GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
-        const nn::Model::ExtensionNameAndPrefix& extensionNameAndPrefix) {
+        const nn::ExtensionNameAndPrefix& extensionNameAndPrefix) {
     return Model::ExtensionNameAndPrefix{
             .name = extensionNameAndPrefix.name,
             .prefix = extensionNameAndPrefix.prefix,
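Here the extension descriptor moves from the nested canonical type Model::ExtensionNameAndPrefix to a standalone nn::ExtensionNameAndPrefix; only the converter's parameter and return types change, while the field-by-field copy of name and prefix stays the same. A sketch of the standalone type at a use site — the literal values are made up for illustration:

    // Canonical extension descriptor, no longer nested inside nn::Model.
    const nn::ExtensionNameAndPrefix entry{
            .name = "com.example.test_extension",  // assumed example extension name
            .prefix = 0x1234,                      // assumed example prefix value
    };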
diff --git a/neuralnetworks/1.2/utils/src/Device.cpp b/neuralnetworks/1.2/utils/src/Device.cpp
index e7acecd..3a58d2c 100644
--- a/neuralnetworks/1.2/utils/src/Device.cpp
+++ b/neuralnetworks/1.2/utils/src/Device.cpp
@@ -236,7 +236,9 @@
 nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
         const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
         nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& modelCache,
-        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that model is ready for IPC.
     std::optional<nn::Model> maybeModelInShared;
     const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.2/utils/src/PreparedModel.cpp b/neuralnetworks/1.2/utils/src/PreparedModel.cpp
index 6df3df3..feb3951 100644
--- a/neuralnetworks/1.2/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.2/utils/src/PreparedModel.cpp
@@ -91,7 +91,9 @@
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
         const nn::Request& request, nn::MeasureTiming measure,
         const nn::OptionalTimePoint& /*deadline*/,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -123,19 +125,22 @@
 }
 
 nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& /*request*/,
-                             const std::vector<nn::SyncFence>& /*waitFor*/,
-                             nn::MeasureTiming /*measure*/,
-                             const nn::OptionalTimePoint& /*deadline*/,
-                             const nn::OptionalDuration& /*loopTimeoutDuration*/,
-                             const nn::OptionalDuration& /*timeoutDurationAfterFence*/) const {
+PreparedModel::executeFenced(
+        const nn::Request& /*request*/, const std::vector<nn::SyncFence>& /*waitFor*/,
+        nn::MeasureTiming /*measure*/, const nn::OptionalTimePoint& /*deadline*/,
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const nn::OptionalDuration& /*timeoutDurationAfterFence*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
            << "IPreparedModel::executeFenced is not supported on 1.2 HAL service";
 }
 
 nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
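The 1.2 PreparedModel methods follow the same pattern: execute(), executeFenced(), and createReusableExecution() each gain the hints and extensionNameToPrefix parameters and discard them. A sketch of the resulting call shapes, assuming a preparedModel and request local to the caller (illustrative names, not from this change):

    // execute() and createReusableExecution() each take two extra trailing
    // arguments; both are passed empty when the caller has nothing to forward.
    const auto result = preparedModel->execute(request, /*measure=*/{},
                                               /*deadline=*/{}, /*loopTimeoutDuration=*/{},
                                               /*hints=*/{}, /*extensionNameToPrefix=*/{});
    const auto execution = preparedModel->createReusableExecution(
            request, /*measure=*/{}, /*loopTimeoutDuration=*/{},
            /*hints=*/{}, /*extensionNameToPrefix=*/{});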
diff --git a/neuralnetworks/1.2/utils/test/DeviceTest.cpp b/neuralnetworks/1.2/utils/test/DeviceTest.cpp
index 1dc6285..0d8c141 100644
--- a/neuralnetworks/1.2/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.2/utils/test/DeviceTest.cpp
@@ -636,7 +636,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -655,7 +655,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -673,7 +673,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -691,7 +691,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -708,7 +708,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -725,7 +725,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -746,7 +746,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp
index 5e2ad79..a5ec9d3 100644
--- a/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp
@@ -154,7 +154,7 @@
             .WillOnce(Invoke(makeExecuteSynchronously(V1_0::ErrorStatus::NONE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
@@ -172,7 +172,7 @@
                     makeExecuteSynchronously(V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -189,7 +189,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -206,7 +206,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -224,7 +224,7 @@
                                                        V1_0::ErrorStatus::NONE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
@@ -243,7 +243,7 @@
                                                        kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -261,7 +261,7 @@
                     V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -278,7 +278,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -295,7 +295,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -314,7 +314,7 @@
     EXPECT_CALL(*mockPreparedModel, execute_1_2(_, _, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -328,7 +328,7 @@
             PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -347,7 +347,7 @@
                     Invoke(makeExecuteSynchronously(V1_0::ErrorStatus::NONE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -371,7 +371,7 @@
                     makeExecuteSynchronously(V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -392,7 +392,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -413,7 +413,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -436,7 +436,7 @@
                     V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::NONE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -461,7 +461,7 @@
                                                        kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -483,7 +483,7 @@
                     V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -504,7 +504,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -525,7 +525,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -548,7 +548,7 @@
     EXPECT_CALL(*mockPreparedModel, execute_1_2(_, _, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -566,7 +566,7 @@
             PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
diff --git a/neuralnetworks/1.3/utils/Android.bp b/neuralnetworks/1.3/utils/Android.bp
index 7acb4fc..c512dda 100644
--- a/neuralnetworks/1.3/utils/Android.bp
+++ b/neuralnetworks/1.3/utils/Android.bp
@@ -31,21 +31,16 @@
     export_include_dirs: ["include"],
     cflags: ["-Wthread-safety"],
     static_libs: [
-        "neuralnetworks_types",
-        "neuralnetworks_utils_hal_common",
-        "neuralnetworks_utils_hal_1_0",
-        "neuralnetworks_utils_hal_1_1",
-        "neuralnetworks_utils_hal_1_2",
-    ],
-    shared_libs: [
         "android.hardware.neuralnetworks@1.0",
         "android.hardware.neuralnetworks@1.1",
         "android.hardware.neuralnetworks@1.2",
         "android.hardware.neuralnetworks@1.3",
         "libfmq",
-    ],
-    export_static_lib_headers: [
+        "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
+        "neuralnetworks_utils_hal_1_0",
+        "neuralnetworks_utils_hal_1_1",
+        "neuralnetworks_utils_hal_1_2",
     ],
     target: {
         host: {
@@ -69,7 +64,6 @@
         "android.hardware.neuralnetworks@1.2",
         "android.hardware.neuralnetworks@1.3",
         "libgmock",
-        "libneuralnetworks_common",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
         "neuralnetworks_utils_hal_1_0",
@@ -78,13 +72,10 @@
         "neuralnetworks_utils_hal_1_3",
     ],
     shared_libs: [
-        "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
         "libbase",
         "libcutils",
         "libfmq",
         "libhidlbase",
-        "libhidlmemory",
         "liblog",
         "libutils",
     ],
diff --git a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h
index c3c6fc4..cf5e5ea0 100644
--- a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h
+++ b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h
@@ -66,8 +66,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
index 480438d..124cc43 100644
--- a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
+++ b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
@@ -48,18 +48,23 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
             const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
             const nn::OptionalDuration& loopTimeoutDuration,
-            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+            const nn::OptionalDuration& timeoutDurationAfterFence,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
 
diff --git a/neuralnetworks/1.3/utils/src/Conversions.cpp b/neuralnetworks/1.3/utils/src/Conversions.cpp
index a1d414c..09e9d80 100644
--- a/neuralnetworks/1.3/utils/src/Conversions.cpp
+++ b/neuralnetworks/1.3/utils/src/Conversions.cpp
@@ -396,7 +396,7 @@
 }
 
 nn::GeneralResult<V1_2::Model::ExtensionNameAndPrefix> unvalidatedConvert(
-        const nn::Model::ExtensionNameAndPrefix& extensionNameAndPrefix) {
+        const nn::ExtensionNameAndPrefix& extensionNameAndPrefix) {
     return V1_2::utils::unvalidatedConvert(extensionNameAndPrefix);
 }
 
diff --git a/neuralnetworks/1.3/utils/src/Device.cpp b/neuralnetworks/1.3/utils/src/Device.cpp
index 9517fda..824cec6 100644
--- a/neuralnetworks/1.3/utils/src/Device.cpp
+++ b/neuralnetworks/1.3/utils/src/Device.cpp
@@ -187,7 +187,9 @@
 nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
         const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
         nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that model is ready for IPC.
     std::optional<nn::Model> maybeModelInShared;
     const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.3/utils/src/PreparedModel.cpp b/neuralnetworks/1.3/utils/src/PreparedModel.cpp
index ce977e5..b92f877 100644
--- a/neuralnetworks/1.3/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.3/utils/src/PreparedModel.cpp
@@ -135,8 +135,9 @@
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalTimePoint& deadline,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -174,10 +175,13 @@
 }
 
 nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
-                             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
-                             const nn::OptionalDuration& loopTimeoutDuration,
-                             const nn::OptionalDuration& timeoutDurationAfterFence) const {
+PreparedModel::executeFenced(
+        const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
+        nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const nn::OptionalDuration& timeoutDurationAfterFence,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -230,7 +234,9 @@
 
 nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
diff --git a/neuralnetworks/1.3/utils/test/DeviceTest.cpp b/neuralnetworks/1.3/utils/test/DeviceTest.cpp
index 7eba4bc..6f48837 100644
--- a/neuralnetworks/1.3/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.3/utils/test/DeviceTest.cpp
@@ -658,7 +658,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -677,7 +677,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -695,7 +695,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -713,7 +713,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -730,7 +730,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -747,7 +747,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -768,7 +768,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp
index 6dbbd6b..51b5d29 100644
--- a/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp
@@ -182,7 +182,7 @@
             .WillOnce(Invoke(makeExecuteSynchronously(V1_3::ErrorStatus::NONE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
@@ -200,7 +200,7 @@
                     makeExecuteSynchronously(V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -217,7 +217,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -234,7 +234,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -252,7 +252,7 @@
                                                        V1_3::ErrorStatus::NONE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
@@ -271,7 +271,7 @@
                                                        kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -289,7 +289,7 @@
                     V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -306,7 +306,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -323,7 +323,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -344,7 +344,7 @@
             .WillOnce(InvokeWithoutArgs(ret));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -366,7 +366,7 @@
             .WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -396,7 +396,7 @@
             .WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -422,7 +422,7 @@
                     makeExecuteFencedReturn(V1_3::ErrorStatus::GENERAL_FAILURE, {}, nullptr)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -439,7 +439,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -456,7 +456,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -475,7 +475,7 @@
                     Invoke(makeExecuteSynchronously(V1_3::ErrorStatus::NONE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -499,7 +499,7 @@
                     makeExecuteSynchronously(V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -520,7 +520,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -541,7 +541,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -564,7 +564,7 @@
                     V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::NONE, {}, kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -589,7 +589,7 @@
                                                        kNoTiming)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -611,7 +611,7 @@
                     V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -628,7 +628,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -649,7 +649,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -674,7 +674,7 @@
             .WillOnce(InvokeWithoutArgs(ret));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -702,7 +702,7 @@
                     Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -738,7 +738,7 @@
             .WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -768,7 +768,7 @@
                     makeExecuteFencedReturn(V1_3::ErrorStatus::GENERAL_FAILURE, {}, nullptr)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -789,7 +789,7 @@
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -810,7 +810,7 @@
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
diff --git a/neuralnetworks/aidl/Android.bp b/neuralnetworks/aidl/Android.bp
index 065105a..aa4c4b6 100644
--- a/neuralnetworks/aidl/Android.bp
+++ b/neuralnetworks/aidl/Android.bp
@@ -38,5 +38,6 @@
     versions: [
         "1",
         "2",
+        "3",
     ],
 }
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/.hash b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/.hash
new file mode 100644
index 0000000..3b9f018
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/.hash
@@ -0,0 +1 @@
+8c2c4ca2327e6f779dad6119c03d832ea4e1def1
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/BufferDesc.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/BufferDesc.aidl
new file mode 100644
index 0000000..05cec76
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/BufferDesc.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable BufferDesc {
+  int[] dimensions;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/BufferRole.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/BufferRole.aidl
new file mode 100644
index 0000000..10a6b75
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/BufferRole.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable BufferRole {
+  int modelIndex;
+  int ioIndex;
+  float probability;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Capabilities.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Capabilities.aidl
new file mode 100644
index 0000000..30877c0
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Capabilities.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Capabilities {
+  android.hardware.neuralnetworks.PerformanceInfo relaxedFloat32toFloat16PerformanceScalar;
+  android.hardware.neuralnetworks.PerformanceInfo relaxedFloat32toFloat16PerformanceTensor;
+  android.hardware.neuralnetworks.OperandPerformance[] operandPerformance;
+  android.hardware.neuralnetworks.PerformanceInfo ifPerformance;
+  android.hardware.neuralnetworks.PerformanceInfo whilePerformance;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DataLocation.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DataLocation.aidl
new file mode 100644
index 0000000..db49a38
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DataLocation.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable DataLocation {
+  int poolIndex;
+  long offset;
+  long length;
+  long padding;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DeviceBuffer.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DeviceBuffer.aidl
new file mode 100644
index 0000000..7cdd6db
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DeviceBuffer.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable DeviceBuffer {
+  android.hardware.neuralnetworks.IBuffer buffer;
+  int token;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DeviceType.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DeviceType.aidl
new file mode 100644
index 0000000..82fe8ae
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/DeviceType.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum DeviceType {
+  OTHER = 1,
+  CPU = 2,
+  GPU = 3,
+  ACCELERATOR = 4,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ErrorStatus.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ErrorStatus.aidl
new file mode 100644
index 0000000..57d5d6e
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ErrorStatus.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum ErrorStatus {
+  NONE = 0,
+  DEVICE_UNAVAILABLE = 1,
+  GENERAL_FAILURE = 2,
+  OUTPUT_INSUFFICIENT_SIZE = 3,
+  INVALID_ARGUMENT = 4,
+  MISSED_DEADLINE_TRANSIENT = 5,
+  MISSED_DEADLINE_PERSISTENT = 6,
+  RESOURCE_EXHAUSTED_TRANSIENT = 7,
+  RESOURCE_EXHAUSTED_PERSISTENT = 8,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExecutionPreference.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExecutionPreference.aidl
new file mode 100644
index 0000000..4352d8f
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExecutionPreference.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum ExecutionPreference {
+  LOW_POWER = 0,
+  FAST_SINGLE_ANSWER = 1,
+  SUSTAINED_SPEED = 2,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExecutionResult.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExecutionResult.aidl
new file mode 100644
index 0000000..44e9922
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExecutionResult.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable ExecutionResult {
+  boolean outputSufficientSize;
+  android.hardware.neuralnetworks.OutputShape[] outputShapes;
+  android.hardware.neuralnetworks.Timing timing;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Extension.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Extension.aidl
new file mode 100644
index 0000000..c47028d
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Extension.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Extension {
+  String name;
+  android.hardware.neuralnetworks.ExtensionOperandTypeInformation[] operandTypes;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
new file mode 100644
index 0000000..6c287fd
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable ExtensionNameAndPrefix {
+  String name;
+  char prefix;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExtensionOperandTypeInformation.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExtensionOperandTypeInformation.aidl
new file mode 100644
index 0000000..a3680aa
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/ExtensionOperandTypeInformation.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable ExtensionOperandTypeInformation {
+  char type;
+  boolean isTensor;
+  int byteSize;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/FencedExecutionResult.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/FencedExecutionResult.aidl
new file mode 100644
index 0000000..7952b34
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/FencedExecutionResult.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable FencedExecutionResult {
+  android.hardware.neuralnetworks.IFencedExecutionCallback callback;
+  @nullable ParcelFileDescriptor syncFence;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/FusedActivationFunc.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/FusedActivationFunc.aidl
new file mode 100644
index 0000000..7e61bbb
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/FusedActivationFunc.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum FusedActivationFunc {
+  NONE = 0,
+  RELU = 1,
+  RELU1 = 2,
+  RELU6 = 3,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IBuffer.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IBuffer.aidl
new file mode 100644
index 0000000..f10e7e2
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IBuffer.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IBuffer {
+  void copyFrom(in android.hardware.neuralnetworks.Memory src, in int[] dimensions);
+  void copyTo(in android.hardware.neuralnetworks.Memory dst);
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IBurst.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IBurst.aidl
new file mode 100644
index 0000000..eb3d0b0
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IBurst.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IBurst {
+  android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in long[] memoryIdentifierTokens, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs);
+  void releaseMemoryResource(in long memoryIdentifierToken);
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IDevice.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IDevice.aidl
new file mode 100644
index 0000000..c9c67f2
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IDevice.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IDevice {
+  android.hardware.neuralnetworks.DeviceBuffer allocate(in android.hardware.neuralnetworks.BufferDesc desc, in android.hardware.neuralnetworks.IPreparedModelParcel[] preparedModels, in android.hardware.neuralnetworks.BufferRole[] inputRoles, in android.hardware.neuralnetworks.BufferRole[] outputRoles);
+  android.hardware.neuralnetworks.Capabilities getCapabilities();
+  android.hardware.neuralnetworks.NumberOfCacheFiles getNumberOfCacheFilesNeeded();
+  android.hardware.neuralnetworks.Extension[] getSupportedExtensions();
+  boolean[] getSupportedOperations(in android.hardware.neuralnetworks.Model model);
+  android.hardware.neuralnetworks.DeviceType getType();
+  String getVersionString();
+  void prepareModel(in android.hardware.neuralnetworks.Model model, in android.hardware.neuralnetworks.ExecutionPreference preference, in android.hardware.neuralnetworks.Priority priority, in long deadlineNs, in ParcelFileDescriptor[] modelCache, in ParcelFileDescriptor[] dataCache, in byte[] token, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
+  void prepareModelFromCache(in long deadlineNs, in ParcelFileDescriptor[] modelCache, in ParcelFileDescriptor[] dataCache, in byte[] token, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
+  const int BYTE_SIZE_OF_CACHE_TOKEN = 32;
+  const int MAX_NUMBER_OF_CACHE_FILES = 32;
+  const int EXTENSION_TYPE_HIGH_BITS_PREFIX = 15;
+  const int EXTENSION_TYPE_LOW_BITS_TYPE = 16;
+  const int OPERAND_TYPE_BASE_MAX = 65535;
+  const int OPERATION_TYPE_BASE_MAX = 65535;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IFencedExecutionCallback.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IFencedExecutionCallback.aidl
new file mode 100644
index 0000000..0bfb80a
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IFencedExecutionCallback.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IFencedExecutionCallback {
+  android.hardware.neuralnetworks.ErrorStatus getExecutionInfo(out android.hardware.neuralnetworks.Timing timingLaunched, out android.hardware.neuralnetworks.Timing timingFenced);
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModel.aidl
new file mode 100644
index 0000000..fccb5dc
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IPreparedModel {
+  android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs);
+  android.hardware.neuralnetworks.FencedExecutionResult executeFenced(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs, in long durationNs);
+  android.hardware.neuralnetworks.IBurst configureExecutionBurst();
+  const long DEFAULT_LOOP_TIMEOUT_DURATION_NS = 2000000000;
+  const long MAXIMUM_LOOP_TIMEOUT_DURATION_NS = 15000000000;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModelCallback.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModelCallback.aidl
new file mode 100644
index 0000000..e0c763b
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModelCallback.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IPreparedModelCallback {
+  void notify(in android.hardware.neuralnetworks.ErrorStatus status, in android.hardware.neuralnetworks.IPreparedModel preparedModel);
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModelParcel.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModelParcel.aidl
new file mode 100644
index 0000000..dbedf12
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/IPreparedModelParcel.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable IPreparedModelParcel {
+  android.hardware.neuralnetworks.IPreparedModel preparedModel;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Memory.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Memory.aidl
new file mode 100644
index 0000000..37fa102
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Memory.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+union Memory {
+  android.hardware.common.Ashmem ashmem;
+  android.hardware.common.MappableFile mappableFile;
+  android.hardware.graphics.common.HardwareBuffer hardwareBuffer;
+}
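
Memory is a tagged union: exactly one of the three alternatives is set, and the driver maps whichever one it receives. A minimal sketch of wrapping an anonymous shared-memory region as the ashmem alternative, assuming the NDK AIDL bindings (including the generated make<Tag>() union helper) and the libandroid ASharedMemory API; the helper name is hypothetical.

    #include <cstddef>
    #include <utility>
    #include <android/sharedmem.h>
    #include <aidl/android/hardware/common/Ashmem.h>
    #include <aidl/android/hardware/neuralnetworks/Memory.h>

    namespace nn = aidl::android::hardware::neuralnetworks;

    // Creates a size-byte anonymous region and hands ownership of its fd to
    // the returned Memory.
    nn::Memory makeAshmemPool(size_t size) {
        aidl::android::hardware::common::Ashmem ashmem;
        ashmem.fd = ndk::ScopedFileDescriptor(ASharedMemory_create("nn_pool", size));
        ashmem.size = static_cast<int64_t>(size);
        return nn::Memory::make<nn::Memory::Tag::ashmem>(std::move(ashmem));
    }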
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Model.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Model.aidl
new file mode 100644
index 0000000..30d8dda
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Model.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Model {
+  android.hardware.neuralnetworks.Subgraph main;
+  android.hardware.neuralnetworks.Subgraph[] referenced;
+  byte[] operandValues;
+  android.hardware.neuralnetworks.Memory[] pools;
+  boolean relaxComputationFloat32toFloat16;
+  android.hardware.neuralnetworks.ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/NumberOfCacheFiles.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/NumberOfCacheFiles.aidl
new file mode 100644
index 0000000..9314760
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/NumberOfCacheFiles.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable NumberOfCacheFiles {
+  int numModelCache;
+  int numDataCache;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Operand.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Operand.aidl
new file mode 100644
index 0000000..1d9bdd8
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Operand.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Operand {
+  android.hardware.neuralnetworks.OperandType type = android.hardware.neuralnetworks.OperandType.FLOAT32;
+  int[] dimensions;
+  float scale;
+  int zeroPoint;
+  android.hardware.neuralnetworks.OperandLifeTime lifetime = android.hardware.neuralnetworks.OperandLifeTime.TEMPORARY_VARIABLE;
+  android.hardware.neuralnetworks.DataLocation location;
+  @nullable android.hardware.neuralnetworks.OperandExtraParams extraParams;
+}
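
For the quantized operand types (for example TENSOR_QUANT8_ASYMM), scale and zeroPoint define the affine mapping realValue = scale * (quantizedValue - zeroPoint). A tiny illustrative helper, not part of the HAL:

    #include <cstdint>

    // Maps a stored 8-bit asymmetric-quantized value back to its real-number
    // interpretation.
    float dequantize(uint8_t quantized, float scale, int32_t zeroPoint) {
        return scale * (static_cast<int32_t>(quantized) - zeroPoint);
    }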
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandExtraParams.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandExtraParams.aidl
new file mode 100644
index 0000000..14792cf
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandExtraParams.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+union OperandExtraParams {
+  android.hardware.neuralnetworks.SymmPerChannelQuantParams channelQuant;
+  byte[] extension;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandLifeTime.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandLifeTime.aidl
new file mode 100644
index 0000000..40adfb1
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandLifeTime.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum OperandLifeTime {
+  TEMPORARY_VARIABLE = 0,
+  SUBGRAPH_INPUT = 1,
+  SUBGRAPH_OUTPUT = 2,
+  CONSTANT_COPY = 3,
+  CONSTANT_POOL = 4,
+  NO_VALUE = 5,
+  SUBGRAPH = 6,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandPerformance.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandPerformance.aidl
new file mode 100644
index 0000000..ebb361b
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandPerformance.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable OperandPerformance {
+  android.hardware.neuralnetworks.OperandType type = android.hardware.neuralnetworks.OperandType.FLOAT32;
+  android.hardware.neuralnetworks.PerformanceInfo info;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandType.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandType.aidl
new file mode 100644
index 0000000..9f2c759
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperandType.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum OperandType {
+  FLOAT32 = 0,
+  INT32 = 1,
+  UINT32 = 2,
+  TENSOR_FLOAT32 = 3,
+  TENSOR_INT32 = 4,
+  TENSOR_QUANT8_ASYMM = 5,
+  BOOL = 6,
+  TENSOR_QUANT16_SYMM = 7,
+  TENSOR_FLOAT16 = 8,
+  TENSOR_BOOL8 = 9,
+  FLOAT16 = 10,
+  TENSOR_QUANT8_SYMM_PER_CHANNEL = 11,
+  TENSOR_QUANT16_ASYMM = 12,
+  TENSOR_QUANT8_SYMM = 13,
+  TENSOR_QUANT8_ASYMM_SIGNED = 14,
+  SUBGRAPH = 15,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Operation.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Operation.aidl
new file mode 100644
index 0000000..a4a3fbe
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Operation.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Operation {
+  android.hardware.neuralnetworks.OperationType type = android.hardware.neuralnetworks.OperationType.ADD;
+  int[] inputs;
+  int[] outputs;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperationType.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperationType.aidl
new file mode 100644
index 0000000..34506c8
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OperationType.aidl
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum OperationType {
+  ADD = 0,
+  AVERAGE_POOL_2D = 1,
+  CONCATENATION = 2,
+  CONV_2D = 3,
+  DEPTHWISE_CONV_2D = 4,
+  DEPTH_TO_SPACE = 5,
+  DEQUANTIZE = 6,
+  EMBEDDING_LOOKUP = 7,
+  FLOOR = 8,
+  FULLY_CONNECTED = 9,
+  HASHTABLE_LOOKUP = 10,
+  L2_NORMALIZATION = 11,
+  L2_POOL_2D = 12,
+  LOCAL_RESPONSE_NORMALIZATION = 13,
+  LOGISTIC = 14,
+  LSH_PROJECTION = 15,
+  LSTM = 16,
+  MAX_POOL_2D = 17,
+  MUL = 18,
+  RELU = 19,
+  RELU1 = 20,
+  RELU6 = 21,
+  RESHAPE = 22,
+  RESIZE_BILINEAR = 23,
+  RNN = 24,
+  SOFTMAX = 25,
+  SPACE_TO_DEPTH = 26,
+  SVDF = 27,
+  TANH = 28,
+  BATCH_TO_SPACE_ND = 29,
+  DIV = 30,
+  MEAN = 31,
+  PAD = 32,
+  SPACE_TO_BATCH_ND = 33,
+  SQUEEZE = 34,
+  STRIDED_SLICE = 35,
+  SUB = 36,
+  TRANSPOSE = 37,
+  ABS = 38,
+  ARGMAX = 39,
+  ARGMIN = 40,
+  AXIS_ALIGNED_BBOX_TRANSFORM = 41,
+  BIDIRECTIONAL_SEQUENCE_LSTM = 42,
+  BIDIRECTIONAL_SEQUENCE_RNN = 43,
+  BOX_WITH_NMS_LIMIT = 44,
+  CAST = 45,
+  CHANNEL_SHUFFLE = 46,
+  DETECTION_POSTPROCESSING = 47,
+  EQUAL = 48,
+  EXP = 49,
+  EXPAND_DIMS = 50,
+  GATHER = 51,
+  GENERATE_PROPOSALS = 52,
+  GREATER = 53,
+  GREATER_EQUAL = 54,
+  GROUPED_CONV_2D = 55,
+  HEATMAP_MAX_KEYPOINT = 56,
+  INSTANCE_NORMALIZATION = 57,
+  LESS = 58,
+  LESS_EQUAL = 59,
+  LOG = 60,
+  LOGICAL_AND = 61,
+  LOGICAL_NOT = 62,
+  LOGICAL_OR = 63,
+  LOG_SOFTMAX = 64,
+  MAXIMUM = 65,
+  MINIMUM = 66,
+  NEG = 67,
+  NOT_EQUAL = 68,
+  PAD_V2 = 69,
+  POW = 70,
+  PRELU = 71,
+  QUANTIZE = 72,
+  QUANTIZED_16BIT_LSTM = 73,
+  RANDOM_MULTINOMIAL = 74,
+  REDUCE_ALL = 75,
+  REDUCE_ANY = 76,
+  REDUCE_MAX = 77,
+  REDUCE_MIN = 78,
+  REDUCE_PROD = 79,
+  REDUCE_SUM = 80,
+  ROI_ALIGN = 81,
+  ROI_POOLING = 82,
+  RSQRT = 83,
+  SELECT = 84,
+  SIN = 85,
+  SLICE = 86,
+  SPLIT = 87,
+  SQRT = 88,
+  TILE = 89,
+  TOPK_V2 = 90,
+  TRANSPOSE_CONV_2D = 91,
+  UNIDIRECTIONAL_SEQUENCE_LSTM = 92,
+  UNIDIRECTIONAL_SEQUENCE_RNN = 93,
+  RESIZE_NEAREST_NEIGHBOR = 94,
+  QUANTIZED_LSTM = 95,
+  IF = 96,
+  WHILE = 97,
+  ELU = 98,
+  HARD_SWISH = 99,
+  FILL = 100,
+  RANK = 101,
+  BATCH_MATMUL = 102,
+  PACK = 103,
+  MIRROR_PAD = 104,
+  REVERSE = 105,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OutputShape.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OutputShape.aidl
new file mode 100644
index 0000000..f733505
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/OutputShape.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable OutputShape {
+  int[] dimensions;
+  boolean isSufficient;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/PerformanceInfo.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/PerformanceInfo.aidl
new file mode 100644
index 0000000..04910f5
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/PerformanceInfo.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable PerformanceInfo {
+  float execTime;
+  float powerUsage;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Priority.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Priority.aidl
new file mode 100644
index 0000000..8f35709
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Priority.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@Backing(type="int") @VintfStability
+enum Priority {
+  LOW = 0,
+  MEDIUM = 1,
+  HIGH = 2,
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Request.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Request.aidl
new file mode 100644
index 0000000..39ec7a9
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Request.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Request {
+  android.hardware.neuralnetworks.RequestArgument[] inputs;
+  android.hardware.neuralnetworks.RequestArgument[] outputs;
+  android.hardware.neuralnetworks.RequestMemoryPool[] pools;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/RequestArgument.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/RequestArgument.aidl
new file mode 100644
index 0000000..e3541c0
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/RequestArgument.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable RequestArgument {
+  boolean hasNoValue;
+  android.hardware.neuralnetworks.DataLocation location;
+  int[] dimensions;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/RequestMemoryPool.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/RequestMemoryPool.aidl
new file mode 100644
index 0000000..312f581
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/RequestMemoryPool.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+union RequestMemoryPool {
+  android.hardware.neuralnetworks.Memory pool;
+  int token;
+}
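
Each RequestArgument binds one model input or output to a byte range inside one of the Request's pools via DataLocation.poolIndex, and a RequestMemoryPool carries either a Memory directly or a token naming a driver-managed buffer. A minimal sketch of assembling a Request for a hypothetical model with two four-element FLOAT32 inputs and one four-element FLOAT32 output packed into a single pool, assuming the NDK AIDL bindings; the helper is illustrative only.

    #include <cstdint>
    #include <utility>
    #include <vector>
    #include <aidl/android/hardware/neuralnetworks/Request.h>
    #include <aidl/android/hardware/neuralnetworks/RequestMemoryPool.h>

    namespace nn = aidl::android::hardware::neuralnetworks;

    // `pool` is expected to hold input0 (16 bytes), input1 (16 bytes) and the
    // output (16 bytes) back to back.
    nn::Request makeRequest(nn::Memory pool) {
        auto arg = [](int32_t poolIndex, int64_t offset, int64_t length) {
            nn::RequestArgument a;
            a.hasNoValue = false;
            a.location.poolIndex = poolIndex;
            a.location.offset = offset;
            a.location.length = length;
            // a.dimensions left empty: the operand's declared shape is used.
            return a;
        };
        nn::Request request;
        request.inputs = {arg(0, 0, 16), arg(0, 16, 16)};
        request.outputs = {arg(0, 32, 16)};
        request.pools.push_back(nn::RequestMemoryPool::make<nn::RequestMemoryPool::Tag::pool>(
                std::move(pool)));
        return request;
    }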
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Subgraph.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Subgraph.aidl
new file mode 100644
index 0000000..b7d4451
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Subgraph.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Subgraph {
+  android.hardware.neuralnetworks.Operand[] operands;
+  android.hardware.neuralnetworks.Operation[] operations;
+  int[] inputIndexes;
+  int[] outputIndexes;
+}
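
Putting the pieces together, a minimal sketch of a Model whose main Subgraph contains a single ADD operation (out = a + b, no fused activation), assuming the NDK AIDL bindings and ADD's documented (tensorA, tensorB, fusedActivationScalar) input signature; the helper is illustrative, not driver or framework code.

    #include <cstdint>
    #include <cstring>
    #include <utility>
    #include <aidl/android/hardware/neuralnetworks/Model.h>

    namespace nn = aidl::android::hardware::neuralnetworks;

    nn::Model makeAddModel() {
        // Four-element float tensors; scale/zeroPoint stay 0 for float types.
        auto tensor = [](nn::OperandLifeTime lifetime) {
            nn::Operand operand;
            operand.type = nn::OperandType::TENSOR_FLOAT32;
            operand.dimensions = {4};
            operand.lifetime = lifetime;
            return operand;
        };

        // The fused-activation scalar lives inline in Model.operandValues.
        nn::Operand activation;
        activation.type = nn::OperandType::INT32;
        activation.lifetime = nn::OperandLifeTime::CONSTANT_COPY;
        activation.location.offset = 0;
        activation.location.length = sizeof(int32_t);

        nn::Operation add;
        add.type = nn::OperationType::ADD;
        add.inputs = {0, 1, 2};   // a, b, activation
        add.outputs = {3};        // out

        nn::Subgraph mainSubgraph;
        mainSubgraph.operands = {tensor(nn::OperandLifeTime::SUBGRAPH_INPUT),
                                 tensor(nn::OperandLifeTime::SUBGRAPH_INPUT),
                                 activation,
                                 tensor(nn::OperandLifeTime::SUBGRAPH_OUTPUT)};
        mainSubgraph.operations = {add};
        mainSubgraph.inputIndexes = {0, 1};
        mainSubgraph.outputIndexes = {3};

        nn::Model model;
        model.main = std::move(mainSubgraph);
        const int32_t kNoActivation = 0;  // "no fused activation"
        model.operandValues.resize(sizeof(kNoActivation));
        std::memcpy(model.operandValues.data(), &kNoActivation, sizeof(kNoActivation));
        return model;
    }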
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/SymmPerChannelQuantParams.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/SymmPerChannelQuantParams.aidl
new file mode 100644
index 0000000..02d68f9
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/SymmPerChannelQuantParams.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable SymmPerChannelQuantParams {
+  float[] scales;
+  int channelDim;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Timing.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Timing.aidl
new file mode 100644
index 0000000..bcc83cf
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/3/android/hardware/neuralnetworks/Timing.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable Timing {
+  long timeOnDeviceNs;
+  long timeInDriverNs;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/ExecutionConfig.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/ExecutionConfig.aidl
new file mode 100644
index 0000000..cb85743
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/ExecutionConfig.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable ExecutionConfig {
+  boolean measureTiming;
+  long loopTimeoutDurationNs;
+  android.hardware.neuralnetworks.TokenValuePair[] executionHints;
+  android.hardware.neuralnetworks.ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
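
ExecutionConfig bundles the per-execution options that the existing methods take as individual arguments, plus optional token-value execution hints and extension prefixes; it is consumed by the *WithConfig and createReusableExecution entry points added to IBurst and IPreparedModel later in this change. A minimal sketch, with field names taken from the declaration above:

    #include <aidl/android/hardware/neuralnetworks/ExecutionConfig.h>

    namespace nn = aidl::android::hardware::neuralnetworks;

    // Timing measurement on, 2-second loop timeout (the interface default),
    // no execution hints and no extensions.
    nn::ExecutionConfig makeDefaultConfig() {
        nn::ExecutionConfig config;
        config.measureTiming = true;
        config.loopTimeoutDurationNs = 2'000'000'000;
        return config;
    }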
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl
index eb3d0b0..461fdfa 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl
@@ -36,4 +36,5 @@
 interface IBurst {
   android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in long[] memoryIdentifierTokens, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs);
   void releaseMemoryResource(in long memoryIdentifierToken);
+  android.hardware.neuralnetworks.ExecutionResult executeSynchronouslyWithConfig(in android.hardware.neuralnetworks.Request request, in long[] memoryIdentifierTokens, in android.hardware.neuralnetworks.ExecutionConfig config, in long deadlineNs);
 }
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl
index c9c67f2..c0fba47 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl
@@ -43,6 +43,7 @@
   String getVersionString();
   void prepareModel(in android.hardware.neuralnetworks.Model model, in android.hardware.neuralnetworks.ExecutionPreference preference, in android.hardware.neuralnetworks.Priority priority, in long deadlineNs, in ParcelFileDescriptor[] modelCache, in ParcelFileDescriptor[] dataCache, in byte[] token, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
   void prepareModelFromCache(in long deadlineNs, in ParcelFileDescriptor[] modelCache, in ParcelFileDescriptor[] dataCache, in byte[] token, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
+  void prepareModelWithConfig(in android.hardware.neuralnetworks.Model model, in android.hardware.neuralnetworks.PrepareModelConfig config, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
   const int BYTE_SIZE_OF_CACHE_TOKEN = 32;
   const int MAX_NUMBER_OF_CACHE_FILES = 32;
   const int EXTENSION_TYPE_HIGH_BITS_PREFIX = 15;
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IExecution.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IExecution.aidl
new file mode 100644
index 0000000..ab5275e
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IExecution.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+interface IExecution {
+  android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in long deadlineNs);
+  android.hardware.neuralnetworks.FencedExecutionResult executeFenced(in ParcelFileDescriptor[] waitFor, in long deadlineNs, in long durationNs);
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
index fccb5dc..fb0c372 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -37,6 +37,9 @@
   android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs);
   android.hardware.neuralnetworks.FencedExecutionResult executeFenced(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs, in long durationNs);
   android.hardware.neuralnetworks.IBurst configureExecutionBurst();
+  android.hardware.neuralnetworks.IExecution createReusableExecution(in android.hardware.neuralnetworks.Request request, in android.hardware.neuralnetworks.ExecutionConfig config);
+  android.hardware.neuralnetworks.ExecutionResult executeSynchronouslyWithConfig(in android.hardware.neuralnetworks.Request request, in android.hardware.neuralnetworks.ExecutionConfig config, in long deadlineNs);
+  android.hardware.neuralnetworks.FencedExecutionResult executeFencedWithConfig(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in android.hardware.neuralnetworks.ExecutionConfig config, in long deadlineNs, in long durationNs);
   const long DEFAULT_LOOP_TIMEOUT_DURATION_NS = 2000000000;
   const long MAXIMUM_LOOP_TIMEOUT_DURATION_NS = 15000000000;
 }
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/PrepareModelConfig.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/PrepareModelConfig.aidl
new file mode 100644
index 0000000..85c924f
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/PrepareModelConfig.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable PrepareModelConfig {
+  android.hardware.neuralnetworks.ExecutionPreference preference;
+  android.hardware.neuralnetworks.Priority priority;
+  long deadlineNs;
+  ParcelFileDescriptor[] modelCache;
+  ParcelFileDescriptor[] dataCache;
+  byte[] cacheToken;
+  android.hardware.neuralnetworks.TokenValuePair[] compilationHints;
+  android.hardware.neuralnetworks.ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/TokenValuePair.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/TokenValuePair.aidl
new file mode 100644
index 0000000..e477d6e
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/TokenValuePair.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                          //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable TokenValuePair {
+  int token;
+  byte[] value;
+}
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/ExecutionConfig.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExecutionConfig.aidl
new file mode 100644
index 0000000..00f1e11
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExecutionConfig.aidl
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+import android.hardware.neuralnetworks.ExtensionNameAndPrefix;
+import android.hardware.neuralnetworks.TokenValuePair;
+
+/**
+ * A type that is used to represent all configuration related to
+ * an Execution.
+ */
+@VintfStability
+parcelable ExecutionConfig {
+    /**
+     * Specifies whether or not to measure duration of the execution.
+     * For {@link IPreparedModel::executeSynchronouslyWithConfig}, the duration runs from the time
+     * the driver sees the corresponding call to the execute function to the time the driver returns
+     * from the function. For {@link IPreparedModel::executeFencedWithConfig}, please refer to
+     * {@link IPreparedModelCallback} for details.
+     */
+    boolean measureTiming;
+    /**
+     * The maximum amount of time in nanoseconds that should be spent
+     * executing a {@link OperationType::WHILE} operation. If a loop
+     * condition model does not output false within this duration,
+     * the execution must be aborted. If -1 is provided, the maximum
+     * amount of time is {@link DEFAULT_LOOP_TIMEOUT_DURATION_NS}.
+     * Other negative values are invalid. When provided, the duration
+     * must not exceed {@link MAXIMUM_LOOP_TIMEOUT_DURATION_NS}.
+     */
+    long loopTimeoutDurationNs;
+    /**
+     * A vector of token / value pairs that represent vendor-specific
+     * execution hints or metadata. The provided TokenValuePairs must not
+     * contain the same token twice. The driver must validate the
+     * data and ignore invalid hints. It is up to the driver to
+     * decide whether to respect the provided hints or not.
+     */
+    TokenValuePair[] executionHints;
+    /**
+     * The mapping between extension names and prefixes of token values.
+     * The driver must ignore the corresponding execution hint, if
+     * the extension is not supported.
+     */
+    ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
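Below is a minimal client-side sketch of how this parcelable might be populated. It assumes the standard NDK AIDL backend (which generates struct-like C++ parcelables with these field names); the hint token and payload are hypothetical examples, not values defined by this interface.

```cpp
// Sketch only: assumes the NDK AIDL backend's generated C++ types.
#include <aidl/android/hardware/neuralnetworks/ExecutionConfig.h>
#include <aidl/android/hardware/neuralnetworks/TokenValuePair.h>

namespace nn_aidl = aidl::android::hardware::neuralnetworks;

nn_aidl::ExecutionConfig makeExecutionConfig() {
    nn_aidl::ExecutionConfig config;
    config.measureTiming = true;
    // -1 selects DEFAULT_LOOP_TIMEOUT_DURATION_NS, per the field documentation above.
    config.loopTimeoutDurationNs = -1;

    // Hypothetical vendor hint: prefix 0x7AAA, hint 0x000B, one-byte payload.
    nn_aidl::TokenValuePair hint;
    hint.token = 0x7AAA000B;
    hint.value = {0x01};
    config.executionHints.push_back(hint);
    // config.extensionNameToPrefix would map prefix 0x7AAA to the vendor extension name.
    return config;
}
```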
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl
index 20109bd..9f70a53 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl
@@ -20,6 +20,10 @@
 
 /**
  * Information about an extension.
+ *
+ * The extension can provide zero or more operation types (which are not enumerated), zero or more
+ * operand types (which are enumerated in {@link Extension::operandTypes}), and compilation and
+ * execution hints (which are not enumerated).
  */
 @VintfStability
 parcelable Extension {
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
index 29be93f..6c296e0 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
@@ -17,7 +17,8 @@
 package android.hardware.neuralnetworks;
 
 /**
- * The mapping between extension names and prefixes of operand and operation type values.
+ * The mapping between extension names and prefixes of values such as operand and operation types,
+ * and tokens in {@link TokenValuePair}.
  *
  * An operand or operation whose numeric type value is above {@link IDevice::OPERAND_TYPE_BASE_MAX}
  * or {@link IDevice::OPERATION_TYPE_BASE_MAX} respectively should be interpreted as an extension
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl
index decdc48..a05a7fb 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl
@@ -17,6 +17,7 @@
 package android.hardware.neuralnetworks;
 
 import android.hardware.neuralnetworks.ErrorStatus;
+import android.hardware.neuralnetworks.ExecutionConfig;
 import android.hardware.neuralnetworks.ExecutionResult;
 import android.hardware.neuralnetworks.Request;
 
@@ -68,6 +69,8 @@
      *
      * Only a single execution on a given burst object may be active at any time.
      *
+     * Also see {@link IBurst::executeSynchronouslyWithConfig}.
+     *
      * @param request The input and output information on which the prepared model is to be
      *                executed.
      * @param memoryIdentifierTokens A list of tokens where each token is a non-negative number
@@ -78,10 +81,10 @@
      *                runs from the time the driver sees the call to the executeSynchronously
      *                function to the time the driver returns from the function.
      * @param deadlineNs The time by which the execution is expected to complete. The time is
-     *                   measured in nanoseconds since epoch of the steady clock (as from
-     *                   std::chrono::steady_clock). If the execution cannot be finished by the
-     *                   deadline, the execution may be aborted. Passing -1 means the deadline is
-     *                   omitted. Other negative values are invalid.
+     *                   measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+     *                   &ts) or ::android::base::boot_clock). If the execution cannot be finished
+     *                   by the deadline, the execution may be aborted. Passing -1 means the
+     *                   deadline is omitted. Other negative values are invalid.
      * @param loopTimeoutDurationNs The maximum amount of time in nanoseconds that should be spent
      *                              executing a {@link OperationType::WHILE} operation. If a loop
      *                              condition model does not output false within this duration, the
@@ -117,4 +120,13 @@
      *     - INVALID_ARGUMENT if one of the input arguments is invalid
      */
     void releaseMemoryResource(in long memoryIdentifierToken);
+
+    /**
+     * For detailed specification, please refer to {@link IBurst::executeSynchronously}. The
+     * difference between the two methods is that executeSynchronouslyWithConfig takes {@link
+     * ExecutionConfig} instead of a list of configuration parameters, and ExecutionConfig contains
+     * more configuration parameters than are passed to executeSynchronously.
+     */
+    ExecutionResult executeSynchronouslyWithConfig(in Request request,
+            in long[] memoryIdentifierTokens, in ExecutionConfig config, in long deadlineNs);
 }
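As a rough illustration, a client holding an IBurst proxy could invoke the new method as below. This is a sketch that assumes the NDK AIDL backend, where the method returns ndk::ScopedAStatus and the ExecutionResult comes back through an out-parameter.

```cpp
// Sketch only: assumes the NDK AIDL backend's generated IBurst proxy.
#include <aidl/android/hardware/neuralnetworks/IBurst.h>

#include <memory>
#include <vector>

namespace nn_aidl = aidl::android::hardware::neuralnetworks;

ndk::ScopedAStatus runOnBurst(const std::shared_ptr<nn_aidl::IBurst>& burst,
                              const nn_aidl::Request& request,
                              const std::vector<int64_t>& memoryIdentifierTokens,
                              const nn_aidl::ExecutionConfig& config) {
    nn_aidl::ExecutionResult result;
    // deadlineNs = -1 omits the deadline, as documented above.
    return burst->executeSynchronouslyWithConfig(request, memoryIdentifierTokens, config,
                                                 /*deadlineNs=*/-1, &result);
}
```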
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl
index 72e2623..821b9fe 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl
@@ -28,6 +28,7 @@
 import android.hardware.neuralnetworks.IPreparedModelParcel;
 import android.hardware.neuralnetworks.Model;
 import android.hardware.neuralnetworks.NumberOfCacheFiles;
+import android.hardware.neuralnetworks.PrepareModelConfig;
 import android.hardware.neuralnetworks.Priority;
 
 /**
@@ -148,7 +149,7 @@
      *
      * If the device reports that caching is not supported, the user may avoid calling
      * IDevice::prepareModelFromCache or providing cache file descriptors to
-     * IDevice::prepareModel.
+     * IDevice::prepareModel or IDevice::prepareModelWithConfig.
      *
      * @return NumberOfCacheFiles structure indicating how many files for model and data cache the
      *     driver needs to cache a single prepared model. It must be less than or equal to
@@ -302,6 +303,8 @@
      *
      * Multiple threads may call prepareModel on the same model concurrently.
      *
+     * Also see {@link IDevice::prepareModelWithConfig}.
+     *
      * @param model The model to be prepared for execution.
      * @param preference Indicates the intended execution behavior of a prepared model.
      * @param priority The priority of the prepared model relative to other prepared models owned by
@@ -403,17 +406,17 @@
      * @param modelCache A vector of file descriptors for the security-sensitive cache. The length
      *                   of the vector must match the numModelCache returned from
      *                   getNumberOfCacheFilesNeeded. The cache file descriptors will be provided in
-     *                   the same order as with prepareModel.
+     *                   the same order as with prepareModel or prepareModelWithConfig.
      * @param dataCache A vector of file descriptors for the constants' cache. The length of the
      *                  vector must match the numDataCache returned from
      *                  getNumberOfCacheFilesNeeded. The cache file descriptors will be provided in
-     *                  the same order as with prepareModel.
+     *                  the same order as with prepareModel or prepareModelWithConfig.
      * @param token A caching token of length BYTE_SIZE_OF_CACHE_TOKEN identifying the prepared
      *              model. It is the same token provided when saving the cache files with
-     *              prepareModel. Tokens should be chosen to have a low rate of collision for a
-     *              particular application. The driver cannot detect a collision; a collision will
-     *              result in a failed execution or in a successful execution that produces
-     *              incorrect output values.
+     *              prepareModel or prepareModelWithConfig. Tokens should be chosen to have a low
+     *              rate of collision for a particular application. The driver cannot detect a
+     *              collision; a collision will result in a failed execution or in a successful
+     *              execution that produces incorrect output values.
      * @param callback A callback object used to return the error status of preparing the model for
      *                 execution and the prepared model if successful, nullptr otherwise. The
      *                 callback object's notify function must be called exactly once, even if the
@@ -429,4 +432,28 @@
     void prepareModelFromCache(in long deadlineNs, in ParcelFileDescriptor[] modelCache,
             in ParcelFileDescriptor[] dataCache, in byte[] token,
             in IPreparedModelCallback callback);
+
+    /**
+     * For detailed specification, please refer to {@link IDevice::prepareModel}. The only
+     * difference between the two methods is that prepareModelWithConfig takes {@link
+     * PrepareModelConfig} instead of standalone configuration parameters, which allows vendor
+     * specific compilation metadata to be passed.
+     *
+     * @param model The model to be prepared for execution.
+     * @param config Configuration parameters to prepare the model.
+     * @param callback A callback object used to return the error status of preparing the model for
+     *                 execution and the prepared model if successful, nullptr otherwise. The
+     *                 callback object's notify function must be called exactly once, even if the
+     *                 model could not be prepared.
+     * @throws ServiceSpecificException with one of the following ErrorStatus values:
+     *     - DEVICE_UNAVAILABLE if driver is offline or busy
+     *     - GENERAL_FAILURE if there is an unspecified error
+     *     - INVALID_ARGUMENT if one of the input arguments related to preparing the model is
+     *       invalid
+     *     - MISSED_DEADLINE_* if the preparation is aborted because the model cannot be prepared by
+     *       the deadline
+     *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+     */
+    void prepareModelWithConfig(
+            in Model model, in PrepareModelConfig config, in IPreparedModelCallback callback);
 }
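A hedged client-side sketch of the new entry point follows. It assumes the NDK AIDL backend's generated BnPreparedModelCallback base class and ndk::SharedRefBase; a production client would block on the callback rather than just stashing its arguments.

```cpp
// Sketch only: minimal callback plus a call to prepareModelWithConfig.
#include <aidl/android/hardware/neuralnetworks/BnPreparedModelCallback.h>
#include <aidl/android/hardware/neuralnetworks/IDevice.h>
#include <android/binder_interface_utils.h>

#include <memory>

namespace nn_aidl = aidl::android::hardware::neuralnetworks;

class SimpleCallback : public nn_aidl::BnPreparedModelCallback {
  public:
    ndk::ScopedAStatus notify(
            nn_aidl::ErrorStatus status,
            const std::shared_ptr<nn_aidl::IPreparedModel>& preparedModel) override {
        // A real client would signal a waiting thread here; notify is called exactly once.
        mStatus = status;
        mPreparedModel = preparedModel;
        return ndk::ScopedAStatus::ok();
    }
    nn_aidl::ErrorStatus mStatus = nn_aidl::ErrorStatus::GENERAL_FAILURE;
    std::shared_ptr<nn_aidl::IPreparedModel> mPreparedModel;
};

ndk::ScopedAStatus prepare(const std::shared_ptr<nn_aidl::IDevice>& device,
                           const nn_aidl::Model& model,
                           const nn_aidl::PrepareModelConfig& config) {
    auto callback = ndk::SharedRefBase::make<SimpleCallback>();
    return device->prepareModelWithConfig(model, config, callback);
}
```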
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IExecution.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IExecution.aidl
new file mode 100644
index 0000000..3cb9c1a
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IExecution.aidl
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+import android.hardware.neuralnetworks.ExecutionResult;
+import android.hardware.neuralnetworks.FencedExecutionResult;
+
+/**
+ * IExecution represents a reusable execution object with request and most other execution
+ * properties fixed. It is used to launch executions.
+ *
+ * At most one execution may occur on a reusable execution object at any given time, either by
+ * means of executeSynchronously or executeFenced.
+ *
+ * An IExecution object is used to control a set of executions on the same prepared model with
+ * the same request and properties. IExecution objects enable some optimizations:
+ * (1) An IExecution object can preserve resources between executions. For example, a driver can
+ *     map a memory object when the IExecution object is created and cache the mapping for reuse in
+ *     subsequent executions. Any cached resource can be released when the IExecution object is
+ *     destroyed.
+ * (2) Because an IExecution object may be used for at most one execution at a time, any transient
+ *     execution resources such as intermediate tensors can be allocated once when the IExecution
+ *     object is created and freed when the IExecution object is destroyed.
+ * (3) An IExecution object is created for a fixed request. This enables the implementation to apply
+ *     request-specific optimizations. For example, an implementation can avoid request validation
+ *     and conversions when the IExecution object is reused. An implementation may also choose to
+ *     specialize the dynamic tensor shapes in the IExecution object according to the request.
+ */
+@VintfStability
+interface IExecution {
+    /**
+     * Performs a synchronous execution on the reusable execution object.
+     *
+     * The execution is performed synchronously with respect to the caller. executeSynchronously
+     * must verify the inputs to the function are correct, and the usages of memory pools allocated
+     * by IDevice::allocate are valid. If there is an error, executeSynchronously must immediately
+     * return a service specific exception with the appropriate ErrorStatus value. If the inputs to
+     * the function are valid and there is no error, executeSynchronously must perform the
+     * execution, and must not return until the execution is complete.
+     *
+     * The caller must not change the content of any data object referenced by the 'request'
+     * provided in {@link IPreparedModel::createReusableExecution} (described by the
+     * {@link DataLocation} of a {@link RequestArgument}) until executeSynchronously returns.
+     * executeSynchronously must not change the content of any of the data objects corresponding to
+     * 'request' inputs.
+     *
+     * If the execution object was configured from a prepared model wherein all tensor operands have
+     * fully specified dimensions, and the inputs to the function are valid, and at execution time
+     * every operation's input operands have legal values, then the execution should complete
+     * successfully: there must be no failure unless the device itself is in a bad state.
+     *
+     * If the execution object was created with measureTiming being true and the execution is
+     * successful, the driver may report the timing information in the returning
+     * {@link ExecutionResult}. The duration runs from the time the driver sees the call to the time
+     * the driver returns from the function.
+     *
+     * executeSynchronously may be called with an optional deadline. If the execution is not able to
+     * be completed before the provided deadline, the execution may be aborted, and either
+     * {@link ErrorStatus::MISSED_DEADLINE_TRANSIENT} or {@link
+     * ErrorStatus::MISSED_DEADLINE_PERSISTENT} may be returned. The error due to an abort must be
+     * sent the same way as other errors, described above.
+     *
+     * @param deadlineNs The time by which the execution is expected to complete. The time is
+     *                   measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+     *                   &ts) or ::android::base::boot_clock). If the execution cannot be finished
+     *                   by the deadline, the execution may be aborted. Passing -1 means the
+     *                   deadline is omitted. Other negative values are invalid.
+     * @return ExecutionResult parcelable, containing the status of the execution, output shapes
+     *     and timing information.
+     * @throws ServiceSpecificException with one of the following ErrorStatus values:
+     *     - DEVICE_UNAVAILABLE if driver is offline or busy
+     *     - GENERAL_FAILURE if there is an unspecified error
+     *     - INVALID_ARGUMENT if one of the input arguments is invalid
+     *     - MISSED_DEADLINE_* if the execution is aborted because it cannot be completed by the
+     *       deadline
+     *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+     */
+    ExecutionResult executeSynchronously(in long deadlineNs);
+
+    /**
+     * Launch a fenced asynchronous execution on the reusable execution object.
+     *
+     * The execution is performed asynchronously with respect to the caller. executeFenced must
+     * verify the inputs to the function are correct, and the usages of memory pools allocated by
+     * IDevice::allocate are valid. If there is an error, executeFenced must immediately return a
+     * service specific exception with the corresponding ErrorStatus. If the inputs to the function
+     * are valid and there is no error, executeFenced must dispatch an asynchronous task to perform
+     * the execution in the background, and immediately return a {@link FencedExecutionResult}
+     * containing two fields: a callback (which can be used by the client to query the duration and
+     * runtime error status) and a sync fence (which will be signaled once the execution is
+     * completed). If the task has finished before the call returns, syncFence file descriptor may
+     * be set to -1. The execution must wait for all the sync fences (if any) in waitFor to be
+     * signaled before starting the actual execution.
+     *
+     * When the asynchronous task has finished its execution, it must immediately signal the
+     * syncFence returned from the executeFenced call. After the syncFence is signaled, the task
+     * must not modify the content of any data object referenced by the 'request' provided in
+     * IPreparedModel::createReusableExecution (described by the {@link DataLocation} of a
+     * {@link RequestArgument}).
+     *
+     * executeFenced may be called with an optional deadline and an optional duration. If the
+     * execution is not able to be completed before the provided deadline or within the timeout
+     * duration (measured from when all sync fences in waitFor are signaled), whichever comes
+     * earlier, the execution may be aborted, and either
+     * {@link ErrorStatus::MISSED_DEADLINE_TRANSIENT} or {@link
+     * ErrorStatus::MISSED_DEADLINE_PERSISTENT} may be returned. The error due to an abort must be
+     * sent the same way as other errors, described above.
+     *
+     * If any of the sync fences in waitFor changes to error status after the executeFenced call
+     * succeeds, or the execution is aborted because it cannot finish before the deadline has been
+     * reached or the duration has elapsed, the driver must immediately set the returned syncFence
+     * to error status.
+     *
+     * @param waitFor A vector of sync fence file descriptors. Execution must not start until all
+     *                sync fences have been signaled.
+     * @param deadlineNs The time by which the execution is expected to complete. The time is
+     *                   measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+     *                   &ts) or ::android::base::boot_clock). If the execution cannot be finished
+     *                   by the deadline, the execution may be aborted. Passing -1 means the
+     *                   deadline is omitted. Other negative values are invalid.
+     * @param durationNs The length of time in nanoseconds within which the execution is expected
+     *                   to complete after all sync fences in waitFor are signaled. If the
+     *                   execution cannot be finished within the duration, the execution may be
+     *                   aborted. Passing -1 means the duration is omitted. Other negative values
+     *                   are invalid.
+     * @return The FencedExecutionResult parcelable, containing IFencedExecutionCallback and the
+     *         sync fence.
+     * @throws ServiceSpecificException with one of the following ErrorStatus values:
+     *     - DEVICE_UNAVAILABLE if driver is offline or busy
+     *     - GENERAL_FAILURE if there is an unspecified error
+     *     - INVALID_ARGUMENT if one of the input arguments is invalid, including fences in error
+     *       states.
+     *     - MISSED_DEADLINE_* if the execution is aborted because it cannot be completed by the
+     *       deadline
+     *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+     */
+    FencedExecutionResult executeFenced(
+            in ParcelFileDescriptor[] waitFor, in long deadlineNs, in long durationNs);
+}
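To make the reuse pattern concrete, here is a sketch (NDK AIDL backend assumed) that creates one reusable execution and runs it twice; the request, measurement setting, and hints are fixed when the object is created, and only the deadline could vary per run.

```cpp
// Sketch only: assumes the NDK AIDL backend's generated IPreparedModel / IExecution proxies.
#include <aidl/android/hardware/neuralnetworks/IExecution.h>
#include <aidl/android/hardware/neuralnetworks/IPreparedModel.h>

#include <memory>

namespace nn_aidl = aidl::android::hardware::neuralnetworks;

ndk::ScopedAStatus runTwice(const std::shared_ptr<nn_aidl::IPreparedModel>& preparedModel,
                            const nn_aidl::Request& request,
                            const nn_aidl::ExecutionConfig& config) {
    std::shared_ptr<nn_aidl::IExecution> execution;
    ndk::ScopedAStatus status =
            preparedModel->createReusableExecution(request, config, &execution);
    if (!status.isOk()) return status;

    nn_aidl::ExecutionResult result;
    // First run, deadline omitted (-1). Resources cached at creation time are reused below.
    status = execution->executeSynchronously(/*deadlineNs=*/-1, &result);
    if (!status.isOk()) return status;
    // Second run on the same object; at most one execution may be in flight at a time.
    return execution->executeSynchronously(/*deadlineNs=*/-1, &result);
}
```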
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
index 956b626..949804e 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -18,9 +18,11 @@
 
 import android.hardware.common.NativeHandle;
 import android.hardware.neuralnetworks.ErrorStatus;
+import android.hardware.neuralnetworks.ExecutionConfig;
 import android.hardware.neuralnetworks.ExecutionResult;
 import android.hardware.neuralnetworks.FencedExecutionResult;
 import android.hardware.neuralnetworks.IBurst;
+import android.hardware.neuralnetworks.IExecution;
 import android.hardware.neuralnetworks.Request;
 
 /**
@@ -67,6 +69,8 @@
      * Any number of calls to the execute* functions, in any combination, may be made concurrently,
      * even on the same IPreparedModel object.
      *
+     * Also see {@link IPreparedModel::executeSynchronouslyWithConfig}.
+     *
      * @param request The input and output information on which the prepared model is to be
      *                executed.
      * @param measure Specifies whether or not to measure duration of the execution. The duration
@@ -105,11 +109,12 @@
      * IDevice::allocate are valid. If there is an error, executeFenced must immediately return a
      * service specific exception with the corresponding ErrorStatus. If the inputs to the function
      * are valid and there is no error, executeFenced must dispatch an asynchronous task to perform
-     * the execution in the background, assign a sync fence that will be signaled once the execution
-     * is completed and immediately return a callback that can be used by the client to query the
-     * duration and runtime error status. If the task has finished before the call returns,
-     * syncFence file descriptor may be set to -1. The execution must wait for all the sync fences
-     * (if any) in waitFor to be signaled before starting the actual execution.
+     * the execution in the background, and immediately return a {@link FencedExecutionResult}
+     * containing two fields: a callback (which can be used by the client to query the duration and
+     * runtime error status) and a sync fence (which will be signaled once the execution is
+     * completed). If the task has finished before the call returns, syncFence file descriptor may
+     * be set to -1. The execution must wait for all the sync fences (if any) in waitFor to be
+     * signaled before starting the actual execution.
      *
      * When the asynchronous task has finished its execution, it must immediately signal the
      * syncFence returned from the executeFenced call. After the syncFence is signaled, the task
@@ -132,6 +137,8 @@
      * Any number of calls to the execute* functions, in any combination, may be made concurrently,
      * even on the same IPreparedModel object.
      *
+     * Also see {@link IPreparedModel::executeFencedWithConfig}.
+     *
      * @param request The input and output information on which the prepared model is to be
      *                executed. The outputs in the request must have fully specified dimensions.
      * @param waitFor A vector of sync fence file descriptors. Execution must not start until all
@@ -186,4 +193,86 @@
      *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
      */
     IBurst configureExecutionBurst();
+
+    /**
+     * Create a reusable execution object to launch multiple executions with the same request and
+     * properties.
+     *
+     * createReusableExecution must verify the inputs to the function are correct, and the usages of
+     * memory pools allocated by IDevice::allocate are valid. If there is an error,
+     * createReusableExecution must immediately return a service specific exception with the
+     * appropriate ErrorStatus value. If the inputs to the function are valid and there is no error,
+     * createReusableExecution must construct a reusable execution.
+     *
+     * @param request The input and output information on which the prepared model is to be
+     *                executed.
+     * @param config Specifies the execution configuration parameters.
+     * @return execution An IExecution object representing a reusable execution that has been
+     *                   specialized for a fixed request.
+     * @throws ServiceSpecificException with one of the following ErrorStatus values:
+     *     - DEVICE_UNAVAILABLE if driver is offline or busy
+     *     - GENERAL_FAILURE if there is an unspecified error
+     *     - INVALID_ARGUMENT if one of the input arguments is invalid
+     *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+     */
+    IExecution createReusableExecution(in Request request, in ExecutionConfig config);
+
+    /**
+     * For detailed specification, please refer to {@link IPreparedModel::executeSynchronously}. The
+     * difference between the two methods is that executeSynchronouslyWithConfig takes {@link
+     * ExecutionConfig} instead of a list of configuration parameters, and ExecutionConfig contains
+     * more configuration parameters than are passed to executeSynchronously.
+     *
+     * @param request The input and output information on which the prepared model is to be
+     *                executed.
+     * @param config Specifies the execution configuration parameters.
+     * @param deadlineNs The time by which the execution is expected to complete. The time is
+     *                   measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+     *                   &ts) or ::android::base::boot_clock). If the execution cannot be finished
+     *                   by the deadline, the execution may be aborted. Passing -1 means the
+     *                   deadline is omitted. Other negative values are invalid.
+     * @return ExecutionResult parcelable, containing the status of the execution, output shapes and
+     *     timing information.
+     * @throws ServiceSpecificException with one of the following ErrorStatus values:
+     *     - DEVICE_UNAVAILABLE if driver is offline or busy
+     *     - GENERAL_FAILURE if there is an unspecified error
+     *     - INVALID_ARGUMENT if one of the input arguments is invalid
+     *     - MISSED_DEADLINE_* if the execution is aborted because it cannot be completed by the
+     *       deadline
+     *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+     */
+    ExecutionResult executeSynchronouslyWithConfig(
+            in Request request, in ExecutionConfig config, in long deadlineNs);
+
+    /**
+     * For detailed specification, please refer to {@link IPreparedModel::executeFenced}. The
+     * difference between the two methods is that executeFencedWithConfig takes {@link
+     * ExecutionConfig} instead of a list of configuration parameters, and ExecutionConfig contains
+     * more configuration parameters than are passed to executeFenced.
+     *
+     * @param request The input and output information on which the prepared model is to be
+     *                executed. The outputs in the request must have fully specified dimensions.
+     * @param waitFor A vector of sync fence file descriptors. Execution must not start until all
+     *                sync fences have been signaled.
+     * @param config Specifies the execution configuration parameters.
+     * @param deadlineNs The time by which the execution is expected to complete. The time is
+     *                   measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+     *                   &ts) or ::android::base::boot_clock). If the execution cannot be finished
+     *                   by the deadline, the execution may be aborted. Passing -1 means the
+     *                   deadline is omitted. Other negative values are invalid.
+     * @param durationNs The length of time in nanoseconds within which the execution is expected to
+     *                   complete after all sync fences in waitFor are signaled. If the execution
+     *                   cannot be finished within the duration, the execution may be aborted.
+     *                   Passing -1 means the duration is omitted. Other negative values are
+     *                   invalid.
+     * @return The FencedExecutionResult parcelable, containing IFencedExecutionCallback and the
+     *         sync fence.
+     * @throws ServiceSpecificException with one of the following ErrorStatus values:
+     *     - DEVICE_UNAVAILABLE if driver is offline or busy
+     *     - GENERAL_FAILURE if there is an unspecified error
+     *     - INVALID_ARGUMENT if one of the input arguments is invalid, including fences in error
+     *       states.
+     *     - MISSED_DEADLINE_* if the execution is aborted because it cannot be completed by the
+     *       deadline
+     *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+     */
+    FencedExecutionResult executeFencedWithConfig(in Request request,
+            in ParcelFileDescriptor[] waitFor, in ExecutionConfig config, in long deadlineNs,
+            in long durationNs);
 }
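The fenced variant can be exercised with a similar sketch, again assuming the NDK AIDL backend (ParcelFileDescriptor[] mapping to std::vector<ndk::ScopedFileDescriptor>, result via out-parameter); the FencedExecutionResult field names are taken from the interface description above.

```cpp
// Sketch only: launch a fenced execution through the new *_WithConfig entry point.
#include <aidl/android/hardware/neuralnetworks/IPreparedModel.h>

#include <memory>
#include <vector>

namespace nn_aidl = aidl::android::hardware::neuralnetworks;

ndk::ScopedAStatus runFenced(const std::shared_ptr<nn_aidl::IPreparedModel>& preparedModel,
                             const nn_aidl::Request& request,
                             const std::vector<ndk::ScopedFileDescriptor>& waitFor,
                             const nn_aidl::ExecutionConfig& config) {
    nn_aidl::FencedExecutionResult fencedResult;
    // Both the deadline and the post-fence timeout duration are omitted with -1.
    ndk::ScopedAStatus status = preparedModel->executeFencedWithConfig(
            request, waitFor, config, /*deadlineNs=*/-1, /*durationNs=*/-1, &fencedResult);
    // On success, fencedResult carries the sync fence to wait on and the callback that
    // reports timing and the runtime error status once the fence has signaled.
    return status;
}
```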
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/PrepareModelConfig.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/PrepareModelConfig.aidl
new file mode 100644
index 0000000..96df968
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/PrepareModelConfig.aidl
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+import android.hardware.neuralnetworks.ExecutionPreference;
+import android.hardware.neuralnetworks.ExtensionNameAndPrefix;
+import android.hardware.neuralnetworks.Priority;
+import android.hardware.neuralnetworks.TokenValuePair;
+
+/**
+ * A type that is used to represent all configuration needed to
+ * prepare a model.
+ */
+@VintfStability
+parcelable PrepareModelConfig {
+    /**
+     * Indicates the intended execution behavior of a prepared model.
+     */
+    ExecutionPreference preference;
+    /**
+     * The priority of the prepared model relative to other prepared
+     * models owned by the client.
+     */
+    Priority priority;
+    /**
+     * The time by which the model is expected to be prepared. The
+     * time is measured in nanoseconds since boot (as from
+     * clock_gettime(CLOCK_BOOTTIME, &ts) or
+     * ::android::base::boot_clock). If the model cannot be prepared
+     * by the deadline, the preparation may be aborted. Passing -1
+     * means the deadline is omitted. Other negative values are
+     * invalid.
+     */
+    long deadlineNs;
+    /**
+     * A vector of file descriptors for the security-sensitive cache.
+     * The length of the vector must either be 0 indicating that
+     * caching information is not provided, or match the
+     * numModelCache returned from IDevice::getNumberOfCacheFilesNeeded. The
+     * cache file descriptors will be provided in the same order when
+     * retrieving the preparedModel from cache files with
+     * IDevice::prepareModelFromCache.
+     */
+    ParcelFileDescriptor[] modelCache;
+    /**
+     * A vector of file descriptors for the constants' cache. The
+     * length of the vector must either be 0 indicating that caching
+     * information is not provided, or match the numDataCache
+     * returned from IDevice::getNumberOfCacheFilesNeeded. The cache file
+     * descriptors will be provided in the same order when retrieving
+     * the preparedModel from cache files with IDevice::prepareModelFromCache.
+     */
+    ParcelFileDescriptor[] dataCache;
+    /**
+     * A caching token of length IDevice::BYTE_SIZE_OF_CACHE_TOKEN identifying
+     * the prepared model. The same token will be provided when
+     * retrieving the prepared model from the cache files with
+     * IDevice::prepareModelFromCache.  Tokens should be chosen to have a low
+     * rate of collision for a particular application. The driver
+     * cannot detect a collision; a collision will result in a failed
+     * execution or in a successful execution that produces incorrect
+     * output values. If both modelCache and dataCache are empty
+     * indicating that caching information is not provided, this
+     * token must be ignored.
+     */
+    byte[] cacheToken;
+    /**
+     * A vector of token / value pairs that represent vendor-specific
+     * compilation hints or metadata. The provided TokenValuePairs must not
+     * contain the same token twice. The driver must validate the
+     * data and ignore invalid hints. It is up to the driver to
+     * decide whether to respect the provided hints or not.
+     */
+    TokenValuePair[] compilationHints;
+    /**
+     * The mapping between extension names and prefixes of token values.
+     * The driver must ignore the corresponding compilation hint if
+     * the extension is not supported.
+     */
+    ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
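The following sketch fills in the parcelable for the no-caching case (NDK AIDL backend assumed; the preference and priority values are arbitrary examples).

```cpp
// Sketch only: PrepareModelConfig without caching information.
#include <aidl/android/hardware/neuralnetworks/PrepareModelConfig.h>

#include <cstdint>
#include <vector>

namespace nn_aidl = aidl::android::hardware::neuralnetworks;

nn_aidl::PrepareModelConfig makePrepareModelConfig() {
    nn_aidl::PrepareModelConfig config;
    config.preference = nn_aidl::ExecutionPreference::SUSTAINED_SPEED;
    config.priority = nn_aidl::Priority::MEDIUM;
    config.deadlineNs = -1;  // No preparation deadline.
    // modelCache and dataCache stay empty, so the driver must ignore cacheToken;
    // it is still sized to BYTE_SIZE_OF_CACHE_TOKEN (32) bytes here.
    config.cacheToken = std::vector<uint8_t>(32, 0);
    // compilationHints / extensionNameToPrefix stay empty when no vendor metadata is passed.
    return config;
}
```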
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/TokenValuePair.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/TokenValuePair.aidl
new file mode 100644
index 0000000..ec665b4
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/TokenValuePair.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+/**
+ * A type that is used to represent a token / byte array data pair.
+ */
+@VintfStability
+parcelable TokenValuePair {
+    /**
+     * A 32-bit integer token. The token is created by combining the
+     * extension prefix and enum defined within the extension.
+     * The low {@link IDevice::EXTENSION_TYPE_LOW_BITS_TYPE} bits of the value
+     * correspond to the hint within the extension and the high
+     * {@link IDevice::EXTENSION_TYPE_HIGH_BITS_PREFIX} bits encode the "prefix", which maps
+     * uniquely to the extension name. The sign bit is always 0.
+     *
+     * For example, if a token value is 0x7AAA000B and the corresponding
+     * {@link ExtensionNameAndPrefix} contains an entry with prefix=0x7AAA and
+     * name="vendor.test.test_extension", then the token should be interpreted as the hint
+     * 0x000B of the extension named vendor.test.test_extension.
+     */
+    int token;
+    /**
+     * A byte array containing the raw data.
+     */
+    byte[] value;
+}
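The worked example in the comment above can be reproduced with a couple of constants; the only assumption is that IDevice::EXTENSION_TYPE_LOW_BITS_TYPE is 16, so the hint occupies the low 16 bits and the prefix sits above it.

```cpp
// Sketch only: recreates the 0x7AAA000B example from the comment above.
#include <cstdint>

constexpr int32_t kPrefix = 0x7AAA;  // maps to "vendor.test.test_extension" via ExtensionNameAndPrefix
constexpr int32_t kHint = 0x000B;    // hint value defined within that extension
constexpr int32_t kToken = (kPrefix << 16) | kHint;
static_assert(kToken == 0x7AAA000B, "matches the example above; the sign bit stays 0");
```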
diff --git a/neuralnetworks/aidl/utils/Android.bp b/neuralnetworks/aidl/utils/Android.bp
index 37ad6d6..41eae85 100644
--- a/neuralnetworks/aidl/utils/Android.bp
+++ b/neuralnetworks/aidl/utils/Android.bp
@@ -26,7 +26,14 @@
 cc_defaults {
     name: "neuralnetworks_utils_hal_aidl_defaults",
     defaults: ["neuralnetworks_utils_defaults"],
-    srcs: ["src/*"],
+    srcs: [
+        // AIDL utils that a driver may depend on.
+        "src/BufferTracker.cpp",
+        "src/Conversions.cpp",
+        "src/HalUtils.cpp",
+        "src/Utils.cpp",
+        "src/ValidateHal.cpp",
+    ],
     local_include_dirs: ["include/nnapi/hal/aidl/"],
     export_include_dirs: ["include"],
     cflags: ["-Wthread-safety"],
@@ -47,6 +54,7 @@
     },
 }
 
+// Deprecated. Remove once all modules depending on this are migrated away.
 cc_library_static {
     name: "neuralnetworks_utils_hal_aidl_v1",
     defaults: ["neuralnetworks_utils_hal_aidl_defaults"],
@@ -56,19 +64,25 @@
 }
 
 cc_library_static {
-    name: "neuralnetworks_utils_hal_aidl_v2",
-    defaults: ["neuralnetworks_utils_hal_aidl_defaults"],
-    shared_libs: [
-        "android.hardware.neuralnetworks-V2-ndk",
-    ],
-}
-
-cc_library_static {
     name: "neuralnetworks_utils_hal_aidl",
     defaults: ["neuralnetworks_utils_hal_aidl_defaults"],
-    shared_libs: [
-        "android.hardware.neuralnetworks-V3-ndk",
+    srcs: [
+        // Additional AIDL utils for the runtime.
+        "src/Assertions.cpp",
+        "src/Buffer.cpp",
+        "src/Burst.cpp",
+        "src/Callbacks.cpp",
+        "src/Device.cpp",
+        "src/Execution.cpp",
+        "src/InvalidDevice.cpp",
+        "src/PreparedModel.cpp",
+        "src/ProtectCallback.cpp",
+        "src/Service.cpp",
     ],
+    shared_libs: [
+        "android.hardware.neuralnetworks-V4-ndk",
+    ],
+    cflags: ["-DNN_AIDL_V4_OR_ABOVE"],
 }
 
 // A cc_defaults that includes the latest non-experimental AIDL utilities and other AIDL libraries
@@ -79,9 +93,10 @@
     static_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.graphics.common-V2-ndk",
-        "android.hardware.neuralnetworks-V3-ndk",
+        "android.hardware.neuralnetworks-V4-ndk",
         "neuralnetworks_utils_hal_aidl",
     ],
+    cflags: ["-DNN_AIDL_V4_OR_ABOVE"],
 }
 
 cc_test {
@@ -96,19 +111,13 @@
     static_libs: [
         "libaidlcommonsupport",
         "libgmock",
-        "libneuralnetworks_common",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
     ],
     shared_libs: [
-        "android.hidl.allocator@1.0",
         "libbase",
         "libbinder_ndk",
         "libcutils",
-        "libhidlbase",
-        "libhidlmemory",
-        "liblog",
-        "libutils",
     ],
     target: {
         android: {
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h
index 0cc78d4..f2e6e75 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h
@@ -86,10 +86,12 @@
                 GUARDED_BY(mMutex);
     };
 
+    // featureLevel is for testing purposes.
     static nn::GeneralResult<std::shared_ptr<const Burst>> create(
-            std::shared_ptr<aidl_hal::IBurst> burst);
+            std::shared_ptr<aidl_hal::IBurst> burst, nn::Version featureLevel);
 
-    Burst(PrivateConstructorTag tag, std::shared_ptr<aidl_hal::IBurst> burst);
+    Burst(PrivateConstructorTag tag, std::shared_ptr<aidl_hal::IBurst> burst,
+          nn::Version featureLevel);
 
     // See IBurst::cacheMemory for information.
     OptionalCacheHold cacheMemory(const nn::SharedMemory& memory) const override;
@@ -97,23 +99,29 @@
     // See IBurst::execute for information.
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     // See IBurst::createReusableExecution for information.
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> executeInternal(
             const aidl_hal::Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
             bool measure, int64_t deadline, int64_t loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
             const hal::utils::RequestRelocation& relocation) const;
 
   private:
     mutable std::atomic_flag mExecutionInFlight = ATOMIC_FLAG_INIT;
     const std::shared_ptr<aidl_hal::IBurst> kBurst;
     const std::shared_ptr<MemoryCache> kMemoryCache;
+    const nn::Version kFeatureLevel;
 };
 
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Callbacks.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Callbacks.h
index 168264b..960be2b 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Callbacks.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Callbacks.h
@@ -36,6 +36,8 @@
   public:
     using Data = nn::GeneralResult<nn::SharedPreparedModel>;
 
+    PreparedModelCallback(nn::Version featureLevel) : kFeatureLevel(featureLevel) {}
+
     ndk::ScopedAStatus notify(ErrorStatus status,
                               const std::shared_ptr<IPreparedModel>& preparedModel) override;
 
@@ -44,6 +46,7 @@
     Data get();
 
   private:
+    const nn::Version kFeatureLevel;
     hal::utils::TransferValue<Data> mData;
 };
 
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h
index 477b311..af58715 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h
@@ -46,6 +46,10 @@
 #include <aidl/android/hardware/neuralnetworks/SymmPerChannelQuantParams.h>
 #include <aidl/android/hardware/neuralnetworks/Timing.h>
 
+#ifdef NN_AIDL_V4_OR_ABOVE
+#include <aidl/android/hardware/neuralnetworks/TokenValuePair.h>
+#endif  // NN_AIDL_V4_OR_ABOVE
+
 #include <android/binder_auto_utils.h>
 #include <nnapi/Result.h>
 #include <nnapi/Types.h>
@@ -74,7 +78,7 @@
         const aidl_hal::SymmPerChannelQuantParams& symmPerChannelQuantParams);
 GeneralResult<Operation> unvalidatedConvert(const aidl_hal::Operation& operation);
 GeneralResult<Model> unvalidatedConvert(const aidl_hal::Model& model);
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
         const aidl_hal::ExtensionNameAndPrefix& extensionNameAndPrefix);
 GeneralResult<Model::OperandValues> unvalidatedConvert(const std::vector<uint8_t>& operandValues);
 GeneralResult<Model::Subgraph> unvalidatedConvert(const aidl_hal::Subgraph& subgraph);
@@ -97,6 +101,10 @@
         const aidl_hal::ExtensionOperandTypeInformation& operandTypeInformation);
 GeneralResult<SharedHandle> unvalidatedConvert(const ndk::ScopedFileDescriptor& handle);
 
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<TokenValuePair> unvalidatedConvert(const aidl_hal::TokenValuePair& tokenValuePair);
+#endif  // NN_AIDL_V4_OR_ABOVE
+
 GeneralResult<std::vector<Operation>> unvalidatedConvert(
         const std::vector<aidl_hal::Operation>& operations);
 
@@ -116,6 +124,14 @@
 
 GeneralResult<std::vector<Extension>> convert(const std::vector<aidl_hal::Extension>& extension);
 GeneralResult<std::vector<SharedMemory>> convert(const std::vector<aidl_hal::Memory>& memories);
+GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+        const std::vector<aidl_hal::ExtensionNameAndPrefix>& extensionNameAndPrefix);
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<std::vector<TokenValuePair>> convert(
+        const std::vector<aidl_hal::TokenValuePair>& metaData);
+#endif  // NN_AIDL_V4_OR_ABOVE
+
 GeneralResult<std::vector<OutputShape>> convert(
         const std::vector<aidl_hal::OutputShape>& outputShapes);
 GeneralResult<std::vector<SharedHandle>> convert(
@@ -152,7 +168,7 @@
 nn::GeneralResult<std::vector<uint8_t>> unvalidatedConvert(
         const nn::Model::OperandValues& operandValues);
 nn::GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
-        const nn::Model::ExtensionNameAndPrefix& extensionNameToPrefix);
+        const nn::ExtensionNameAndPrefix& extensionNameToPrefix);
 nn::GeneralResult<Model> unvalidatedConvert(const nn::Model& model);
 nn::GeneralResult<Priority> unvalidatedConvert(const nn::Priority& priority);
 nn::GeneralResult<Request> unvalidatedConvert(const nn::Request& request);
@@ -166,6 +182,10 @@
 nn::GeneralResult<Capabilities> unvalidatedConvert(const nn::Capabilities& capabilities);
 nn::GeneralResult<Extension> unvalidatedConvert(const nn::Extension& extension);
 
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<TokenValuePair> unvalidatedConvert(const nn::TokenValuePair& tokenValuePair);
+#endif  // NN_AIDL_V4_OR_ABOVE
+
 nn::GeneralResult<std::vector<uint8_t>> convert(const nn::CacheToken& cacheToken);
 nn::GeneralResult<BufferDesc> convert(const nn::BufferDesc& bufferDesc);
 nn::GeneralResult<DeviceType> convert(const nn::DeviceType& deviceType);
@@ -190,6 +210,13 @@
 nn::GeneralResult<std::vector<ndk::ScopedFileDescriptor>> convert(
         const std::vector<nn::SyncFence>& syncFences);
 nn::GeneralResult<std::vector<Extension>> convert(const std::vector<nn::Extension>& extensions);
+nn::GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix);
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<std::vector<TokenValuePair>> convert(
+        const std::vector<nn::TokenValuePair>& metaData);
+#endif  // NN_AIDL_V4_OR_ABOVE
 
 nn::GeneralResult<std::vector<int32_t>> toSigned(const std::vector<uint32_t>& vec);
 
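Code that must build against both the V3 and V4 NDK libraries typically keys on the same NN_AIDL_V4_OR_ABOVE define added in Android.bp above. A hedged sketch follows; it assumes the aidl_hal namespace alias used throughout this header is visible to includers, and that convert() for TokenValuePair lives in ::android::nn as declared above.

```cpp
// Sketch only: guard uses of the V4-only conversions behind NN_AIDL_V4_OR_ABOVE,
// mirroring the #ifdef blocks in the header above.
#include <nnapi/hal/aidl/Conversions.h>

#include <utility>
#include <vector>

namespace android::nn {

#ifdef NN_AIDL_V4_OR_ABOVE
std::vector<TokenValuePair> toCanonicalHints(const std::vector<aidl_hal::TokenValuePair>& hints) {
    // convert() validates the hints; fall back to an empty list on failure, since the
    // driver is allowed to ignore invalid hints anyway.
    auto converted = convert(hints);
    return converted.has_value() ? std::move(converted).value() : std::vector<TokenValuePair>{};
}
#endif  // NN_AIDL_V4_OR_ABOVE

}  // namespace android::nn
```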
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h
index d558f66..615c6de 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h
@@ -42,6 +42,7 @@
     struct PrivateConstructorTag {};
 
   public:
+    // featureLevel is exposed as a parameter so tests can exercise version-specific behavior.
     static nn::GeneralResult<std::shared_ptr<const Device>> create(
             std::string name, std::shared_ptr<aidl_hal::IDevice> device, nn::Version featureLevel);
 
@@ -67,8 +68,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Execution.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Execution.h
index a77ea98..14802b9 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Execution.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Execution.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_EXECUTION_H
 #define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_EXECUTION_H
 
+#include <aidl/android/hardware/neuralnetworks/IExecution.h>
+
 #include <nnapi/IExecution.h>
 #include <nnapi/Result.h>
 #include <nnapi/Types.h>
@@ -33,17 +35,22 @@
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
-class Execution final : public nn::IExecution, public std::enable_shared_from_this<Execution> {
+// A reusable execution implementation with a cached Request; internally it still passes the
+// request to the driver on every computation.
+class ExecutionWithCachedRequest final
+    : public nn::IExecution,
+      public std::enable_shared_from_this<ExecutionWithCachedRequest> {
     struct PrivateConstructorTag {};
 
   public:
-    static nn::GeneralResult<std::shared_ptr<const Execution>> create(
+    static nn::GeneralResult<std::shared_ptr<const ExecutionWithCachedRequest>> create(
             std::shared_ptr<const PreparedModel> preparedModel, Request request,
             hal::utils::RequestRelocation relocation, bool measure, int64_t loopTimeoutDuration);
 
-    Execution(PrivateConstructorTag tag, std::shared_ptr<const PreparedModel> preparedModel,
-              Request request, hal::utils::RequestRelocation relocation, bool measure,
-              int64_t loopTimeoutDuration);
+    ExecutionWithCachedRequest(PrivateConstructorTag tag,
+                               std::shared_ptr<const PreparedModel> preparedModel, Request request,
+                               hal::utils::RequestRelocation relocation, bool measure,
+                               int64_t loopTimeoutDuration);
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> compute(
             const nn::OptionalTimePoint& deadline) const override;
@@ -60,6 +67,30 @@
     const int64_t kLoopTimeoutDuration;
 };
 
+// A reusable execution implementation that is backed by an actual AIDL IExecution object.
+class Execution final : public nn::IExecution, public std::enable_shared_from_this<Execution> {
+    struct PrivateConstructorTag {};
+
+  public:
+    static nn::GeneralResult<std::shared_ptr<const Execution>> create(
+            std::shared_ptr<aidl_hal::IExecution> execution,
+            hal::utils::RequestRelocation relocation);
+
+    Execution(PrivateConstructorTag tag, std::shared_ptr<aidl_hal::IExecution> execution,
+              hal::utils::RequestRelocation relocation);
+
+    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> compute(
+            const nn::OptionalTimePoint& deadline) const override;
+
+    nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> computeFenced(
+            const std::vector<nn::SyncFence>& waitFor, const nn::OptionalTimePoint& deadline,
+            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+
+  private:
+    const std::shared_ptr<aidl_hal::IExecution> kExecution;
+    const hal::utils::RequestRelocation kRelocation;
+};
+
 }  // namespace aidl::android::hardware::neuralnetworks::utils
 
 #endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_EXECUTION_H
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h
index 3fb443c..cacdc26 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h
@@ -61,6 +61,13 @@
 #include <aidl/android/hardware/neuralnetworks/SymmPerChannelQuantParams.h>
 #include <aidl/android/hardware/neuralnetworks/Timing.h>
 
+#ifdef NN_AIDL_V4_OR_ABOVE
+#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
+#include <aidl/android/hardware/neuralnetworks/ExecutionConfig.h>
+#include <aidl/android/hardware/neuralnetworks/IExecution.h>
+#include <aidl/android/hardware/neuralnetworks/PrepareModelConfig.h>
+#endif  // NN_AIDL_V4_OR_ABOVE
+
 namespace android::nn {
 
 namespace aidl_hal = ::aidl::android::hardware::neuralnetworks;
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
index e66507a..9375c1d 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
@@ -53,6 +53,9 @@
             const std::vector<ndk::ScopedFileDescriptor>& dataCache,
             const std::vector<uint8_t>& token,
             const std::shared_ptr<IPreparedModelCallback>& callback) override;
+    ndk::ScopedAStatus prepareModelWithConfig(
+            const Model& model, const PrepareModelConfig& config,
+            const std::shared_ptr<IPreparedModelCallback>& callback) override;
     ndk::ScopedAStatus prepareModelFromCache(
             int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
             const std::vector<ndk::ScopedFileDescriptor>& dataCache,
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h
index 4035764..cb6a85b 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h
@@ -40,26 +40,33 @@
     struct PrivateConstructorTag {};
 
   public:
+    // featureLevel is exposed as a parameter so tests can exercise version-specific behavior.
     static nn::GeneralResult<std::shared_ptr<const PreparedModel>> create(
-            std::shared_ptr<aidl_hal::IPreparedModel> preparedModel);
+            std::shared_ptr<aidl_hal::IPreparedModel> preparedModel, nn::Version featureLevel);
 
     PreparedModel(PrivateConstructorTag tag,
-                  std::shared_ptr<aidl_hal::IPreparedModel> preparedModel);
+                  std::shared_ptr<aidl_hal::IPreparedModel> preparedModel,
+                  nn::Version featureLevel);
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
             const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
             const nn::OptionalDuration& loopTimeoutDuration,
-            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+            const nn::OptionalDuration& timeoutDurationAfterFence,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
 
@@ -67,6 +74,8 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> executeInternal(
             const Request& request, bool measure, int64_t deadline, int64_t loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
             const hal::utils::RequestRelocation& relocation) const;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
@@ -74,10 +83,13 @@
                           const std::vector<ndk::ScopedFileDescriptor>& waitFor, bool measure,
                           int64_t deadline, int64_t loopTimeoutDuration,
                           int64_t timeoutDurationAfterFence,
+                          const std::vector<nn::TokenValuePair>& hints,
+                          const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
                           const hal::utils::RequestRelocation& relocation) const;
 
   private:
     const std::shared_ptr<aidl_hal::IPreparedModel> kPreparedModel;
+    const nn::Version kFeatureLevel;
 };
 
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Utils.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Utils.h
index a27487e..beca38b 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Utils.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Utils.h
@@ -38,6 +38,8 @@
             return nn::kVersionFeatureLevel6;
         case 3:
             return nn::kVersionFeatureLevel7;
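+        // AIDL interface version 4 corresponds to NNAPI feature level 8.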
+        case 4:
+            return nn::kVersionFeatureLevel8;
         default:
             return std::nullopt;
     }
diff --git a/neuralnetworks/aidl/utils/src/Burst.cpp b/neuralnetworks/aidl/utils/src/Burst.cpp
index fb00b26..6c7aa88 100644
--- a/neuralnetworks/aidl/utils/src/Burst.cpp
+++ b/neuralnetworks/aidl/utils/src/Burst.cpp
@@ -43,12 +43,16 @@
     static nn::GeneralResult<std::shared_ptr<const BurstExecution>> create(
             std::shared_ptr<const Burst> burst, Request request,
             std::vector<int64_t> memoryIdentifierTokens, bool measure, int64_t loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
             hal::utils::RequestRelocation relocation,
             std::vector<Burst::OptionalCacheHold> cacheHolds);
 
     BurstExecution(PrivateConstructorTag tag, std::shared_ptr<const Burst> burst, Request request,
                    std::vector<int64_t> memoryIdentifierTokens, bool measure,
-                   int64_t loopTimeoutDuration, hal::utils::RequestRelocation relocation,
+                   int64_t loopTimeoutDuration, const std::vector<nn::TokenValuePair>& hints,
+                   const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
+                   hal::utils::RequestRelocation relocation,
                    std::vector<Burst::OptionalCacheHold> cacheHolds);
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> compute(
@@ -64,6 +68,8 @@
     const std::vector<int64_t> kMemoryIdentifierTokens;
     const bool kMeasure;
     const int64_t kLoopTimeoutDuration;
+    const std::vector<nn::TokenValuePair> kHints;
+    const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix;
     const hal::utils::RequestRelocation kRelocation;
     const std::vector<Burst::OptionalCacheHold> kCacheHolds;
 };
@@ -149,17 +155,20 @@
 }
 
 nn::GeneralResult<std::shared_ptr<const Burst>> Burst::create(
-        std::shared_ptr<aidl_hal::IBurst> burst) {
+        std::shared_ptr<aidl_hal::IBurst> burst, nn::Version featureLevel) {
     if (burst == nullptr) {
         return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                << "aidl_hal::utils::Burst::create must have non-null burst";
     }
 
-    return std::make_shared<const Burst>(PrivateConstructorTag{}, std::move(burst));
+    return std::make_shared<const Burst>(PrivateConstructorTag{}, std::move(burst), featureLevel);
 }
 
-Burst::Burst(PrivateConstructorTag /*tag*/, std::shared_ptr<aidl_hal::IBurst> burst)
-    : kBurst(std::move(burst)), kMemoryCache(std::make_shared<MemoryCache>(kBurst)) {
+Burst::Burst(PrivateConstructorTag /*tag*/, std::shared_ptr<aidl_hal::IBurst> burst,
+             nn::Version featureLevel)
+    : kBurst(std::move(burst)),
+      kMemoryCache(std::make_shared<MemoryCache>(kBurst)),
+      kFeatureLevel(featureLevel) {
     CHECK(kBurst != nullptr);
 }
 
@@ -170,8 +179,9 @@
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalTimePoint& deadline,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -200,14 +210,14 @@
         memoryIdentifierTokens.push_back(-1);
     }
     CHECK_EQ(requestInShared.pools.size(), memoryIdentifierTokens.size());
-
     return executeInternal(aidlRequest, memoryIdentifierTokens, aidlMeasure, aidlDeadline,
-                           aidlLoopTimeoutDuration, relocation);
+                           aidlLoopTimeoutDuration, hints, extensionNameToPrefix, relocation);
 }
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::executeInternal(
         const Request& request, const std::vector<int64_t>& memoryIdentifierTokens, bool measure,
-        int64_t deadline, int64_t loopTimeoutDuration,
+        int64_t deadline, int64_t loopTimeoutDuration, const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
         const hal::utils::RequestRelocation& relocation) const {
     // Ensure that at most one execution is in flight at any given time.
     const bool alreadyInFlight = mExecutionInFlight.test_and_set();
@@ -221,9 +231,21 @@
     }
 
     ExecutionResult executionResult;
-    const auto ret = kBurst->executeSynchronously(request, memoryIdentifierTokens, measure,
-                                                  deadline, loopTimeoutDuration, &executionResult);
-    HANDLE_ASTATUS(ret) << "execute failed";
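+    // Drivers at feature level 8 (AIDL v4) and above accept the execution hints and extension
+    // prefixes bundled into an ExecutionConfig; older drivers fall back to the legacy call.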
+    if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+        auto aidlHints = NN_TRY(convert(hints));
+        auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+        const auto ret = kBurst->executeSynchronouslyWithConfig(
+                request, memoryIdentifierTokens,
+                {measure, loopTimeoutDuration, std::move(aidlHints),
+                 std::move(aidlExtensionPrefix)},
+                deadline, &executionResult);
+        HANDLE_ASTATUS(ret) << "execute failed";
+    } else {
+        const auto ret =
+                kBurst->executeSynchronously(request, memoryIdentifierTokens, measure, deadline,
+                                             loopTimeoutDuration, &executionResult);
+        HANDLE_ASTATUS(ret) << "execute failed";
+    }
     if (!executionResult.outputSufficientSize) {
         auto canonicalOutputShapes =
                 nn::convert(executionResult.outputShapes).value_or(std::vector<nn::OutputShape>{});
@@ -241,7 +263,9 @@
 
 nn::GeneralResult<nn::SharedExecution> Burst::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -272,12 +296,15 @@
 
     return BurstExecution::create(shared_from_this(), std::move(aidlRequest),
                                   std::move(memoryIdentifierTokens), aidlMeasure,
-                                  aidlLoopTimeoutDuration, std::move(relocation), std::move(holds));
+                                  aidlLoopTimeoutDuration, hints, extensionNameToPrefix,
+                                  std::move(relocation), std::move(holds));
 }
 
 nn::GeneralResult<std::shared_ptr<const BurstExecution>> BurstExecution::create(
         std::shared_ptr<const Burst> burst, Request request,
         std::vector<int64_t> memoryIdentifierTokens, bool measure, int64_t loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
         hal::utils::RequestRelocation relocation,
         std::vector<Burst::OptionalCacheHold> cacheHolds) {
     if (burst == nullptr) {
@@ -286,13 +313,15 @@
 
     return std::make_shared<const BurstExecution>(
             PrivateConstructorTag{}, std::move(burst), std::move(request),
-            std::move(memoryIdentifierTokens), measure, loopTimeoutDuration, std::move(relocation),
-            std::move(cacheHolds));
+            std::move(memoryIdentifierTokens), measure, loopTimeoutDuration, hints,
+            extensionNameToPrefix, std::move(relocation), std::move(cacheHolds));
 }
 
 BurstExecution::BurstExecution(PrivateConstructorTag /*tag*/, std::shared_ptr<const Burst> burst,
                                Request request, std::vector<int64_t> memoryIdentifierTokens,
                                bool measure, int64_t loopTimeoutDuration,
+                               const std::vector<nn::TokenValuePair>& hints,
+                               const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
                                hal::utils::RequestRelocation relocation,
                                std::vector<Burst::OptionalCacheHold> cacheHolds)
     : kBurst(std::move(burst)),
@@ -300,6 +329,8 @@
       kMemoryIdentifierTokens(std::move(memoryIdentifierTokens)),
       kMeasure(measure),
       kLoopTimeoutDuration(loopTimeoutDuration),
+      kHints(hints),
+      kExtensionNameToPrefix(extensionNameToPrefix),
       kRelocation(std::move(relocation)),
       kCacheHolds(std::move(cacheHolds)) {}
 
@@ -307,7 +338,8 @@
         const nn::OptionalTimePoint& deadline) const {
     const auto aidlDeadline = NN_TRY(convert(deadline));
     return kBurst->executeInternal(kRequest, kMemoryIdentifierTokens, kMeasure, aidlDeadline,
-                                   kLoopTimeoutDuration, kRelocation);
+                                   kLoopTimeoutDuration, kHints, kExtensionNameToPrefix,
+                                   kRelocation);
 }
 
 nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
diff --git a/neuralnetworks/aidl/utils/src/Callbacks.cpp b/neuralnetworks/aidl/utils/src/Callbacks.cpp
index 8084970..554f3fa 100644
--- a/neuralnetworks/aidl/utils/src/Callbacks.cpp
+++ b/neuralnetworks/aidl/utils/src/Callbacks.cpp
@@ -38,16 +38,17 @@
 // nn::kVersionFeatureLevel5. On failure, this function returns with the appropriate
 // nn::GeneralError.
 nn::GeneralResult<nn::SharedPreparedModel> prepareModelCallback(
-        ErrorStatus status, const std::shared_ptr<IPreparedModel>& preparedModel) {
+        ErrorStatus status, const std::shared_ptr<IPreparedModel>& preparedModel,
+        nn::Version featureLevel) {
     HANDLE_STATUS_AIDL(status) << "model preparation failed with " << toString(status);
-    return NN_TRY(PreparedModel::create(preparedModel));
+    return NN_TRY(PreparedModel::create(preparedModel, featureLevel));
 }
 
 }  // namespace
 
 ndk::ScopedAStatus PreparedModelCallback::notify(
         ErrorStatus status, const std::shared_ptr<IPreparedModel>& preparedModel) {
-    mData.put(prepareModelCallback(status, preparedModel));
+    mData.put(prepareModelCallback(status, preparedModel, kFeatureLevel));
     return ndk::ScopedAStatus::ok();
 }
 
diff --git a/neuralnetworks/aidl/utils/src/Conversions.cpp b/neuralnetworks/aidl/utils/src/Conversions.cpp
index 113d2da..eb28db7 100644
--- a/neuralnetworks/aidl/utils/src/Conversions.cpp
+++ b/neuralnetworks/aidl/utils/src/Conversions.cpp
@@ -302,9 +302,9 @@
     };
 }
 
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
         const aidl_hal::ExtensionNameAndPrefix& extensionNameAndPrefix) {
-    return Model::ExtensionNameAndPrefix{
+    return ExtensionNameAndPrefix{
             .name = extensionNameAndPrefix.name,
             .prefix = extensionNameAndPrefix.prefix,
     };
@@ -506,6 +506,12 @@
     return std::make_shared<const Handle>(std::move(duplicatedFd));
 }
 
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<TokenValuePair> unvalidatedConvert(const aidl_hal::TokenValuePair& tokenValuePair) {
+    return TokenValuePair{.token = tokenValuePair.token, .value = tokenValuePair.value};
+}
+#endif  // NN_AIDL_V4_OR_ABOVE
+
 GeneralResult<Capabilities> convert(const aidl_hal::Capabilities& capabilities) {
     return validatedConvert(capabilities);
 }
@@ -562,6 +568,17 @@
 GeneralResult<std::vector<SharedMemory>> convert(const std::vector<aidl_hal::Memory>& memories) {
     return validatedConvert(memories);
 }
+GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+        const std::vector<aidl_hal::ExtensionNameAndPrefix>& extensionNameAndPrefix) {
+    return unvalidatedConvert(extensionNameAndPrefix);
+}
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<std::vector<TokenValuePair>> convert(
+        const std::vector<aidl_hal::TokenValuePair>& metaData) {
+    return validatedConvert(metaData);
+}
+#endif  // NN_AIDL_V4_OR_ABOVE
 
 GeneralResult<std::vector<OutputShape>> convert(
         const std::vector<aidl_hal::OutputShape>& outputShapes) {
@@ -942,7 +959,7 @@
 }
 
 nn::GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
-        const nn::Model::ExtensionNameAndPrefix& extensionNameToPrefix) {
+        const nn::ExtensionNameAndPrefix& extensionNameToPrefix) {
     return ExtensionNameAndPrefix{
             .name = extensionNameToPrefix.name,
             .prefix = extensionNameToPrefix.prefix,
@@ -1055,6 +1072,11 @@
     return Extension{.name = extension.name,
                      .operandTypes = NN_TRY(unvalidatedConvert(extension.operandTypes))};
 }
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<TokenValuePair> unvalidatedConvert(const nn::TokenValuePair& tokenValuePair) {
+    return TokenValuePair{.token = tokenValuePair.token, .value = tokenValuePair.value};
+}
+#endif  // NN_AIDL_V4_OR_ABOVE
 
 nn::GeneralResult<std::vector<uint8_t>> convert(const nn::CacheToken& cacheToken) {
     return validatedConvert(cacheToken);
@@ -1134,6 +1156,17 @@
         const std::vector<nn::SyncFence>& syncFences) {
     return validatedConvert(syncFences);
 }
+nn::GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) {
+    return unvalidatedConvert(extensionNameToPrefix);
+}
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<std::vector<TokenValuePair>> convert(
+        const std::vector<nn::TokenValuePair>& metaData) {
+    return validatedConvert(metaData);
+}
+#endif  // NN_AIDL_V4_OR_ABOVE
 
 nn::GeneralResult<std::vector<Extension>> convert(const std::vector<nn::Extension>& extensions) {
     return validatedConvert(extensions);
diff --git a/neuralnetworks/aidl/utils/src/Device.cpp b/neuralnetworks/aidl/utils/src/Device.cpp
index 5b7ec4e..f3f4fdb 100644
--- a/neuralnetworks/aidl/utils/src/Device.cpp
+++ b/neuralnetworks/aidl/utils/src/Device.cpp
@@ -215,7 +215,9 @@
 nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
         const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
         nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     // Ensure that model is ready for IPC.
     std::optional<nn::Model> maybeModelInShared;
     const nn::Model& modelInShared =
@@ -225,17 +227,28 @@
     const auto aidlPreference = NN_TRY(convert(preference));
     const auto aidlPriority = NN_TRY(convert(priority));
     const auto aidlDeadline = NN_TRY(convert(deadline));
-    const auto aidlModelCache = NN_TRY(convert(modelCache));
-    const auto aidlDataCache = NN_TRY(convert(dataCache));
+    auto aidlModelCache = NN_TRY(convert(modelCache));
+    auto aidlDataCache = NN_TRY(convert(dataCache));
     const auto aidlToken = NN_TRY(convert(token));
 
-    const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>();
+    const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>(kFeatureLevel);
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
+    if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
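+        // Feature level 8 (AIDL v4) drivers take a single PrepareModelConfig parcelable that
+        // bundles the preference, priority, deadline, caches, token, compilation hints, and
+        // extension prefixes.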
+        auto aidlHints = NN_TRY(convert(hints));
+        auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+        const auto ret = kDevice->prepareModelWithConfig(
+                aidlModel,
+                {aidlPreference, aidlPriority, aidlDeadline, std::move(aidlModelCache),
+                 std::move(aidlDataCache), aidlToken, std::move(aidlHints),
+                 std::move(aidlExtensionPrefix)},
+                cb);
+        HANDLE_ASTATUS(ret) << "prepareModel failed";
+        return cb->get();
+    }
     const auto ret = kDevice->prepareModel(aidlModel, aidlPreference, aidlPriority, aidlDeadline,
                                            aidlModelCache, aidlDataCache, aidlToken, cb);
     HANDLE_ASTATUS(ret) << "prepareModel failed";
-
     return cb->get();
 }
 
@@ -247,7 +260,7 @@
     const auto aidlDataCache = NN_TRY(convert(dataCache));
     const auto aidlToken = NN_TRY(convert(token));
 
-    const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>();
+    const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>(kFeatureLevel);
     const auto scoped = kDeathHandler.protectCallback(cb.get());
 
     const auto ret = kDevice->prepareModelFromCache(aidlDeadline, aidlModelCache, aidlDataCache,
diff --git a/neuralnetworks/aidl/utils/src/Execution.cpp b/neuralnetworks/aidl/utils/src/Execution.cpp
index 94edd90..2fd88af 100644
--- a/neuralnetworks/aidl/utils/src/Execution.cpp
+++ b/neuralnetworks/aidl/utils/src/Execution.cpp
@@ -35,44 +35,61 @@
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
-nn::GeneralResult<std::shared_ptr<const Execution>> Execution::create(
-        std::shared_ptr<const PreparedModel> preparedModel, Request request,
-        hal::utils::RequestRelocation relocation, bool measure, int64_t loopTimeoutDuration) {
+nn::GeneralResult<std::shared_ptr<const ExecutionWithCachedRequest>>
+ExecutionWithCachedRequest::create(std::shared_ptr<const PreparedModel> preparedModel,
+                                   Request request, hal::utils::RequestRelocation relocation,
+                                   bool measure, int64_t loopTimeoutDuration) {
     if (preparedModel == nullptr) {
-        return NN_ERROR() << "aidl::utils::Execution::create must have non-null preparedModel";
+        return NN_ERROR() << "aidl::utils::ExecutionWithCachedRequest::create must have non-null "
+                             "preparedModel";
     }
 
-    return std::make_shared<const Execution>(PrivateConstructorTag{}, std::move(preparedModel),
-                                             std::move(request), std::move(relocation), measure,
-                                             loopTimeoutDuration);
+    return std::make_shared<const ExecutionWithCachedRequest>(
+            PrivateConstructorTag{}, std::move(preparedModel), std::move(request),
+            std::move(relocation), measure, loopTimeoutDuration);
 }
 
-Execution::Execution(PrivateConstructorTag /*tag*/,
-                     std::shared_ptr<const PreparedModel> preparedModel, Request request,
-                     hal::utils::RequestRelocation relocation, bool measure,
-                     int64_t loopTimeoutDuration)
+ExecutionWithCachedRequest::ExecutionWithCachedRequest(
+        PrivateConstructorTag /*tag*/, std::shared_ptr<const PreparedModel> preparedModel,
+        Request request, hal::utils::RequestRelocation relocation, bool measure,
+        int64_t loopTimeoutDuration)
     : kPreparedModel(std::move(preparedModel)),
       kRequest(std::move(request)),
       kRelocation(std::move(relocation)),
       kMeasure(measure),
       kLoopTimeoutDuration(loopTimeoutDuration) {}
 
-nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Execution::compute(
-        const nn::OptionalTimePoint& deadline) const {
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
+ExecutionWithCachedRequest::compute(const nn::OptionalTimePoint& deadline) const {
     const auto aidlDeadline = NN_TRY(convert(deadline));
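+    // Hints and extension prefixes are not forwarded on this path; reusable executions for
+    // feature level 8 drivers use the IExecution-backed Execution class instead.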
     return kPreparedModel->executeInternal(kRequest, kMeasure, aidlDeadline, kLoopTimeoutDuration,
-                                           kRelocation);
+                                           {}, {}, kRelocation);
 }
 
-nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> Execution::computeFenced(
+nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
+ExecutionWithCachedRequest::computeFenced(
         const std::vector<nn::SyncFence>& waitFor, const nn::OptionalTimePoint& deadline,
         const nn::OptionalDuration& timeoutDurationAfterFence) const {
     const auto aidlWaitFor = NN_TRY(convert(waitFor));
     const auto aidlDeadline = NN_TRY(convert(deadline));
     const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));
-    return kPreparedModel->executeFencedInternal(kRequest, aidlWaitFor, kMeasure, aidlDeadline,
-                                                 kLoopTimeoutDuration,
-                                                 aidlTimeoutDurationAfterFence, kRelocation);
+    return kPreparedModel->executeFencedInternal(
+            kRequest, aidlWaitFor, kMeasure, aidlDeadline, kLoopTimeoutDuration,
+            aidlTimeoutDurationAfterFence, {}, {}, kRelocation);
 }
 
+nn::GeneralResult<std::shared_ptr<const Execution>> Execution::create(
+        std::shared_ptr<aidl_hal::IExecution> execution, hal::utils::RequestRelocation relocation) {
+    if (execution == nullptr) {
+        return NN_ERROR() << "aidl::utils::Execution::create must have non-null execution";
+    }
+
+    return std::make_shared<const Execution>(PrivateConstructorTag{}, std::move(execution),
+                                             std::move(relocation));
+}
+
+Execution::Execution(PrivateConstructorTag /*tag*/, std::shared_ptr<aidl_hal::IExecution> execution,
+                     hal::utils::RequestRelocation relocation)
+    : kExecution(std::move(execution)), kRelocation(std::move(relocation)) {}
+
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/src/InvalidDevice.cpp b/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
index c9d9955..33270ff 100644
--- a/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
+++ b/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
@@ -167,6 +167,31 @@
     return ndk::ScopedAStatus::ok();
 }
 
+ndk::ScopedAStatus InvalidDevice::prepareModelWithConfig(
+        const Model& model, const PrepareModelConfig& config,
+        const std::shared_ptr<IPreparedModelCallback>& callback) {
+    if (!utils::valid(config.extensionNameToPrefix)) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT, "Invalid extensionNameToPrefix");
+    }
+    for (const auto& hint : config.compilationHints) {
+        auto result = std::find_if(config.extensionNameToPrefix.begin(),
+                                   config.extensionNameToPrefix.end(),
+                                   [&hint](const ExtensionNameAndPrefix& extension) {
+                                       uint16_t prefix = static_cast<uint32_t>(hint.token) >>
+                                                         IDevice::EXTENSION_TYPE_LOW_BITS_TYPE;
+                                       return prefix == extension.prefix;
+                                   });
+        if (result == config.extensionNameToPrefix.end()) {
+            callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+            return toAStatus(ErrorStatus::INVALID_ARGUMENT,
+                             "Invalid token for compilation hints: " + std::to_string(hint.token));
+        }
+    }
+    return prepareModel(model, config.preference, config.priority, config.deadlineNs,
+                        config.modelCache, config.dataCache, config.cacheToken, callback);
+}
+
 ndk::ScopedAStatus InvalidDevice::prepareModelFromCache(
         int64_t /*deadline*/, const std::vector<ndk::ScopedFileDescriptor>& /*modelCache*/,
         const std::vector<ndk::ScopedFileDescriptor>& /*dataCache*/,
diff --git a/neuralnetworks/aidl/utils/src/PreparedModel.cpp b/neuralnetworks/aidl/utils/src/PreparedModel.cpp
index f25c2c8..7e3a31c 100644
--- a/neuralnetworks/aidl/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/aidl/utils/src/PreparedModel.cpp
@@ -54,61 +54,16 @@
     return std::make_pair(NN_TRY(nn::convert(timingLaunched)), NN_TRY(nn::convert(timingFenced)));
 }
 
-}  // namespace
-
-nn::GeneralResult<std::shared_ptr<const PreparedModel>> PreparedModel::create(
-        std::shared_ptr<aidl_hal::IPreparedModel> preparedModel) {
-    if (preparedModel == nullptr) {
-        return NN_ERROR()
-               << "aidl_hal::utils::PreparedModel::create must have non-null preparedModel";
-    }
-
-    return std::make_shared<const PreparedModel>(PrivateConstructorTag{}, std::move(preparedModel));
-}
-
-PreparedModel::PreparedModel(PrivateConstructorTag /*tag*/,
-                             std::shared_ptr<aidl_hal::IPreparedModel> preparedModel)
-    : kPreparedModel(std::move(preparedModel)) {}
-
-nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
-        const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalTimePoint& deadline,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
-    // Ensure that request is ready for IPC.
-    std::optional<nn::Request> maybeRequestInShared;
-    hal::utils::RequestRelocation relocation;
-    const nn::Request& requestInShared = NN_TRY(hal::utils::convertRequestFromPointerToShared(
-            &request, nn::kDefaultRequestMemoryAlignment, nn::kDefaultRequestMemoryPadding,
-            &maybeRequestInShared, &relocation));
-
-    const auto aidlRequest = NN_TRY(convert(requestInShared));
-    const auto aidlMeasure = NN_TRY(convert(measure));
-    const auto aidlDeadline = NN_TRY(convert(deadline));
-    const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
-    return executeInternal(aidlRequest, aidlMeasure, aidlDeadline, aidlLoopTimeoutDuration,
-                           relocation);
-}
-
-nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
-PreparedModel::executeInternal(const Request& request, bool measure, int64_t deadline,
-                               int64_t loopTimeoutDuration,
-                               const hal::utils::RequestRelocation& relocation) const {
-    if (relocation.input) {
-        relocation.input->flush();
-    }
-
-    ExecutionResult executionResult;
-    const auto ret = kPreparedModel->executeSynchronously(request, measure, deadline,
-                                                          loopTimeoutDuration, &executionResult);
-    HANDLE_ASTATUS(ret) << "executeSynchronously failed";
-    if (!executionResult.outputSufficientSize) {
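+// Converts a driver ExecutionResult into canonical output shapes and timing, flushing any
+// relocated output memory on success.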
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> handleExecutionResult(
+        const ExecutionResult& result, const hal::utils::RequestRelocation& relocation) {
+    if (!result.outputSufficientSize) {
         auto canonicalOutputShapes =
-                nn::convert(executionResult.outputShapes).value_or(std::vector<nn::OutputShape>{});
+                nn::convert(result.outputShapes).value_or(std::vector<nn::OutputShape>{});
         return NN_ERROR(nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, std::move(canonicalOutputShapes))
                << "execution failed with " << nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
     }
     auto [outputShapes, timing] =
-            NN_TRY(convertExecutionResults(executionResult.outputShapes, executionResult.timing));
+            NN_TRY(convertExecutionResults(result.outputShapes, result.timing));
 
     if (relocation.output) {
         relocation.output->flush();
@@ -117,44 +72,8 @@
 }
 
 nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
-                             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
-                             const nn::OptionalDuration& loopTimeoutDuration,
-                             const nn::OptionalDuration& timeoutDurationAfterFence) const {
-    // Ensure that request is ready for IPC.
-    std::optional<nn::Request> maybeRequestInShared;
-    hal::utils::RequestRelocation relocation;
-    const nn::Request& requestInShared = NN_TRY(hal::utils::convertRequestFromPointerToShared(
-            &request, nn::kDefaultRequestMemoryAlignment, nn::kDefaultRequestMemoryPadding,
-            &maybeRequestInShared, &relocation));
-
-    const auto aidlRequest = NN_TRY(convert(requestInShared));
-    const auto aidlWaitFor = NN_TRY(convert(waitFor));
-    const auto aidlMeasure = NN_TRY(convert(measure));
-    const auto aidlDeadline = NN_TRY(convert(deadline));
-    const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
-    const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));
-    return executeFencedInternal(aidlRequest, aidlWaitFor, aidlMeasure, aidlDeadline,
-                                 aidlLoopTimeoutDuration, aidlTimeoutDurationAfterFence,
-                                 relocation);
-}
-
-nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFencedInternal(const Request& request,
-                                     const std::vector<ndk::ScopedFileDescriptor>& waitFor,
-                                     bool measure, int64_t deadline, int64_t loopTimeoutDuration,
-                                     int64_t timeoutDurationAfterFence,
-                                     const hal::utils::RequestRelocation& relocation) const {
-    if (relocation.input) {
-        relocation.input->flush();
-    }
-
-    FencedExecutionResult result;
-    const auto ret =
-            kPreparedModel->executeFenced(request, waitFor, measure, deadline, loopTimeoutDuration,
-                                          timeoutDurationAfterFence, &result);
-    HANDLE_ASTATUS(ret) << "executeFenced failed";
-
+handleFencedExecutionResult(const FencedExecutionResult& result,
+                            const hal::utils::RequestRelocation& relocation) {
     auto resultSyncFence = nn::SyncFence::createAsSignaled();
     if (result.syncFence.get() != -1) {
         resultSyncFence = nn::SyncFence::create(NN_TRY(nn::convert(result.syncFence))).value();
@@ -165,7 +84,7 @@
         return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "callback is null";
     }
 
-    // If executeFenced required the request memory to be moved into shared memory, block here until
+    // If computeFenced required the request memory to be moved into shared memory, block here until
     // the fenced execution has completed and flush the memory back.
     if (relocation.output) {
         const auto state = resultSyncFence.syncWait({});
@@ -189,9 +108,133 @@
     return std::make_pair(std::move(resultSyncFence), std::move(resultCallback));
 }
 
+}  // namespace
+
+nn::GeneralResult<std::shared_ptr<const PreparedModel>> PreparedModel::create(
+        std::shared_ptr<aidl_hal::IPreparedModel> preparedModel, nn::Version featureLevel) {
+    if (preparedModel == nullptr) {
+        return NN_ERROR()
+               << "aidl_hal::utils::PreparedModel::create must have non-null preparedModel";
+    }
+
+    return std::make_shared<const PreparedModel>(PrivateConstructorTag{}, std::move(preparedModel),
+                                                 featureLevel);
+}
+
+PreparedModel::PreparedModel(PrivateConstructorTag /*tag*/,
+                             std::shared_ptr<aidl_hal::IPreparedModel> preparedModel,
+                             nn::Version featureLevel)
+    : kPreparedModel(std::move(preparedModel)), kFeatureLevel(featureLevel) {}
+
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
+        const nn::Request& request, nn::MeasureTiming measure,
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+    // Ensure that request is ready for IPC.
+    std::optional<nn::Request> maybeRequestInShared;
+    hal::utils::RequestRelocation relocation;
+    const nn::Request& requestInShared = NN_TRY(hal::utils::convertRequestFromPointerToShared(
+            &request, nn::kDefaultRequestMemoryAlignment, nn::kDefaultRequestMemoryPadding,
+            &maybeRequestInShared, &relocation));
+
+    const auto aidlRequest = NN_TRY(convert(requestInShared));
+    const auto aidlMeasure = NN_TRY(convert(measure));
+    const auto aidlDeadline = NN_TRY(convert(deadline));
+    const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
+    return executeInternal(aidlRequest, aidlMeasure, aidlDeadline, aidlLoopTimeoutDuration, hints,
+                           extensionNameToPrefix, relocation);
+}
+
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
+PreparedModel::executeInternal(const Request& request, bool measure, int64_t deadline,
+                               int64_t loopTimeoutDuration,
+                               const std::vector<nn::TokenValuePair>& hints,
+                               const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
+                               const hal::utils::RequestRelocation& relocation) const {
+    if (relocation.input) {
+        relocation.input->flush();
+    }
+
+    ExecutionResult executionResult;
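+    // Feature level 8 (AIDL v4) drivers take the measure flag, loop timeout, hints, and
+    // extension prefixes as a single ExecutionConfig; older drivers use the legacy synchronous
+    // call.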
+    if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+        auto aidlHints = NN_TRY(convert(hints));
+        auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+        const auto ret = kPreparedModel->executeSynchronouslyWithConfig(
+                request,
+                {measure, loopTimeoutDuration, std::move(aidlHints),
+                 std::move(aidlExtensionPrefix)},
+                deadline, &executionResult);
+        HANDLE_ASTATUS(ret) << "executeSynchronouslyWithConfig failed";
+    } else {
+        const auto ret = kPreparedModel->executeSynchronously(
+                request, measure, deadline, loopTimeoutDuration, &executionResult);
+        HANDLE_ASTATUS(ret) << "executeSynchronously failed";
+    }
+    return handleExecutionResult(executionResult, relocation);
+}
+
+nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
+PreparedModel::executeFenced(
+        const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
+        nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const nn::OptionalDuration& timeoutDurationAfterFence,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+    // Ensure that request is ready for IPC.
+    std::optional<nn::Request> maybeRequestInShared;
+    hal::utils::RequestRelocation relocation;
+    const nn::Request& requestInShared = NN_TRY(hal::utils::convertRequestFromPointerToShared(
+            &request, nn::kDefaultRequestMemoryAlignment, nn::kDefaultRequestMemoryPadding,
+            &maybeRequestInShared, &relocation));
+
+    const auto aidlRequest = NN_TRY(convert(requestInShared));
+    const auto aidlWaitFor = NN_TRY(convert(waitFor));
+    const auto aidlMeasure = NN_TRY(convert(measure));
+    const auto aidlDeadline = NN_TRY(convert(deadline));
+    const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
+    const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));
+    return executeFencedInternal(aidlRequest, aidlWaitFor, aidlMeasure, aidlDeadline,
+                                 aidlLoopTimeoutDuration, aidlTimeoutDurationAfterFence, hints,
+                                 extensionNameToPrefix, relocation);
+}
+
+nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
+PreparedModel::executeFencedInternal(
+        const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor, bool measure,
+        int64_t deadline, int64_t loopTimeoutDuration, int64_t timeoutDurationAfterFence,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
+        const hal::utils::RequestRelocation& relocation) const {
+    if (relocation.input) {
+        relocation.input->flush();
+    }
+
+    FencedExecutionResult result;
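+    // Same dispatch as executeInternal: bundle the execution settings into an ExecutionConfig
+    // for feature level 8 drivers, otherwise call the legacy executeFenced entry point.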
+    if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+        auto aidlHints = NN_TRY(convert(hints));
+        auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+        const auto ret = kPreparedModel->executeFencedWithConfig(
+                request, waitFor,
+                {measure, loopTimeoutDuration, std::move(aidlHints),
+                 std::move(aidlExtensionPrefix)},
+                deadline, timeoutDurationAfterFence, &result);
+        HANDLE_ASTATUS(ret) << "executeFencedWithConfig failed";
+    } else {
+        const auto ret = kPreparedModel->executeFenced(request, waitFor, measure, deadline,
+                                                       loopTimeoutDuration,
+                                                       timeoutDurationAfterFence, &result);
+        HANDLE_ASTATUS(ret) << "executeFenced failed";
+    }
+    return handleFencedExecutionResult(result, relocation);
+}
+
 nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     // Ensure that request is ready for IPC.
     std::optional<nn::Request> maybeRequestInShared;
     hal::utils::RequestRelocation relocation;
@@ -202,15 +245,31 @@
     auto aidlRequest = NN_TRY(convert(requestInShared));
     auto aidlMeasure = NN_TRY(convert(measure));
     auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
-    return Execution::create(shared_from_this(), std::move(aidlRequest), std::move(relocation),
-                             aidlMeasure, aidlLoopTimeoutDuration);
+
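+    // Feature level 8 drivers can create a true reusable execution object (IExecution); for
+    // older drivers, fall back to caching the converted request and re-sending it on every
+    // compute().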
+    if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+        std::shared_ptr<IExecution> execution;
+        auto aidlHints = NN_TRY(convert(hints));
+        auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+
+        const auto ret = kPreparedModel->createReusableExecution(
+                aidlRequest,
+                {aidlMeasure, aidlLoopTimeoutDuration, std::move(aidlHints),
+                 std::move(aidlExtensionPrefix)},
+                &execution);
+        HANDLE_ASTATUS(ret) << "createReusableExecution failed";
+        return Execution::create(std::move(execution), std::move(relocation));
+    }
+
+    return ExecutionWithCachedRequest::create(shared_from_this(), std::move(aidlRequest),
+                                              std::move(relocation), aidlMeasure,
+                                              aidlLoopTimeoutDuration);
 }
 
 nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
     std::shared_ptr<IBurst> burst;
     const auto ret = kPreparedModel->configureExecutionBurst(&burst);
     HANDLE_ASTATUS(ret) << "configureExecutionBurst failed";
-    return Burst::create(std::move(burst));
+    return Burst::create(std::move(burst), kFeatureLevel);
 }
 
 std::any PreparedModel::getUnderlyingResource() const {
@@ -218,4 +277,36 @@
     return resource;
 }
 
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Execution::compute(
+        const nn::OptionalTimePoint& deadline) const {
+    const auto aidlDeadline = NN_TRY(convert(deadline));
+
+    if (kRelocation.input) {
+        kRelocation.input->flush();
+    }
+
+    ExecutionResult executionResult;
+    auto ret = kExecution->executeSynchronously(aidlDeadline, &executionResult);
+    HANDLE_ASTATUS(ret) << "executeSynchronously failed";
+    return handleExecutionResult(executionResult, kRelocation);
+}
+
+nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> Execution::computeFenced(
+        const std::vector<nn::SyncFence>& waitFor, const nn::OptionalTimePoint& deadline,
+        const nn::OptionalDuration& timeoutDurationAfterFence) const {
+    const auto aidlWaitFor = NN_TRY(convert(waitFor));
+    const auto aidlDeadline = NN_TRY(convert(deadline));
+    const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));
+
+    if (kRelocation.input) {
+        kRelocation.input->flush();
+    }
+
+    FencedExecutionResult result;
+    const auto ret = kExecution->executeFenced(aidlWaitFor, aidlDeadline,
+                                               aidlTimeoutDurationAfterFence, &result);
+    HANDLE_ASTATUS(ret) << "executeFenced failed";
+    return handleFencedExecutionResult(result, kRelocation);
+}
+
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/DeviceTest.cpp b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
index 0366e7d..73727b3 100644
--- a/neuralnetworks/aidl/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
@@ -17,6 +17,7 @@
 #include "MockBuffer.h"
 #include "MockDevice.h"
 #include "MockPreparedModel.h"
+#include "TestUtils.h"
 
 #include <aidl/android/hardware/neuralnetworks/BnDevice.h>
 #include <android/binder_auto_utils.h>
@@ -60,7 +61,6 @@
                                                 .powerUsage = std::numeric_limits<float>::max()};
 constexpr NumberOfCacheFiles kNumberOfCacheFiles = {.numModelCache = nn::kMaxNumberOfCacheFiles - 1,
                                                     .numDataCache = nn::kMaxNumberOfCacheFiles};
-
 constexpr auto makeStatusOk = [] { return ndk::ScopedAStatus::ok(); };
 
 std::shared_ptr<MockDevice> createMockDevice() {
@@ -123,6 +123,18 @@
     };
 }
 
+const std::vector<nn::TokenValuePair> kHints = {nn::TokenValuePair{.token = 0, .value = {1}}};
+const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix = {
+        nn::ExtensionNameAndPrefix{.name = "com.android.nn_test", .prefix = 1}};
+auto makePreparedModelWithConfigReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
+                                       const std::shared_ptr<MockPreparedModel>& preparedModel) {
+    return [launchStatus, returnStatus, preparedModel](
+                   const Model& /*model*/, const PrepareModelConfig& /*config*/,
+                   const std::shared_ptr<IPreparedModelCallback>& cb) -> ndk::ScopedAStatus {
+        return makePreparedModelReturnImpl(launchStatus, returnStatus, preparedModel, cb);
+    };
+}
+
 auto makePreparedModelFromCacheReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
                                       const std::shared_ptr<MockPreparedModel>& preparedModel) {
     return [launchStatus, returnStatus, preparedModel](
@@ -146,26 +158,7 @@
     return ndk::ScopedAStatus::fromStatus(STATUS_DEAD_OBJECT);
 };
 
-class DeviceTest : public ::testing::TestWithParam<nn::Version> {
-  protected:
-    const nn::Version kVersion = GetParam();
-};
-
-std::string printDeviceTest(const testing::TestParamInfo<nn::Version>& info) {
-    const nn::Version version = info.param;
-    CHECK(!version.runtimeOnlyFeatures);
-    switch (version.level) {
-        case nn::Version::Level::FEATURE_LEVEL_5:
-            return "v1";
-        case nn::Version::Level::FEATURE_LEVEL_6:
-            return "v2";
-        case nn::Version::Level::FEATURE_LEVEL_7:
-            return "v3";
-        default:
-            LOG(FATAL) << "Invalid AIDL version: " << version;
-            return "invalid";
-    }
-}
+class DeviceTest : public VersionedAidlUtilsTestBase {};
 
 }  // namespace
 
@@ -578,6 +571,8 @@
 }
 
 TEST_P(DeviceTest, prepareModel) {
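+    // This test exercises the legacy prepareModel() entry point, which Device only uses for
+    // drivers below feature level 8.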
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -589,7 +584,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -598,6 +593,8 @@
 }
 
 TEST_P(DeviceTest, prepareModelLaunchError) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -608,7 +605,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -616,6 +613,8 @@
 }
 
 TEST_P(DeviceTest, prepareModelReturnError) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -626,7 +625,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -634,6 +633,8 @@
 }
 
 TEST_P(DeviceTest, prepareModelNullptrError) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -644,7 +645,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -652,6 +653,8 @@
 }
 
 TEST_P(DeviceTest, prepareModelTransportFailure) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -661,7 +664,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -669,6 +672,8 @@
 }
 
 TEST_P(DeviceTest, prepareModelDeadObject) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -678,7 +683,7 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -686,6 +691,8 @@
 }
 
 TEST_P(DeviceTest, prepareModelAsyncCrash) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup test
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -699,7 +706,157 @@
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfig) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(ErrorStatus::NONE, ErrorStatus::NONE,
+                                                               mockPreparedModel)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_NE(result.value(), nullptr);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigLaunchError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(
+                    ErrorStatus::GENERAL_FAILURE, ErrorStatus::GENERAL_FAILURE, nullptr)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigReturnError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(
+                    ErrorStatus::NONE, ErrorStatus::GENERAL_FAILURE, nullptr)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigNullptrError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(ErrorStatus::NONE, ErrorStatus::NONE,
+                                                               nullptr)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigAsyncCrash) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    const auto ret = [&device]() {
+        DeathMonitor::serviceDied(device->getDeathMonitor());
+        return ndk::ScopedAStatus::ok();
+    };
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(ret));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -894,9 +1051,6 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-INSTANTIATE_TEST_SUITE_P(TestDevice, DeviceTest,
-                         ::testing::Values(nn::kVersionFeatureLevel5, nn::kVersionFeatureLevel6,
-                                           nn::kVersionFeatureLevel7),
-                         printDeviceTest);
+INSTANTIATE_VERSIONED_AIDL_UTILS_TEST(DeviceTest, kAllAidlVersions);
 
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/ExecutionTest.cpp b/neuralnetworks/aidl/utils/test/ExecutionTest.cpp
new file mode 100644
index 0000000..8519290
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/ExecutionTest.cpp
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MockExecution.h"
+#include "MockFencedExecutionCallback.h"
+
+#include <aidl/android/hardware/neuralnetworks/IFencedExecutionCallback.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <nnapi/IExecution.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/aidl/Execution.h>
+
+#include <functional>
+#include <memory>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+namespace {
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Invoke;
+using ::testing::InvokeWithoutArgs;
+using ::testing::SetArgPointee;
+
+const std::shared_ptr<IExecution> kInvalidExecution;
+constexpr auto kNoTiming = Timing{.timeOnDeviceNs = -1, .timeInDriverNs = -1};
+
+constexpr auto makeStatusOk = [] { return ndk::ScopedAStatus::ok(); };
+
+constexpr auto makeGeneralFailure = [] {
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+};
+constexpr auto makeGeneralTransportFailure = [] {
+    return ndk::ScopedAStatus::fromStatus(STATUS_NO_MEMORY);
+};
+constexpr auto makeDeadObjectFailure = [] {
+    return ndk::ScopedAStatus::fromStatus(STATUS_DEAD_OBJECT);
+};
+
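+// Returns a callable for use with EXPECT_CALL on executeFenced; it populates the
+// FencedExecutionResult with the given callback and an empty (-1) sync fence.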
+auto makeFencedExecutionResult(const std::shared_ptr<MockFencedExecutionCallback>& callback) {
+    return [callback](const std::vector<ndk::ScopedFileDescriptor>& /*waitFor*/,
+                      int64_t /*deadline*/, int64_t /*duration*/,
+                      FencedExecutionResult* fencedExecutionResult) {
+        *fencedExecutionResult = FencedExecutionResult{.callback = callback,
+                                                       .syncFence = ndk::ScopedFileDescriptor(-1)};
+        return ndk::ScopedAStatus::ok();
+    };
+}
+
+}  // namespace
+
+TEST(ExecutionTest, invalidExecution) {
+    // run test
+    const auto result = Execution::create(kInvalidExecution, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeSync) {
+    // setup call
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    const auto mockExecutionResult = ExecutionResult{
+            .outputSufficientSize = true,
+            .outputShapes = {},
+            .timing = kNoTiming,
+    };
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(
+                    DoAll(SetArgPointee<1>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    EXPECT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ExecutionTest, executeSyncError) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(Invoke(makeGeneralFailure));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeSyncTransportFailure) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeSyncDeadObject) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ExecutionTest, executeFenced) {
+    // setup call
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::NONE), Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED);
+    ASSERT_NE(callback, nullptr);
+
+    // get results from callback
+    const auto callbackResult = callback();
+    ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << callbackResult.error().code << ": "
+                                            << callbackResult.error().message;
+}
+
+TEST(ExecutionTest, executeFencedCallbackError) {
+    // setup call
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::GENERAL_FAILURE),
+                            Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE);
+    ASSERT_NE(callback, nullptr);
+
+    // verify callback failure
+    const auto callbackResult = callback();
+    ASSERT_FALSE(callbackResult.has_value());
+    EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeFencedError) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeFencedTransportFailure) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeFencedDeadObject) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/MockBuffer.h b/neuralnetworks/aidl/utils/test/MockBuffer.h
index f77fa86..7a05a0f 100644
--- a/neuralnetworks/aidl/utils/test/MockBuffer.h
+++ b/neuralnetworks/aidl/utils/test/MockBuffer.h
@@ -21,7 +21,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
diff --git a/neuralnetworks/aidl/utils/test/MockBurst.h b/neuralnetworks/aidl/utils/test/MockBurst.h
index 5083bbd..609bd30 100644
--- a/neuralnetworks/aidl/utils/test/MockBurst.h
+++ b/neuralnetworks/aidl/utils/test/MockBurst.h
@@ -21,7 +21,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
@@ -32,6 +31,10 @@
                  bool measureTiming, int64_t deadline, int64_t loopTimeoutDuration,
                  ExecutionResult* executionResult),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeSynchronouslyWithConfig,
+                (const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+                 const ExecutionConfig& config, int64_t deadline, ExecutionResult* executionResult),
+                (override));
     MOCK_METHOD(ndk::ScopedAStatus, releaseMemoryResource, (int64_t memoryIdentifierToken),
                 (override));
 };
diff --git a/neuralnetworks/aidl/utils/test/MockDevice.h b/neuralnetworks/aidl/utils/test/MockDevice.h
index 3a28d55..47b8346 100644
--- a/neuralnetworks/aidl/utils/test/MockDevice.h
+++ b/neuralnetworks/aidl/utils/test/MockDevice.h
@@ -50,6 +50,10 @@
                  const std::vector<uint8_t>& token,
                  const std::shared_ptr<IPreparedModelCallback>& callback),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, prepareModelWithConfig,
+                (const Model& model, const PrepareModelConfig& config,
+                 const std::shared_ptr<IPreparedModelCallback>& callback),
+                (override));
     MOCK_METHOD(ndk::ScopedAStatus, prepareModelFromCache,
                 (int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
                  const std::vector<ndk::ScopedFileDescriptor>& dataCache,
diff --git a/neuralnetworks/aidl/utils/test/MockExecution.h b/neuralnetworks/aidl/utils/test/MockExecution.h
new file mode 100644
index 0000000..782e54f
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/MockExecution.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_EXECUTION_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_EXECUTION_H
+
+#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
+#include <android/binder_interface_utils.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+
+class MockExecution final : public BnExecution {
+  public:
+    static std::shared_ptr<MockExecution> create();
+
+    MOCK_METHOD(ndk::ScopedAStatus, executeSynchronously,
+                (int64_t deadline, ExecutionResult* executionResult), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeFenced,
+                (const std::vector<ndk::ScopedFileDescriptor>& waitFor, int64_t deadline,
+                 int64_t duration, FencedExecutionResult* fencedExecutionResult),
+                (override));
+};
+
+inline std::shared_ptr<MockExecution> MockExecution::create() {
+    return ndk::SharedRefBase::make<MockExecution>();
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_EXECUTION_H
diff --git a/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h b/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
index 06f9ea2..29449bb 100644
--- a/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
+++ b/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
@@ -22,7 +22,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
diff --git a/neuralnetworks/aidl/utils/test/MockPreparedModel.h b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
index a4ae2b7..a5b3b66 100644
--- a/neuralnetworks/aidl/utils/test/MockPreparedModel.h
+++ b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
@@ -17,12 +17,11 @@
 #ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H
 #define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H
 
+#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
 #include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/HidlSupport.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
@@ -39,8 +38,21 @@
                  bool measureTiming, int64_t deadline, int64_t loopTimeoutDuration,
                  int64_t duration, FencedExecutionResult* fencedExecutionResult),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeSynchronouslyWithConfig,
+                (const Request& request, const ExecutionConfig& config, int64_t deadline,
+                 ExecutionResult* executionResult),
+                (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeFencedWithConfig,
+                (const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+                 const ExecutionConfig& config, int64_t deadline, int64_t duration,
+                 FencedExecutionResult* fencedExecutionResult),
+                (override));
     MOCK_METHOD(ndk::ScopedAStatus, configureExecutionBurst, (std::shared_ptr<IBurst> * burst),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, createReusableExecution,
+                (const Request& request, const ExecutionConfig& config,
+                 std::shared_ptr<IExecution>* execution),
+                (override));
 };
 
 inline std::shared_ptr<MockPreparedModel> MockPreparedModel::create() {
diff --git a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
index 8bb5c90..bf6136d 100644
--- a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
@@ -15,8 +15,10 @@
  */
 
 #include "MockBurst.h"
+#include "MockExecution.h"
 #include "MockFencedExecutionCallback.h"
 #include "MockPreparedModel.h"
+#include "TestUtils.h"
 
 #include <aidl/android/hardware/neuralnetworks/IFencedExecutionCallback.h>
 #include <gmock/gmock.h>
@@ -66,21 +68,40 @@
     };
 }
 
+class PreparedModelTest : public VersionedAidlUtilsTestBase {};
+
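+// Placeholder token-value hints and extension name-to-prefix mapping used to exercise the
+// *WithConfig code paths.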
+const std::vector<nn::TokenValuePair> kHints = {nn::TokenValuePair{.token = 0, .value = {1}}};
+const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix = {
+        nn::ExtensionNameAndPrefix{.name = "com.android.nn_test", .prefix = 1}};
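+
+// Returns a callable for use with EXPECT_CALL on executeFencedWithConfig; it populates the
+// FencedExecutionResult with the given callback and an empty (-1) sync fence.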
+auto makeFencedExecutionWithConfigResult(
+        const std::shared_ptr<MockFencedExecutionCallback>& callback) {
+    return [callback](const Request& /*request*/,
+                      const std::vector<ndk::ScopedFileDescriptor>& /*waitFor*/,
+                      const ExecutionConfig& /*config*/, int64_t /*deadline*/, int64_t /*duration*/,
+                      FencedExecutionResult* fencedExecutionResult) {
+        *fencedExecutionResult = FencedExecutionResult{.callback = callback,
+                                                       .syncFence = ndk::ScopedFileDescriptor(-1)};
+        return ndk::ScopedAStatus::ok();
+    };
+}
+
 }  // namespace
 
-TEST(PreparedModelTest, invalidPreparedModel) {
+TEST_P(PreparedModelTest, invalidPreparedModel) {
     // run test
-    const auto result = PreparedModel::create(kInvalidPreparedModel);
+    const auto result = PreparedModel::create(kInvalidPreparedModel, kVersion);
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeSync) {
+TEST_P(PreparedModelTest, executeSync) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockExecutionResult = ExecutionResult{
             .outputSufficientSize = true,
             .outputShapes = {},
@@ -92,65 +113,73 @@
                     DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
             << "Failed with " << result.error().code << ": " << result.error().message;
 }
 
-TEST(PreparedModelTest, executeSyncError) {
+TEST_P(PreparedModelTest, executeSyncError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(Invoke(makeGeneralFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeSyncTransportFailure) {
+TEST_P(PreparedModelTest, executeSyncTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeSyncDeadObject) {
+TEST_P(PreparedModelTest, executeSyncDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, executeFenced) {
+TEST_P(PreparedModelTest, executeFenced) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(1)
@@ -161,7 +190,7 @@
             .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -176,10 +205,12 @@
                                             << callbackResult.error().message;
 }
 
-TEST(PreparedModelTest, executeFencedCallbackError) {
+TEST_P(PreparedModelTest, executeFencedCallbackError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(1)
@@ -191,7 +222,7 @@
             .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -206,59 +237,67 @@
     EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeFencedError) {
+TEST_P(PreparedModelTest, executeFencedError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeFencedTransportFailure) {
+TEST_P(PreparedModelTest, executeFencedTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeFencedDeadObject) {
+TEST_P(PreparedModelTest, executeFencedDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, reusableExecuteSync) {
+TEST_P(PreparedModelTest, reusableExecuteSync) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const uint32_t kNumberOfComputations = 2;
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockExecutionResult = ExecutionResult{
             .outputSufficientSize = true,
             .outputShapes = {},
@@ -270,7 +309,7 @@
                     DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -283,16 +322,18 @@
     }
 }
 
-TEST(PreparedModelTest, reusableExecuteSyncError) {
+TEST_P(PreparedModelTest, reusableExecuteSyncError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(Invoke(makeGeneralFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -303,16 +344,18 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteSyncTransportFailure) {
+TEST_P(PreparedModelTest, reusableExecuteSyncTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -323,16 +366,18 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteSyncDeadObject) {
+TEST_P(PreparedModelTest, reusableExecuteSyncDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -343,11 +388,13 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, reusableExecuteFenced) {
+TEST_P(PreparedModelTest, reusableExecuteFenced) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const uint32_t kNumberOfComputations = 2;
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(kNumberOfComputations)
@@ -358,7 +405,7 @@
             .WillRepeatedly(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -379,10 +426,12 @@
     }
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedCallbackError) {
+TEST_P(PreparedModelTest, reusableExecuteFencedCallbackError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(1)
@@ -394,7 +443,7 @@
             .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -413,16 +462,18 @@
     EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedError) {
+TEST_P(PreparedModelTest, reusableExecuteFencedError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -433,16 +484,18 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedTransportFailure) {
+TEST_P(PreparedModelTest, reusableExecuteFencedTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -453,16 +506,18 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedDeadObject) {
+TEST_P(PreparedModelTest, reusableExecuteFencedDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -473,14 +528,214 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, configureExecutionBurst) {
+TEST_P(PreparedModelTest, executeSyncWithConfig) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    const auto mockExecutionResult = ExecutionResult{
+            .outputSufficientSize = true,
+            .outputShapes = {},
+            .timing = kNoTiming,
+    };
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(
+                    DoAll(SetArgPointee<3>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    EXPECT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeGeneralFailure));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfig) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::NONE), Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionWithConfigResult(mockCallback)));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED);
+    ASSERT_NE(callback, nullptr);
+
+    // get results from callback
+    const auto callbackResult = callback();
+    ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << callbackResult.error().code << ": "
+                                            << callbackResult.error().message;
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigCallbackError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::GENERAL_FAILURE),
+                            Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionWithConfigResult(mockCallback)));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE);
+    ASSERT_NE(callback, nullptr);
+
+    // verify callback failure
+    const auto callbackResult = callback();
+    ASSERT_FALSE(callbackResult.has_value());
+    EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(PreparedModelTest, configureExecutionBurst) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto mockBurst = ndk::SharedRefBase::make<MockBurst>();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(DoAll(SetArgPointee<0>(mockBurst), Invoke(makeStatusOk)));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -491,13 +746,13 @@
     EXPECT_NE(result.value(), nullptr);
 }
 
-TEST(PreparedModelTest, configureExecutionBurstError) {
+TEST_P(PreparedModelTest, configureExecutionBurstError) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -507,13 +762,13 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, configureExecutionBurstTransportFailure) {
+TEST_P(PreparedModelTest, configureExecutionBurstTransportFailure) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -523,13 +778,13 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, configureExecutionBurstDeadObject) {
+TEST_P(PreparedModelTest, configureExecutionBurstDeadObject) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -539,10 +794,84 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, getUnderlyingResource) {
+TEST_P(PreparedModelTest, createReusableExecution) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto mockExecution = ndk::SharedRefBase::make<MockExecution>();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<2>(mockExecution), Invoke(makeStatusOk)));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_NE(result.value(), nullptr);
+}
+
+TEST_P(PreparedModelTest, createReusableExecutionError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, createReusableExecutionTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, createReusableExecutionDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(PreparedModelTest, getUnderlyingResource) {
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto resource = preparedModel->getUnderlyingResource();
@@ -554,4 +883,6 @@
     EXPECT_EQ(maybeMock->get(), mockPreparedModel.get());
 }
 
+INSTANTIATE_VERSIONED_AIDL_UTILS_TEST(PreparedModelTest, kAllAidlVersions);
+
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/TestUtils.cpp b/neuralnetworks/aidl/utils/test/TestUtils.cpp
new file mode 100644
index 0000000..9abec88
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/TestUtils.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TestUtils.h"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/CommonUtils.h>
+#include <string>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+
+std::string printTestVersion(const testing::TestParamInfo<nn::Version>& info) {
+    switch (info.param.level) {
+        case nn::Version::Level::FEATURE_LEVEL_5:
+            return "v1";
+        case nn::Version::Level::FEATURE_LEVEL_6:
+            return "v2";
+        case nn::Version::Level::FEATURE_LEVEL_7:
+            return "v3";
+        case nn::Version::Level::FEATURE_LEVEL_8:
+            return "v4";
+        default:
+            LOG(FATAL) << "Invalid AIDL version: " << info.param;
+            return "invalid";
+    }
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/TestUtils.h b/neuralnetworks/aidl/utils/test/TestUtils.h
new file mode 100644
index 0000000..23f734a
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/TestUtils.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_TEST_UTILS_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_TEST_UTILS_H
+
+#include <gtest/gtest.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/CommonUtils.h>
+#include <string>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+
+class VersionedAidlUtilsTestBase : public ::testing::TestWithParam<nn::Version> {
+  protected:
+    const nn::Version kVersion = GetParam();
+};
+
+std::string printTestVersion(const testing::TestParamInfo<nn::Version>& info);
+
+inline const auto kAllAidlVersions =
+        ::testing::Values(nn::kVersionFeatureLevel5, nn::kVersionFeatureLevel6,
+                          nn::kVersionFeatureLevel7, nn::kVersionFeatureLevel8);
+
+#define INSTANTIATE_VERSIONED_AIDL_UTILS_TEST(TestSuite, versions) \
+    INSTANTIATE_TEST_SUITE_P(Versioned, TestSuite, versions, printTestVersion)
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_TEST_UTILS_H
diff --git a/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
index f67fd34..8c8a87a 100644
--- a/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
@@ -58,25 +58,66 @@
     bool measureTiming;
     OutputType outputType;
     MemoryType memoryType;
+    bool reusable;
     // `reportSkipping` indicates if a test should print an info message in case
     // it is skipped. The field is set to true by default and is set to false in
     // quantization coupling tests to suppress skipping a test
     bool reportSkipping;
-    TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType)
-        : executor(executor),
-          measureTiming(measureTiming),
-          outputType(outputType),
-          memoryType(memoryType),
-          reportSkipping(true) {}
+    // `useConfig` indicates if a test should use execute*WithConfig functions for the execution.
+    bool useConfig;
     TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType,
-               bool reportSkipping)
+               bool reusable)
         : executor(executor),
           measureTiming(measureTiming),
           outputType(outputType),
           memoryType(memoryType),
-          reportSkipping(reportSkipping) {}
+          reusable(reusable),
+          reportSkipping(true),
+          useConfig(false) {}
+    TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType,
+               bool reusable, bool reportSkipping)
+        : executor(executor),
+          measureTiming(measureTiming),
+          outputType(outputType),
+          memoryType(memoryType),
+          reusable(reusable),
+          reportSkipping(reportSkipping),
+          useConfig(false) {}
+    TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType,
+               bool reusable, bool reportSkipping, bool useConfig)
+        : executor(executor),
+          measureTiming(measureTiming),
+          outputType(outputType),
+          memoryType(memoryType),
+          reusable(reusable),
+          reportSkipping(reportSkipping),
+          useConfig(useConfig) {}
 };
 
+std::string toString(OutputType type) {
+    switch (type) {
+        case OutputType::FULLY_SPECIFIED:
+            return "FULLY_SPECIFIED";
+        case OutputType::UNSPECIFIED:
+            return "UNSPECIFIED";
+        case OutputType::INSUFFICIENT:
+            return "INSUFFICIENT";
+        case OutputType::MISSED_DEADLINE:
+            return "MISSED_DEADLINE";
+    }
+}
+
+std::string toString(const TestConfig& config) {
+    std::stringstream ss;
+    ss << "TestConfig{.executor=" << toString(config.executor)
+       << ", .measureTiming=" << (config.measureTiming ? "true" : "false")
+       << ", .outputType=" << toString(config.outputType)
+       << ", .memoryType=" << toString(config.memoryType)
+       << ", .reusable=" << (config.reusable ? "true" : "false")
+       << ", .useConfig=" << (config.useConfig ? "true" : "false") << "}";
+    return ss.str();
+}
+
 enum class IOType { INPUT, OUTPUT };
 
 class DeviceMemoryAllocator {
@@ -558,209 +599,266 @@
         loopTimeoutDurationNs = 1 * kMillisecond;
     }
 
-    ErrorStatus executionStatus;
-    std::vector<OutputShape> outputShapes;
-    Timing timing = kNoTiming;
-    switch (testConfig.executor) {
-        case Executor::SYNC: {
-            SCOPED_TRACE("synchronous");
+    std::shared_ptr<IExecution> execution;
+    if (testConfig.reusable) {
+        const auto ret = preparedModel->createReusableExecution(
+                request, {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}}, &execution);
+        ASSERT_TRUE(ret.isOk()) << static_cast<nn::ErrorStatus>(ret.getServiceSpecificError());
+        ASSERT_NE(nullptr, execution.get());
+    }
 
-            ExecutionResult executionResult;
-            // execute
-            const auto ret = preparedModel->executeSynchronously(request, testConfig.measureTiming,
-                                                                 kNoDeadline, loopTimeoutDurationNs,
-                                                                 &executionResult);
-            ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
-                    << ret.getDescription();
-            if (ret.isOk()) {
-                executionStatus = executionResult.outputSufficientSize
-                                          ? ErrorStatus::NONE
-                                          : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
-                outputShapes = std::move(executionResult.outputShapes);
-                timing = executionResult.timing;
-            } else {
-                executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
-            }
-            break;
-        }
-        case Executor::BURST: {
-            SCOPED_TRACE("burst");
+    const auto executeAndCheckResults = [&preparedModel, &execution, &testConfig, &testModel,
+                                         &context, &request, loopTimeoutDurationNs, skipped]() {
+        ErrorStatus executionStatus;
+        std::vector<OutputShape> outputShapes;
+        Timing timing = kNoTiming;
+        switch (testConfig.executor) {
+            case Executor::SYNC: {
+                SCOPED_TRACE("synchronous");
 
-            // create burst
-            std::shared_ptr<IBurst> burst;
-            auto ret = preparedModel->configureExecutionBurst(&burst);
-            ASSERT_TRUE(ret.isOk()) << ret.getDescription();
-            ASSERT_NE(nullptr, burst.get());
-
-            // associate a unique slot with each memory pool
-            int64_t currentSlot = 0;
-            std::vector<int64_t> slots;
-            slots.reserve(request.pools.size());
-            for (const auto& pool : request.pools) {
-                if (pool.getTag() == RequestMemoryPool::Tag::pool) {
-                    slots.push_back(currentSlot++);
+                ExecutionResult executionResult;
+                // execute
+                ::ndk::ScopedAStatus ret;
+                if (testConfig.reusable) {
+                    ret = execution->executeSynchronously(kNoDeadline, &executionResult);
+                } else if (testConfig.useConfig) {
+                    ret = preparedModel->executeSynchronouslyWithConfig(
+                            request, {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}},
+                            kNoDeadline, &executionResult);
                 } else {
-                    EXPECT_EQ(pool.getTag(), RequestMemoryPool::Tag::token);
-                    slots.push_back(-1);
+                    ret = preparedModel->executeSynchronously(request, testConfig.measureTiming,
+                                                              kNoDeadline, loopTimeoutDurationNs,
+                                                              &executionResult);
                 }
+                ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
+                        << ret.getDescription();
+                if (ret.isOk()) {
+                    executionStatus = executionResult.outputSufficientSize
+                                              ? ErrorStatus::NONE
+                                              : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
+                    outputShapes = std::move(executionResult.outputShapes);
+                    timing = executionResult.timing;
+                } else {
+                    executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
+                }
+                break;
             }
+            case Executor::BURST: {
+                SCOPED_TRACE("burst");
 
-            ExecutionResult executionResult;
-            // execute
-            ret = burst->executeSynchronously(request, slots, testConfig.measureTiming, kNoDeadline,
-                                              loopTimeoutDurationNs, &executionResult);
-            ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
-                    << ret.getDescription();
-            if (ret.isOk()) {
-                executionStatus = executionResult.outputSufficientSize
-                                          ? ErrorStatus::NONE
-                                          : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
-                outputShapes = std::move(executionResult.outputShapes);
-                timing = executionResult.timing;
-            } else {
-                executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
-            }
-
-            // Mark each slot as unused after the execution. This is unnecessary because the burst
-            // is freed after this scope ends, but this is here to test the functionality.
-            for (int64_t slot : slots) {
-                ret = burst->releaseMemoryResource(slot);
+                // create burst
+                std::shared_ptr<IBurst> burst;
+                auto ret = preparedModel->configureExecutionBurst(&burst);
                 ASSERT_TRUE(ret.isOk()) << ret.getDescription();
-            }
+                ASSERT_NE(nullptr, burst.get());
 
-            break;
-        }
-        case Executor::FENCED: {
-            SCOPED_TRACE("fenced");
-            ErrorStatus result = ErrorStatus::NONE;
-            FencedExecutionResult executionResult;
-            auto ret = preparedModel->executeFenced(request, {}, testConfig.measureTiming,
-                                                    kNoDeadline, loopTimeoutDurationNs, kNoDuration,
-                                                    &executionResult);
-            ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
-                    << ret.getDescription();
-            if (!ret.isOk()) {
-                result = static_cast<ErrorStatus>(ret.getServiceSpecificError());
-                executionStatus = result;
-            } else if (executionResult.syncFence.get() != -1) {
-                std::vector<ndk::ScopedFileDescriptor> waitFor;
-                auto dupFd = dup(executionResult.syncFence.get());
-                ASSERT_NE(dupFd, -1);
-                waitFor.emplace_back(dupFd);
-                // If a sync fence is returned, try start another run waiting for the sync fence.
-                ret = preparedModel->executeFenced(request, waitFor, testConfig.measureTiming,
-                                                   kNoDeadline, loopTimeoutDurationNs, kNoDuration,
-                                                   &executionResult);
-                ASSERT_TRUE(ret.isOk());
-                waitForSyncFence(executionResult.syncFence.get());
-            }
-            if (result == ErrorStatus::NONE) {
-                ASSERT_NE(executionResult.callback, nullptr);
-                Timing timingFenced;
-                auto ret = executionResult.callback->getExecutionInfo(&timing, &timingFenced,
-                                                                      &executionStatus);
-                ASSERT_TRUE(ret.isOk());
-            }
-            break;
-        }
-        default: {
-            FAIL() << "Unsupported execution mode for AIDL interface.";
-        }
-    }
-
-    if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
-        executionStatus == ErrorStatus::GENERAL_FAILURE) {
-        if (skipped != nullptr) {
-            *skipped = true;
-        }
-        if (!testConfig.reportSkipping) {
-            return;
-        }
-        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
-                     "execute model that it does not support.";
-        std::cout << "[          ]   Early termination of test because vendor service cannot "
-                     "execute model that it does not support."
-                  << std::endl;
-        GTEST_SKIP();
-    }
-    if (!testConfig.measureTiming) {
-        EXPECT_EQ(timing, kNoTiming);
-    } else {
-        if (timing.timeOnDeviceNs != -1 && timing.timeInDriverNs != -1) {
-            EXPECT_LE(timing.timeOnDeviceNs, timing.timeInDriverNs);
-        }
-    }
-
-    switch (testConfig.outputType) {
-        case OutputType::FULLY_SPECIFIED:
-            if (testConfig.executor == Executor::FENCED && hasZeroSizedOutput(testModel)) {
-                // Executor::FENCED does not support zero-sized output.
-                ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
-                return;
-            }
-            // If the model output operands are fully specified, outputShapes must be either
-            // either empty, or have the same number of elements as the number of outputs.
-            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
-            ASSERT_TRUE(outputShapes.size() == 0 ||
-                        outputShapes.size() == testModel.main.outputIndexes.size());
-            break;
-        case OutputType::UNSPECIFIED:
-            if (testConfig.executor == Executor::FENCED) {
-                // For Executor::FENCED, the output shape must be fully specified.
-                ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
-                return;
-            }
-            // If the model output operands are not fully specified, outputShapes must have
-            // the same number of elements as the number of outputs.
-            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
-            ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
-            break;
-        case OutputType::INSUFFICIENT:
-            if (testConfig.executor == Executor::FENCED) {
-                // For Executor::FENCED, the output shape must be fully specified.
-                ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
-                return;
-            }
-            ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
-            ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
-            // Check that all returned output dimensions are at least as fully specified as the
-            // union of the information about the corresponding operand in the model and in the
-            // request. In this test, all model outputs have known rank with all dimensions
-            // unspecified, and no dimensional information is provided in the request.
-            for (uint32_t i = 0; i < outputShapes.size(); i++) {
-                ASSERT_EQ(outputShapes[i].isSufficient, i != kInsufficientOutputIndex);
-                const auto& actual = outputShapes[i].dimensions;
-                const auto& golden =
-                        testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
-                ASSERT_EQ(actual.size(), golden.size());
-                for (uint32_t j = 0; j < actual.size(); j++) {
-                    if (actual[j] == 0) continue;
-                    EXPECT_EQ(actual[j], golden[j]) << "index: " << j;
+                // associate a unique slot with each memory pool
+                int64_t currentSlot = 0;
+                std::vector<int64_t> slots;
+                slots.reserve(request.pools.size());
+                for (const auto& pool : request.pools) {
+                    if (pool.getTag() == RequestMemoryPool::Tag::pool) {
+                        slots.push_back(currentSlot++);
+                    } else {
+                        EXPECT_EQ(pool.getTag(), RequestMemoryPool::Tag::token);
+                        slots.push_back(-1);
+                    }
                 }
+
+                ExecutionResult executionResult;
+                // execute
+                if (testConfig.useConfig) {
+                    ret = burst->executeSynchronouslyWithConfig(
+                            request, slots,
+                            {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}}, kNoDeadline,
+                            &executionResult);
+                } else {
+                    ret = burst->executeSynchronously(request, slots, testConfig.measureTiming,
+                                                      kNoDeadline, loopTimeoutDurationNs,
+                                                      &executionResult);
+                }
+                ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
+                        << ret.getDescription();
+                if (ret.isOk()) {
+                    executionStatus = executionResult.outputSufficientSize
+                                              ? ErrorStatus::NONE
+                                              : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
+                    outputShapes = std::move(executionResult.outputShapes);
+                    timing = executionResult.timing;
+                } else {
+                    executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
+                }
+
+                // Mark each slot as unused after the execution. This is unnecessary because the
+                // burst is freed after this scope ends, but this is here to test the functionality.
+                for (int64_t slot : slots) {
+                    ret = burst->releaseMemoryResource(slot);
+                    ASSERT_TRUE(ret.isOk()) << ret.getDescription();
+                }
+
+                break;
             }
-            return;
-        case OutputType::MISSED_DEADLINE:
-            ASSERT_TRUE(executionStatus == ErrorStatus::MISSED_DEADLINE_TRANSIENT ||
-                        executionStatus == ErrorStatus::MISSED_DEADLINE_PERSISTENT)
-                    << "executionStatus = " << executionStatus;
-            return;
+            case Executor::FENCED: {
+                SCOPED_TRACE("fenced");
+                ErrorStatus result = ErrorStatus::NONE;
+                FencedExecutionResult executionResult;
+                ::ndk::ScopedAStatus ret;
+                if (testConfig.reusable) {
+                    ret = execution->executeFenced({}, kNoDeadline, kNoDuration, &executionResult);
+                } else if (testConfig.useConfig) {
+                    ret = preparedModel->executeFencedWithConfig(
+                            request, {}, {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}},
+                            kNoDeadline, kNoDuration, &executionResult);
+                } else {
+                    ret = preparedModel->executeFenced(request, {}, testConfig.measureTiming,
+                                                       kNoDeadline, loopTimeoutDurationNs,
+                                                       kNoDuration, &executionResult);
+                }
+                ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
+                        << ret.getDescription();
+                if (!ret.isOk()) {
+                    result = static_cast<ErrorStatus>(ret.getServiceSpecificError());
+                    executionStatus = result;
+                } else if (executionResult.syncFence.get() != -1) {
+                    std::vector<ndk::ScopedFileDescriptor> waitFor;
+                    auto dupFd = dup(executionResult.syncFence.get());
+                    ASSERT_NE(dupFd, -1);
+                    waitFor.emplace_back(dupFd);
+                    // If a sync fence is returned, try to start another run waiting for the
+                    // sync fence.
+                    if (testConfig.reusable) {
+                        ret = execution->executeFenced(waitFor, kNoDeadline, kNoDuration,
+                                                       &executionResult);
+                    } else if (testConfig.useConfig) {
+                        ret = preparedModel->executeFencedWithConfig(
+                                request, waitFor,
+                                {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}},
+                                kNoDeadline, kNoDuration, &executionResult);
+                    } else {
+                        ret = preparedModel->executeFenced(
+                                request, waitFor, testConfig.measureTiming, kNoDeadline,
+                                loopTimeoutDurationNs, kNoDuration, &executionResult);
+                    }
+                    ASSERT_TRUE(ret.isOk());
+                    waitForSyncFence(executionResult.syncFence.get());
+                }
+                if (result == ErrorStatus::NONE) {
+                    ASSERT_NE(executionResult.callback, nullptr);
+                    Timing timingFenced;
+                    auto ret = executionResult.callback->getExecutionInfo(&timing, &timingFenced,
+                                                                          &executionStatus);
+                    ASSERT_TRUE(ret.isOk());
+                }
+                break;
+            }
+            default: {
+                FAIL() << "Unsupported execution mode for AIDL interface.";
+            }
+        }
+
+        if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
+            executionStatus == ErrorStatus::GENERAL_FAILURE) {
+            if (skipped != nullptr) {
+                *skipped = true;
+            }
+            if (!testConfig.reportSkipping) {
+                return;
+            }
+            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
+                         "execute model that it does not support.";
+            std::cout << "[          ]   Early termination of test because vendor service cannot "
+                         "execute model that it does not support."
+                      << std::endl;
+            GTEST_SKIP();
+        }
+        if (!testConfig.measureTiming) {
+            EXPECT_EQ(timing, kNoTiming);
+        } else {
+            if (timing.timeOnDeviceNs != -1 && timing.timeInDriverNs != -1) {
+                EXPECT_LE(timing.timeOnDeviceNs, timing.timeInDriverNs);
+            }
+        }
+
+        switch (testConfig.outputType) {
+            case OutputType::FULLY_SPECIFIED:
+                if (testConfig.executor == Executor::FENCED && hasZeroSizedOutput(testModel)) {
+                    // Executor::FENCED does not support zero-sized output.
+                    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
+                    return;
+                }
+                // If the model output operands are fully specified, outputShapes must be
+                // either empty, or have the same number of elements as the number of outputs.
+                ASSERT_EQ(ErrorStatus::NONE, executionStatus);
+                ASSERT_TRUE(outputShapes.size() == 0 ||
+                            outputShapes.size() == testModel.main.outputIndexes.size());
+                break;
+            case OutputType::UNSPECIFIED:
+                if (testConfig.executor == Executor::FENCED) {
+                    // For Executor::FENCED, the output shape must be fully specified.
+                    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
+                    return;
+                }
+                // If the model output operands are not fully specified, outputShapes must have
+                // the same number of elements as the number of outputs.
+                ASSERT_EQ(ErrorStatus::NONE, executionStatus);
+                ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
+                break;
+            case OutputType::INSUFFICIENT:
+                if (testConfig.executor == Executor::FENCED) {
+                    // For Executor::FENCED, the output shape must be fully specified.
+                    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
+                    return;
+                }
+                ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
+                ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
+                // Check that all returned output dimensions are at least as fully specified as the
+                // union of the information about the corresponding operand in the model and in the
+                // request. In this test, all model outputs have known rank with all dimensions
+                // unspecified, and no dimensional information is provided in the request.
+                for (uint32_t i = 0; i < outputShapes.size(); i++) {
+                    ASSERT_EQ(outputShapes[i].isSufficient, i != kInsufficientOutputIndex);
+                    const auto& actual = outputShapes[i].dimensions;
+                    const auto& golden =
+                            testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
+                    ASSERT_EQ(actual.size(), golden.size());
+                    for (uint32_t j = 0; j < actual.size(); j++) {
+                        if (actual[j] == 0) continue;
+                        EXPECT_EQ(actual[j], golden[j]) << "index: " << j;
+                    }
+                }
+                return;
+            case OutputType::MISSED_DEADLINE:
+                ASSERT_TRUE(executionStatus == ErrorStatus::MISSED_DEADLINE_TRANSIENT ||
+                            executionStatus == ErrorStatus::MISSED_DEADLINE_PERSISTENT)
+                        << "executionStatus = " << executionStatus;
+                return;
+        }
+
+        // Go through all outputs, check returned output shapes.
+        for (uint32_t i = 0; i < outputShapes.size(); i++) {
+            EXPECT_TRUE(outputShapes[i].isSufficient);
+            const auto& expect =
+                    testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
+            const auto unsignedActual = nn::toUnsigned(outputShapes[i].dimensions);
+            ASSERT_TRUE(unsignedActual.has_value());
+            const std::vector<uint32_t>& actual = unsignedActual.value();
+            EXPECT_EQ(expect, actual);
+        }
+
+        // Retrieve execution results.
+        const std::vector<TestBuffer> outputs = context.getOutputBuffers(testModel, request);
+
+        // We want "close-enough" results.
+        checkResults(testModel, outputs);
+    };
+
+    executeAndCheckResults();
+
+    // For reusable execution tests, run the execution twice.
+    if (testConfig.reusable) {
+        SCOPED_TRACE("Second execution");
+        executeAndCheckResults();
     }
-
-    // Go through all outputs, check returned output shapes.
-    for (uint32_t i = 0; i < outputShapes.size(); i++) {
-        EXPECT_TRUE(outputShapes[i].isSufficient);
-        const auto& expect = testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
-        const auto unsignedActual = nn::toUnsigned(outputShapes[i].dimensions);
-        ASSERT_TRUE(unsignedActual.has_value());
-        const std::vector<uint32_t>& actual = unsignedActual.value();
-        EXPECT_EQ(expect, actual);
-    }
-
-    // Retrieve execution results.
-    const std::vector<TestBuffer> outputs = context.getOutputBuffers(testModel, request);
-
-    // We want "close-enough" results.
-    checkResults(testModel, outputs);
 }
 
 void EvaluatePreparedModel(const std::shared_ptr<IDevice>& device,
@@ -770,6 +868,15 @@
     std::vector<bool> measureTimingList;
     std::vector<Executor> executorList;
     std::vector<MemoryType> memoryTypeList;
+    std::vector<bool> reusableList = {false};
+    std::vector<bool> useConfigList = {false};
+
+    int32_t deviceVersion;
+    ASSERT_TRUE(device->getInterfaceVersion(&deviceVersion).isOk());
+    if (deviceVersion >= kMinAidlLevelForFL8) {
+        reusableList.push_back(true);
+        useConfigList.push_back(true);
+    }
 
     switch (testKind) {
         case TestKind::GENERAL: {
@@ -812,8 +919,16 @@
         for (const bool measureTiming : measureTimingList) {
             for (const Executor executor : executorList) {
                 for (const MemoryType memoryType : memoryTypeList) {
-                    const TestConfig testConfig(executor, measureTiming, outputType, memoryType);
-                    EvaluatePreparedModel(device, preparedModel, testModel, testConfig);
+                    for (const bool reusable : reusableList) {
+                        for (const bool useConfig : useConfigList) {
+                            if ((useConfig || executor == Executor::BURST) && reusable) continue;
+                            const TestConfig testConfig(executor, measureTiming, outputType,
+                                                        memoryType, reusable,
+                                                        /*reportSkipping=*/true, useConfig);
+                            SCOPED_TRACE(toString(testConfig));
+                            EvaluatePreparedModel(device, preparedModel, testModel, testConfig);
+                        }
+                    }
                 }
             }
         }
@@ -833,7 +948,7 @@
         for (const bool measureTiming : measureTimingList) {
             for (const Executor executor : executorList) {
                 const TestConfig testConfig(executor, measureTiming, outputType, MemoryType::ASHMEM,
-                                            /*reportSkipping=*/false);
+                                            /*reusable=*/false, /*reportSkipping=*/false);
                 bool baseSkipped = false;
                 EvaluatePreparedModel(device, preparedModel, testModel, testConfig, &baseSkipped);
                 bool coupledSkipped = false;
@@ -871,6 +986,13 @@
             createPreparedModel(device, model, &preparedModel);
             if (preparedModel == nullptr) return;
             EvaluatePreparedModel(device, preparedModel, testModel, testKind);
+            int32_t deviceVersion;
+            ASSERT_TRUE(device->getInterfaceVersion(&deviceVersion).isOk());
+            if (deviceVersion >= kMinAidlLevelForFL8) {
+                createPreparedModel(device, model, &preparedModel, /*reportSkipping=*/true,
+                                    /*useConfig=*/true);
+                EvaluatePreparedModel(device, preparedModel, testModel, testKind);
+            }
         } break;
         case TestKind::QUANTIZATION_COUPLING: {
             ASSERT_TRUE(testModel.hasQuant8CoupledOperands());
diff --git a/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp b/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
index cd5475c..97760ae 100644
--- a/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
+++ b/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
@@ -204,10 +204,27 @@
         return ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
     }
+    ndk::ScopedAStatus executeSynchronouslyWithConfig(const Request&, const ExecutionConfig&,
+                                                      int64_t, ExecutionResult*) override {
+        return ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+    }
+    ndk::ScopedAStatus executeFencedWithConfig(const Request&,
+                                               const std::vector<ndk::ScopedFileDescriptor>&,
+                                               const ExecutionConfig&, int64_t, int64_t,
+                                               FencedExecutionResult*) override {
+        return ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+    }
     ndk::ScopedAStatus configureExecutionBurst(std::shared_ptr<IBurst>*) override {
         return ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
     }
+    ndk::ScopedAStatus createReusableExecution(const aidl_hal::Request&, const ExecutionConfig&,
+                                               std::shared_ptr<aidl_hal::IExecution>*) override {
+        return ndk::ScopedAStatus::fromServiceSpecificError(
+                static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+    }
 };
 
 template <typename... Args>
diff --git a/neuralnetworks/aidl/vts/functional/Utils.cpp b/neuralnetworks/aidl/vts/functional/Utils.cpp
index 325a436..efd5bca 100644
--- a/neuralnetworks/aidl/vts/functional/Utils.cpp
+++ b/neuralnetworks/aidl/vts/functional/Utils.cpp
@@ -177,6 +177,17 @@
     return os << toString(errorStatus);
 }
 
+std::string toString(MemoryType type) {
+    switch (type) {
+        case MemoryType::ASHMEM:
+            return "ASHMEM";
+        case MemoryType::BLOB_AHWB:
+            return "BLOB_AHWB";
+        case MemoryType::DEVICE:
+            return "DEVICE";
+    }
+}
+
 Request ExecutionContext::createRequest(const TestModel& testModel, MemoryType memoryType) {
     CHECK(memoryType == MemoryType::ASHMEM || memoryType == MemoryType::BLOB_AHWB);
 
diff --git a/neuralnetworks/aidl/vts/functional/Utils.h b/neuralnetworks/aidl/vts/functional/Utils.h
index ca81418..0db3f8c 100644
--- a/neuralnetworks/aidl/vts/functional/Utils.h
+++ b/neuralnetworks/aidl/vts/functional/Utils.h
@@ -111,6 +111,8 @@
 
 enum class MemoryType { ASHMEM, BLOB_AHWB, DEVICE };
 
+std::string toString(MemoryType type);
+
 // Manages the lifetime of memory resources used in an execution.
 class ExecutionContext {
     DISALLOW_COPY_AND_ASSIGN(ExecutionContext);
diff --git a/neuralnetworks/aidl/vts/functional/ValidateModel.cpp b/neuralnetworks/aidl/vts/functional/ValidateModel.cpp
index fdc7eff..931ba25 100644
--- a/neuralnetworks/aidl/vts/functional/ValidateModel.cpp
+++ b/neuralnetworks/aidl/vts/functional/ValidateModel.cpp
@@ -77,6 +77,28 @@
     ASSERT_EQ(nullptr, preparedModel.get());
 }
 
+static void validatePrepareModelWithConfig(const std::shared_ptr<IDevice>& device,
+                                           const std::string& message, const Model& model,
+                                           ExecutionPreference preference, Priority priority) {
+    SCOPED_TRACE(message + " [prepareModelWithConfig]");
+
+    std::shared_ptr<PreparedModelCallback> preparedModelCallback =
+            ndk::SharedRefBase::make<PreparedModelCallback>();
+    const auto prepareLaunchStatus = device->prepareModelWithConfig(
+            model, {preference, priority, kNoDeadline, {}, {}, kEmptyCacheToken, {}, {}},
+            preparedModelCallback);
+    ASSERT_FALSE(prepareLaunchStatus.isOk());
+    ASSERT_EQ(prepareLaunchStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+    ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus.getServiceSpecificError()),
+              ErrorStatus::INVALID_ARGUMENT);
+
+    preparedModelCallback->wait();
+    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
+    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, prepareReturnStatus);
+    std::shared_ptr<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
+    ASSERT_EQ(nullptr, preparedModel.get());
+}
+
 static bool validExecutionPreference(ExecutionPreference preference) {
     return preference == ExecutionPreference::LOW_POWER ||
            preference == ExecutionPreference::FAST_SINGLE_ANSWER ||
@@ -103,6 +125,13 @@
     }
 
     validatePrepareModel(device, message, model, preference, priority);
+
+    int32_t aidlVersion;
+    ASSERT_TRUE(device->getInterfaceVersion(&aidlVersion).isOk());
+    if (aidlVersion >= kMinAidlLevelForFL8) {
+        // prepareModelWithConfig must satisfy all requirements enforced by prepareModel.
+        validatePrepareModelWithConfig(device, message, model, preference, priority);
+    }
 }
 
 static uint32_t addOperand(Model* model) {
diff --git a/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp b/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
index 29e2471..d749841 100644
--- a/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
+++ b/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
@@ -36,6 +36,51 @@
 
 ///////////////////////// UTILITY FUNCTIONS /////////////////////////
 
+// Test request validation with reusable execution.
+static void validateReusableExecution(const std::shared_ptr<IPreparedModel>& preparedModel,
+                                      const std::string& message, const Request& request,
+                                      bool measure) {
+    // createReusableExecution
+    std::shared_ptr<IExecution> execution;
+    {
+        SCOPED_TRACE(message + " [createReusableExecution]");
+        const auto createStatus = preparedModel->createReusableExecution(
+                request, {measure, kOmittedTimeoutDuration, {}, {}}, &execution);
+        if (!createStatus.isOk()) {
+            ASSERT_EQ(createStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+            ASSERT_EQ(static_cast<ErrorStatus>(createStatus.getServiceSpecificError()),
+                      ErrorStatus::INVALID_ARGUMENT);
+            ASSERT_EQ(nullptr, execution);
+            return;
+        } else {
+            ASSERT_NE(nullptr, execution);
+        }
+    }
+
+    // synchronous
+    {
+        SCOPED_TRACE(message + " [executeSynchronously]");
+        ExecutionResult executionResult;
+        const auto executeStatus = execution->executeSynchronously(kNoDeadline, &executionResult);
+        ASSERT_FALSE(executeStatus.isOk());
+        ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+        ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+                  ErrorStatus::INVALID_ARGUMENT);
+    }
+
+    // fenced
+    {
+        SCOPED_TRACE(message + " [executeFenced]");
+        FencedExecutionResult executionResult;
+        const auto executeStatus =
+                execution->executeFenced({}, kNoDeadline, kNoDuration, &executionResult);
+        ASSERT_FALSE(executeStatus.isOk());
+        ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+        ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+                  ErrorStatus::INVALID_ARGUMENT);
+    }
+}
+
 // Primary validation function. This function will take a valid request, apply a
 // mutation to it to invalidate the request, then pass it to interface calls
 // that use the request.
@@ -101,6 +146,63 @@
         ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
                   ErrorStatus::INVALID_ARGUMENT);
     }
+
+    int32_t aidlVersion;
+    ASSERT_TRUE(preparedModel->getInterfaceVersion(&aidlVersion).isOk());
+    if (aidlVersion < kMinAidlLevelForFL8) {
+        return;
+    }
+
+    // validate reusable execution
+    validateReusableExecution(preparedModel, message, request, measure);
+
+    // synchronous with empty hints
+    {
+        SCOPED_TRACE(message + " [executeSynchronouslyWithConfig]");
+        ExecutionResult executionResult;
+        const auto executeStatus = preparedModel->executeSynchronouslyWithConfig(
+                request, {measure, kOmittedTimeoutDuration, {}, {}}, kNoDeadline, &executionResult);
+        ASSERT_FALSE(executeStatus.isOk());
+        ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+        ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+                  ErrorStatus::INVALID_ARGUMENT);
+    }
+
+    // fenced with empty hints
+    {
+        SCOPED_TRACE(message + " [executeFencedWithConfig]");
+        FencedExecutionResult executionResult;
+        const auto executeStatus = preparedModel->executeFencedWithConfig(
+                request, {}, {false, kOmittedTimeoutDuration, {}, {}}, kNoDeadline, kNoDuration,
+                &executionResult);
+        ASSERT_FALSE(executeStatus.isOk());
+        ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+        ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+                  ErrorStatus::INVALID_ARGUMENT);
+    }
+
+    // burst with empty hints
+    {
+        SCOPED_TRACE(message + " [burst executeSynchronouslyWithConfig]");
+
+        // create burst
+        std::shared_ptr<IBurst> burst;
+        auto ret = preparedModel->configureExecutionBurst(&burst);
+        ASSERT_TRUE(ret.isOk()) << ret.getDescription();
+        ASSERT_NE(nullptr, burst.get());
+
+        // use -1 for all memory identifier tokens
+        const std::vector<int64_t> slots(request.pools.size(), -1);
+
+        ExecutionResult executionResult;
+        const auto executeStatus = burst->executeSynchronouslyWithConfig(
+                request, slots, {measure, kOmittedTimeoutDuration, {}, {}}, kNoDeadline,
+                &executionResult);
+        ASSERT_FALSE(executeStatus.isOk());
+        ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+        ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+                  ErrorStatus::INVALID_ARGUMENT);
+    }
 }
 
 std::shared_ptr<IBurst> createBurst(const std::shared_ptr<IPreparedModel>& preparedModel) {
diff --git a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
index c417356..ad93e6d 100644
--- a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
+++ b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
@@ -41,7 +41,8 @@
 
 // internal helper function
 void createPreparedModel(const std::shared_ptr<IDevice>& device, const Model& model,
-                         std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping) {
+                         std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping,
+                         bool useConfig) {
     ASSERT_NE(nullptr, preparedModel);
     *preparedModel = nullptr;
 
@@ -56,11 +57,25 @@
     // launch prepare model
     const std::shared_ptr<PreparedModelCallback> preparedModelCallback =
             ndk::SharedRefBase::make<PreparedModelCallback>();
-    const auto prepareLaunchStatus =
-            device->prepareModel(model, ExecutionPreference::FAST_SINGLE_ANSWER, kDefaultPriority,
-                                 kNoDeadline, {}, {}, kEmptyCacheToken, preparedModelCallback);
-    ASSERT_TRUE(prepareLaunchStatus.isOk()) << prepareLaunchStatus.getDescription();
-
+    if (useConfig) {
+        const auto prepareLaunchStatus =
+                device->prepareModelWithConfig(model,
+                                               {ExecutionPreference::FAST_SINGLE_ANSWER,
+                                                kDefaultPriority,
+                                                kNoDeadline,
+                                                {},
+                                                {},
+                                                kEmptyCacheToken,
+                                                {},
+                                                {}},
+                                               preparedModelCallback);
+        ASSERT_TRUE(prepareLaunchStatus.isOk()) << prepareLaunchStatus.getDescription();
+    } else {
+        const auto prepareLaunchStatus = device->prepareModel(
+                model, ExecutionPreference::FAST_SINGLE_ANSWER, kDefaultPriority, kNoDeadline, {},
+                {}, kEmptyCacheToken, preparedModelCallback);
+        ASSERT_TRUE(prepareLaunchStatus.isOk()) << prepareLaunchStatus.getDescription();
+    }
     // retrieve prepared model
     preparedModelCallback->wait();
     const ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
diff --git a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h
index 4312d3a..00d705c 100644
--- a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h
+++ b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h
@@ -30,6 +30,8 @@
 using NamedDevice = Named<std::shared_ptr<IDevice>>;
 using NeuralNetworksAidlTestParam = NamedDevice;
 
+constexpr int kMinAidlLevelForFL8 = 4;
+
 class NeuralNetworksAidlTest : public testing::TestWithParam<NeuralNetworksAidlTestParam> {
   protected:
     void SetUp() override;
@@ -49,8 +51,8 @@
 // Create an IPreparedModel object. If the model cannot be prepared,
 // "preparedModel" will be nullptr instead.
 void createPreparedModel(const std::shared_ptr<IDevice>& device, const Model& model,
-                         std::shared_ptr<IPreparedModel>* preparedModel,
-                         bool reportSkipping = true);
+                         std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping = true,
+                         bool useConfig = false);
 
 enum class Executor { SYNC, BURST, FENCED };
 
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h
index f2687c4..8d42e2f 100644
--- a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h
@@ -46,6 +46,11 @@
                                             bool measureTiming, int64_t deadlineNs,
                                             int64_t loopTimeoutDurationNs,
                                             ExecutionResult* executionResult) override;
+    ndk::ScopedAStatus executeSynchronouslyWithConfig(
+            const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+            const ExecutionConfig& config, int64_t deadlineNs,
+            ExecutionResult* executionResult) override;
+
     ndk::ScopedAStatus releaseMemoryResource(int64_t memoryIdentifierToken) override;
 
     class ThreadSafeMemoryCache {
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h
index aa29d63..c94f270 100644
--- a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h
@@ -31,6 +31,7 @@
 #include <aidl/android/hardware/neuralnetworks/IPreparedModelParcel.h>
 #include <aidl/android/hardware/neuralnetworks/Model.h>
 #include <aidl/android/hardware/neuralnetworks/NumberOfCacheFiles.h>
+#include <aidl/android/hardware/neuralnetworks/PrepareModelConfig.h>
 #include <aidl/android/hardware/neuralnetworks/Priority.h>
 #include <android/binder_auto_utils.h>
 #include <nnapi/IDevice.h>
@@ -72,6 +73,9 @@
             const std::vector<ndk::ScopedFileDescriptor>& dataCache,
             const std::vector<uint8_t>& token,
             const std::shared_ptr<IPreparedModelCallback>& callback) override;
+    ndk::ScopedAStatus prepareModelWithConfig(
+            const Model& model, const PrepareModelConfig& config,
+            const std::shared_ptr<IPreparedModelCallback>& callback) override;
 
   protected:
     const ::android::nn::SharedDevice kDevice;
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Execution.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Execution.h
new file mode 100644
index 0000000..6a9ac57
--- /dev/null
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Execution.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_ADAPTER_AIDL_EXECUTION_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_ADAPTER_AIDL_EXECUTION_H
+
+#include "nnapi/hal/aidl/Adapter.h"
+
+#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
+#include <aidl/android/hardware/neuralnetworks/ExecutionResult.h>
+#include <aidl/android/hardware/neuralnetworks/FencedExecutionResult.h>
+#include <aidl/android/hardware/neuralnetworks/IExecution.h>
+#include <aidl/android/hardware/neuralnetworks/Request.h>
+#include <android/binder_auto_utils.h>
+#include <nnapi/IExecution.h>
+#include <nnapi/Types.h>
+
+#include <memory>
+#include <vector>
+
+// See hardware/interfaces/neuralnetworks/utils/README.md for more information on AIDL interface
+// lifetimes across processes and for protecting asynchronous calls across AIDL.
+
+namespace aidl::android::hardware::neuralnetworks::adapter {
+
+// Class that adapts nn::IExecution to BnExecution.
+class Execution : public BnExecution {
+  public:
+    explicit Execution(::android::nn::SharedExecution execution);
+
+    ndk::ScopedAStatus executeSynchronously(int64_t deadlineNs,
+                                            ExecutionResult* executionResult) override;
+    ndk::ScopedAStatus executeFenced(const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+                                     int64_t deadlineNs, int64_t durationNs,
+                                     FencedExecutionResult* fencedExecutionResult) override;
+
+  protected:
+    const ::android::nn::SharedExecution kExecution;
+};
+
+}  // namespace aidl::android::hardware::neuralnetworks::adapter
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_ADAPTER_AIDL_EXECUTION_H
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h
index 93e0427..d1359d6 100644
--- a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h
@@ -23,6 +23,7 @@
 #include <aidl/android/hardware/neuralnetworks/ExecutionResult.h>
 #include <aidl/android/hardware/neuralnetworks/FencedExecutionResult.h>
 #include <aidl/android/hardware/neuralnetworks/IBurst.h>
+#include <aidl/android/hardware/neuralnetworks/IExecution.h>
 #include <aidl/android/hardware/neuralnetworks/Request.h>
 #include <android/binder_auto_utils.h>
 #include <nnapi/IPreparedModel.h>
@@ -50,6 +51,17 @@
                                      int64_t loopTimeoutDurationNs, int64_t durationNs,
                                      FencedExecutionResult* executionResult) override;
     ndk::ScopedAStatus configureExecutionBurst(std::shared_ptr<IBurst>* burst) override;
+    ndk::ScopedAStatus createReusableExecution(const Request& request,
+                                               const ExecutionConfig& config,
+                                               std::shared_ptr<IExecution>* execution) override;
+    ndk::ScopedAStatus executeSynchronouslyWithConfig(const Request& request,
+                                                      const ExecutionConfig& config,
+                                                      int64_t deadlineNs,
+                                                      ExecutionResult* executionResult) override;
+    ndk::ScopedAStatus executeFencedWithConfig(
+            const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+            const ExecutionConfig& config, int64_t deadlineNs, int64_t durationNs,
+            FencedExecutionResult* executionResult) override;
 
     ::android::nn::SharedPreparedModel getUnderlyingPreparedModel() const;
 
diff --git a/neuralnetworks/utils/adapter/aidl/src/Burst.cpp b/neuralnetworks/utils/adapter/aidl/src/Burst.cpp
index 4fabb20..a4a80fa 100644
--- a/neuralnetworks/utils/adapter/aidl/src/Burst.cpp
+++ b/neuralnetworks/utils/adapter/aidl/src/Burst.cpp
@@ -93,7 +93,8 @@
 nn::ExecutionResult<ExecutionResult> executeSynchronously(
         const nn::IBurst& burst, const Burst::ThreadSafeMemoryCache& cache, const Request& request,
         const std::vector<int64_t>& memoryIdentifierTokens, bool measureTiming, int64_t deadlineNs,
-        int64_t loopTimeoutDurationNs) {
+        int64_t loopTimeoutDurationNs, const std::vector<TokenValuePair>& hints,
+        const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
     if (request.pools.size() != memoryIdentifierTokens.size()) {
         return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
                << "request.pools.size() != memoryIdentifierTokens.size()";
@@ -107,11 +108,13 @@
     const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
     const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
     const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
+    auto nnHints = NN_TRY(convertInput(hints));
+    auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
 
     const auto hold = ensureAllMemoriesAreCached(&nnRequest, memoryIdentifierTokens, burst, cache);
 
-    const auto result =
-            burst.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration);
+    const auto result = burst.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration,
+                                      nnHints, nnExtensionNameToPrefix);
 
     if (!result.ok() && result.error().code == nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE) {
         const auto& [message, code, outputShapes] = result.error();
@@ -155,7 +158,24 @@
                                                ExecutionResult* executionResult) {
     auto result =
             adapter::executeSynchronously(*kBurst, kMemoryCache, request, memoryIdentifierTokens,
-                                          measureTiming, deadlineNs, loopTimeoutDurationNs);
+                                          measureTiming, deadlineNs, loopTimeoutDurationNs, {}, {});
+    if (!result.has_value()) {
+        auto [message, code, _] = std::move(result).error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    *executionResult = std::move(result).value();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus Burst::executeSynchronouslyWithConfig(
+        const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+        const ExecutionConfig& config, int64_t deadlineNs, ExecutionResult* executionResult) {
+    auto result = adapter::executeSynchronously(
+            *kBurst, kMemoryCache, request, memoryIdentifierTokens, config.measureTiming,
+            deadlineNs, config.loopTimeoutDurationNs, config.executionHints,
+            config.extensionNameToPrefix);
     if (!result.has_value()) {
         auto [message, code, _] = std::move(result).error();
         const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
diff --git a/neuralnetworks/utils/adapter/aidl/src/Device.cpp b/neuralnetworks/utils/adapter/aidl/src/Device.cpp
index 763be7f..84aaddb 100644
--- a/neuralnetworks/utils/adapter/aidl/src/Device.cpp
+++ b/neuralnetworks/utils/adapter/aidl/src/Device.cpp
@@ -148,13 +148,14 @@
     }
 }
 
-nn::GeneralResult<void> prepareModel(const nn::SharedDevice& device, const Executor& executor,
-                                     const Model& model, ExecutionPreference preference,
-                                     Priority priority, int64_t deadlineNs,
-                                     const std::vector<ndk::ScopedFileDescriptor>& modelCache,
-                                     const std::vector<ndk::ScopedFileDescriptor>& dataCache,
-                                     const std::vector<uint8_t>& token,
-                                     const std::shared_ptr<IPreparedModelCallback>& callback) {
+nn::GeneralResult<void> prepareModel(
+        const nn::SharedDevice& device, const Executor& executor, const Model& model,
+        ExecutionPreference preference, Priority priority, int64_t deadlineNs,
+        const std::vector<ndk::ScopedFileDescriptor>& modelCache,
+        const std::vector<ndk::ScopedFileDescriptor>& dataCache, const std::vector<uint8_t>& token,
+        const std::vector<TokenValuePair>& hints,
+        const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix,
+        const std::shared_ptr<IPreparedModelCallback>& callback) {
     if (callback.get() == nullptr) {
         return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT) << "Invalid callback";
     }
@@ -166,12 +167,16 @@
     auto nnModelCache = NN_TRY(convertInput(modelCache));
     auto nnDataCache = NN_TRY(convertInput(dataCache));
     const auto nnToken = NN_TRY(convertCacheToken(token));
+    auto nnHints = NN_TRY(convertInput(hints));
+    auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
 
     Task task = [device, nnModel = std::move(nnModel), nnPreference, nnPriority, nnDeadline,
                  nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),
-                 nnToken, callback] {
-        auto result = device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline,
-                                           nnModelCache, nnDataCache, nnToken);
+                 nnToken, nnHints = std::move(nnHints),
+                 nnExtensionNameToPrefix = std::move(nnExtensionNameToPrefix), callback] {
+        auto result =
+                device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline, nnModelCache,
+                                     nnDataCache, nnToken, nnHints, nnExtensionNameToPrefix);
         notify(callback.get(), std::move(result));
     };
     executor(std::move(task), nnDeadline);
@@ -273,8 +278,9 @@
                                         const std::vector<ndk::ScopedFileDescriptor>& dataCache,
                                         const std::vector<uint8_t>& token,
                                         const std::shared_ptr<IPreparedModelCallback>& callback) {
-    const auto result = adapter::prepareModel(kDevice, kExecutor, model, preference, priority,
-                                              deadlineNs, modelCache, dataCache, token, callback);
+    const auto result =
+            adapter::prepareModel(kDevice, kExecutor, model, preference, priority, deadlineNs,
+                                  modelCache, dataCache, token, {}, {}, callback);
     if (!result.has_value()) {
         const auto& [message, code] = result.error();
         const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
@@ -301,4 +307,21 @@
     return ndk::ScopedAStatus::ok();
 }
 
+ndk::ScopedAStatus Device::prepareModelWithConfig(
+        const Model& model, const PrepareModelConfig& config,
+        const std::shared_ptr<IPreparedModelCallback>& callback) {
+    const auto result = adapter::prepareModel(
+            kDevice, kExecutor, model, config.preference, config.priority, config.deadlineNs,
+            config.modelCache, config.dataCache, config.cacheToken, config.compilationHints,
+            config.extensionNameToPrefix, callback);
+    if (!result.has_value()) {
+        const auto& [message, code] = result.error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        callback->notify(aidlCode, nullptr);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
 }  // namespace aidl::android::hardware::neuralnetworks::adapter
diff --git a/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp b/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp
index 71ed1a8..790558f 100644
--- a/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp
+++ b/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp
@@ -17,6 +17,7 @@
 #include "PreparedModel.h"
 
 #include "Burst.h"
+#include "Execution.h"
 
 #include <aidl/android/hardware/neuralnetworks/BnFencedExecutionCallback.h>
 #include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
@@ -26,6 +27,7 @@
 #include <aidl/android/hardware/neuralnetworks/Request.h>
 #include <android-base/logging.h>
 #include <android/binder_auto_utils.h>
+#include <nnapi/IExecution.h>
 #include <nnapi/IPreparedModel.h>
 #include <nnapi/Result.h>
 #include <nnapi/SharedMemory.h>
@@ -116,17 +118,20 @@
     return durationNs < 0 ? nn::OptionalTimePoint{} : nn::TimePoint(makeDuration(durationNs));
 }
 
-nn::ExecutionResult<ExecutionResult> executeSynchronously(const nn::IPreparedModel& preparedModel,
-                                                          const Request& request,
-                                                          bool measureTiming, int64_t deadlineNs,
-                                                          int64_t loopTimeoutDurationNs) {
+nn::ExecutionResult<ExecutionResult> executeSynchronously(
+        const nn::IPreparedModel& preparedModel, const Request& request, bool measureTiming,
+        int64_t deadlineNs, int64_t loopTimeoutDurationNs, const std::vector<TokenValuePair>& hints,
+        const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
     const auto nnRequest = NN_TRY(convertInput(request));
     const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
     const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
     const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
+    auto nnHints = NN_TRY(convertInput(hints));
+    auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
 
     const auto result =
-            preparedModel.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration);
+            preparedModel.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration,
+                                  nnHints, nnExtensionNameToPrefix);
 
     if (!result.ok() && result.error().code == nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE) {
         const auto& [message, code, outputShapes] = result.error();
@@ -145,16 +150,76 @@
 nn::GeneralResult<FencedExecutionResult> executeFenced(
         const nn::IPreparedModel& preparedModel, const Request& request,
         const std::vector<ndk::ScopedFileDescriptor>& waitFor, bool measureTiming,
-        int64_t deadlineNs, int64_t loopTimeoutDurationNs, int64_t durationNs) {
+        int64_t deadlineNs, int64_t loopTimeoutDurationNs, int64_t durationNs,
+        const std::vector<TokenValuePair>& hints,
+        const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
     const auto nnRequest = NN_TRY(convertInput(request));
     const auto nnWaitFor = NN_TRY(convertSyncFences(waitFor));
     const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
     const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
     const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
     const auto nnDuration = NN_TRY(makeOptionalDuration(durationNs));
+    auto nnHints = NN_TRY(convertInput(hints));
+    auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
 
     auto [syncFence, executeFencedInfoCallback] = NN_TRY(preparedModel.executeFenced(
-            nnRequest, nnWaitFor, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration, nnDuration));
+            nnRequest, nnWaitFor, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration, nnDuration,
+            nnHints, nnExtensionNameToPrefix));
+
+    ndk::ScopedFileDescriptor fileDescriptor;
+    if (syncFence.hasFd()) {
+        auto uniqueFd = NN_TRY(nn::dupFd(syncFence.getFd()));
+        fileDescriptor = ndk::ScopedFileDescriptor(uniqueFd.release());
+    }
+
+    return FencedExecutionResult{.callback = ndk::SharedRefBase::make<FencedExecutionCallback>(
+                                         std::move(executeFencedInfoCallback)),
+                                 .syncFence = std::move(fileDescriptor)};
+}
+
+nn::GeneralResult<nn::SharedExecution> createReusableExecution(
+        const nn::IPreparedModel& preparedModel, const Request& request, bool measureTiming,
+        int64_t loopTimeoutDurationNs, const std::vector<TokenValuePair>& hints,
+        const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
+    const auto nnRequest = NN_TRY(convertInput(request));
+    const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
+    const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
+    auto nnHints = NN_TRY(convertInput(hints));
+    auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
+
+    return preparedModel.createReusableExecution(nnRequest, nnMeasureTiming, nnLoopTimeoutDuration,
+                                                 nnHints, nnExtensionNameToPrefix);
+}
+
+nn::ExecutionResult<ExecutionResult> executeSynchronously(const nn::IExecution& execution,
+                                                          int64_t deadlineNs) {
+    const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
+
+    const auto result = execution.compute(nnDeadline);
+
+    if (!result.ok() && result.error().code == nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE) {
+        const auto& [message, code, outputShapes] = result.error();
+        LOG(ERROR) << "executeSynchronously failed with " << code << ": " << message;
+        return ExecutionResult{.outputSufficientSize = false,
+                               .outputShapes = utils::convert(outputShapes).value(),
+                               .timing = {.timeInDriverNs = -1, .timeOnDeviceNs = -1}};
+    }
+
+    const auto& [outputShapes, timing] = NN_TRY(result);
+    return ExecutionResult{.outputSufficientSize = true,
+                           .outputShapes = utils::convert(outputShapes).value(),
+                           .timing = utils::convert(timing).value()};
+}
+
+nn::GeneralResult<FencedExecutionResult> executeFenced(
+        const nn::IExecution& execution, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+        int64_t deadlineNs, int64_t durationNs) {
+    const auto nnWaitFor = NN_TRY(convertSyncFences(waitFor));
+    const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
+    const auto nnDuration = NN_TRY(makeOptionalDuration(durationNs));
+
+    auto [syncFence, executeFencedInfoCallback] =
+            NN_TRY(execution.computeFenced(nnWaitFor, nnDeadline, nnDuration));
 
     ndk::ScopedFileDescriptor fileDescriptor;
     if (syncFence.hasFd()) {
@@ -179,7 +244,7 @@
                                                        int64_t loopTimeoutDurationNs,
                                                        ExecutionResult* executionResult) {
     auto result = adapter::executeSynchronously(*kPreparedModel, request, measureTiming, deadlineNs,
-                                                loopTimeoutDurationNs);
+                                                loopTimeoutDurationNs, {}, {});
     if (!result.has_value()) {
         const auto& [message, code, _] = result.error();
         const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
@@ -195,7 +260,41 @@
         bool measureTiming, int64_t deadlineNs, int64_t loopTimeoutDurationNs, int64_t durationNs,
         FencedExecutionResult* executionResult) {
     auto result = adapter::executeFenced(*kPreparedModel, request, waitFor, measureTiming,
-                                         deadlineNs, loopTimeoutDurationNs, durationNs);
+                                         deadlineNs, loopTimeoutDurationNs, durationNs, {}, {});
+    if (!result.has_value()) {
+        const auto& [message, code] = result.error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    *executionResult = std::move(result).value();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PreparedModel::executeSynchronouslyWithConfig(const Request& request,
+                                                                 const ExecutionConfig& config,
+                                                                 int64_t deadlineNs,
+                                                                 ExecutionResult* executionResult) {
+    auto result = adapter::executeSynchronously(
+            *kPreparedModel, request, config.measureTiming, deadlineNs,
+            config.loopTimeoutDurationNs, config.executionHints, config.extensionNameToPrefix);
+    if (!result.has_value()) {
+        const auto& [message, code, _] = result.error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    *executionResult = std::move(result).value();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PreparedModel::executeFencedWithConfig(
+        const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+        const ExecutionConfig& config, int64_t deadlineNs, int64_t durationNs,
+        FencedExecutionResult* executionResult) {
+    auto result = adapter::executeFenced(*kPreparedModel, request, waitFor, config.measureTiming,
+                                         deadlineNs, config.loopTimeoutDurationNs, durationNs,
+                                         config.executionHints, config.extensionNameToPrefix);
     if (!result.has_value()) {
         const auto& [message, code] = result.error();
         const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
@@ -222,4 +321,51 @@
     return kPreparedModel;
 }
 
+ndk::ScopedAStatus PreparedModel::createReusableExecution(const Request& request,
+                                                          const ExecutionConfig& config,
+                                                          std::shared_ptr<IExecution>* execution) {
+    auto result = adapter::createReusableExecution(
+            *kPreparedModel, request, config.measureTiming, config.loopTimeoutDurationNs,
+            config.executionHints, config.extensionNameToPrefix);
+    if (!result.has_value()) {
+        const auto& [message, code] = result.error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    *execution = ndk::SharedRefBase::make<Execution>(std::move(result).value());
+    return ndk::ScopedAStatus::ok();
+}
+
+Execution::Execution(nn::SharedExecution execution) : kExecution(std::move(execution)) {
+    CHECK(kExecution != nullptr);
+}
+
+ndk::ScopedAStatus Execution::executeSynchronously(int64_t deadlineNs,
+                                                   ExecutionResult* executionResult) {
+    auto result = adapter::executeSynchronously(*kExecution, deadlineNs);
+    if (!result.has_value()) {
+        const auto& [message, code, _] = result.error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    *executionResult = std::move(result).value();
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus Execution::executeFenced(const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+                                            int64_t deadlineNs, int64_t durationNs,
+                                            FencedExecutionResult* executionResult) {
+    auto result = adapter::executeFenced(*kExecution, waitFor, deadlineNs, durationNs);
+    if (!result.has_value()) {
+        const auto& [message, code] = result.error();
+        const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+        return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+                static_cast<int32_t>(aidlCode), message.c_str());
+    }
+    *executionResult = std::move(result).value();
+    return ndk::ScopedAStatus::ok();
+}
+
 }  // namespace aidl::android::hardware::neuralnetworks::adapter
diff --git a/neuralnetworks/utils/adapter/hidl/Android.bp b/neuralnetworks/utils/adapter/hidl/Android.bp
index d073106..6875daa 100644
--- a/neuralnetworks/utils/adapter/hidl/Android.bp
+++ b/neuralnetworks/utils/adapter/hidl/Android.bp
@@ -30,17 +30,16 @@
     local_include_dirs: ["include/nnapi/hal"],
     export_include_dirs: ["include"],
     static_libs: [
-        "neuralnetworks_types",
-        "neuralnetworks_utils_hal_1_0",
-        "neuralnetworks_utils_hal_1_1",
-        "neuralnetworks_utils_hal_1_2",
-        "neuralnetworks_utils_hal_1_3",
-    ],
-    shared_libs: [
         "android.hardware.neuralnetworks@1.0",
         "android.hardware.neuralnetworks@1.1",
         "android.hardware.neuralnetworks@1.2",
         "android.hardware.neuralnetworks@1.3",
         "libfmq",
+        "neuralnetworks_types",
+        "neuralnetworks_utils_hal_1_0",
+        "neuralnetworks_utils_hal_1_1",
+        "neuralnetworks_utils_hal_1_2",
+        "neuralnetworks_utils_hal_1_3",
+        "neuralnetworks_utils_hal_common",
     ],
 }
diff --git a/neuralnetworks/utils/adapter/hidl/src/Burst.cpp b/neuralnetworks/utils/adapter/hidl/src/Burst.cpp
index 8b2e1dd..e3b165b 100644
--- a/neuralnetworks/utils/adapter/hidl/src/Burst.cpp
+++ b/neuralnetworks/utils/adapter/hidl/src/Burst.cpp
@@ -250,7 +250,7 @@
     nn::MeasureTiming canonicalMeasure = NN_TRY(nn::convert(measure));
 
     const auto [outputShapes, timing] =
-            NN_TRY(mBurstExecutor->execute(canonicalRequest, canonicalMeasure, {}, {}));
+            NN_TRY(mBurstExecutor->execute(canonicalRequest, canonicalMeasure, {}, {}, {}, {}));
 
     return std::make_pair(NN_TRY(V1_2::utils::convert(outputShapes)),
                           NN_TRY(V1_2::utils::convert(timing)));
diff --git a/neuralnetworks/utils/adapter/hidl/src/Device.cpp b/neuralnetworks/utils/adapter/hidl/src/Device.cpp
index 4993a80..0f44638 100644
--- a/neuralnetworks/utils/adapter/hidl/src/Device.cpp
+++ b/neuralnetworks/utils/adapter/hidl/src/Device.cpp
@@ -135,7 +135,7 @@
 
     Task task = [device, nnModel = std::move(nnModel), executor, callback] {
         auto result = device->prepareModel(nnModel, nn::ExecutionPreference::DEFAULT,
-                                           nn::Priority::DEFAULT, {}, {}, {}, {});
+                                           nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
         notify(callback.get(), std::move(result), executor);
     };
     executor(std::move(task), {});
@@ -155,8 +155,8 @@
     const auto nnPreference = NN_TRY(convertInput(preference));
 
     Task task = [device, nnModel = std::move(nnModel), nnPreference, executor, callback] {
-        auto result =
-                device->prepareModel(nnModel, nnPreference, nn::Priority::DEFAULT, {}, {}, {}, {});
+        auto result = device->prepareModel(nnModel, nnPreference, nn::Priority::DEFAULT, {}, {}, {},
+                                           {}, {}, {});
         notify(callback.get(), std::move(result), executor);
     };
     executor(std::move(task), {});
@@ -185,7 +185,7 @@
                  nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),
                  nnToken, executor, callback] {
         auto result = device->prepareModel(nnModel, nnPreference, nn::Priority::DEFAULT, {},
-                                           nnModelCache, nnDataCache, nnToken);
+                                           nnModelCache, nnDataCache, nnToken, {}, {});
         notify(callback.get(), std::move(result), executor);
     };
     executor(std::move(task), {});
@@ -215,7 +215,7 @@
                  nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),
                  nnToken, executor, callback] {
         auto result = device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline,
-                                           nnModelCache, nnDataCache, nnToken);
+                                           nnModelCache, nnDataCache, nnToken, {}, {});
         notify(callback.get(), std::move(result), executor);
     };
     executor(std::move(task), nnDeadline);
diff --git a/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp b/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp
index 71060d5..c6055a6 100644
--- a/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp
+++ b/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp
@@ -159,7 +159,7 @@
     }
 
     Task task = [preparedModel, nnRequest = std::move(nnRequest), callback] {
-        auto result = preparedModel->execute(nnRequest, nn::MeasureTiming::NO, {}, {});
+        auto result = preparedModel->execute(nnRequest, nn::MeasureTiming::NO, {}, {}, {}, {});
         notify(callback.get(), std::move(result));
     };
     executor(std::move(task), {});
@@ -185,7 +185,7 @@
     }
 
     Task task = [preparedModel, nnRequest = std::move(nnRequest), nnMeasure, callback] {
-        auto result = preparedModel->execute(nnRequest, nnMeasure, {}, {});
+        auto result = preparedModel->execute(nnRequest, nnMeasure, {}, {}, {}, {});
         notify(callback.get(), std::move(result));
     };
     executor(std::move(task), {});
@@ -216,8 +216,8 @@
 
     Task task = [preparedModel, nnRequest = std::move(nnRequest), nnMeasure, nnDeadline,
                  nnLoopTimeoutDuration, callback] {
-        auto result =
-                preparedModel->execute(nnRequest, nnMeasure, nnDeadline, nnLoopTimeoutDuration);
+        auto result = preparedModel->execute(nnRequest, nnMeasure, nnDeadline,
+                                             nnLoopTimeoutDuration, {}, {});
         notify(callback.get(), std::move(result));
     };
     executor(std::move(task), nnDeadline);
@@ -232,7 +232,7 @@
     const auto nnMeasure = NN_TRY(convertInput(measure));
 
     const auto [outputShapes, timing] =
-            NN_TRY(preparedModel->execute(nnRequest, nnMeasure, {}, {}));
+            NN_TRY(preparedModel->execute(nnRequest, nnMeasure, {}, {}, {}, {}));
 
     auto hidlOutputShapes = NN_TRY(V1_2::utils::convert(outputShapes));
     const auto hidlTiming = NN_TRY(V1_2::utils::convert(timing));
@@ -248,8 +248,8 @@
     const auto nnDeadline = NN_TRY(convertInput(deadline));
     const auto nnLoopTimeoutDuration = NN_TRY(convertInput(loopTimeoutDuration));
 
-    const auto [outputShapes, timing] =
-            NN_TRY(preparedModel->execute(nnRequest, nnMeasure, nnDeadline, nnLoopTimeoutDuration));
+    const auto [outputShapes, timing] = NN_TRY(preparedModel->execute(
+            nnRequest, nnMeasure, nnDeadline, nnLoopTimeoutDuration, {}, {}));
 
     auto hidlOutputShapes = NN_TRY(V1_3::utils::convert(outputShapes));
     const auto hidlTiming = NN_TRY(V1_3::utils::convert(timing));
@@ -293,8 +293,9 @@
     const auto nnLoopTimeoutDuration = NN_TRY(convertInput(loopTimeoutDuration));
     const auto nnDuration = NN_TRY(convertInput(duration));
 
-    auto [syncFence, executeFencedCallback] = NN_TRY(preparedModel->executeFenced(
-            nnRequest, nnWaitFor, nnMeasure, nnDeadline, nnLoopTimeoutDuration, nnDuration));
+    auto [syncFence, executeFencedCallback] =
+            NN_TRY(preparedModel->executeFenced(nnRequest, nnWaitFor, nnMeasure, nnDeadline,
+                                                nnLoopTimeoutDuration, nnDuration, {}, {}));
 
     auto hidlSyncFence = NN_TRY(V1_3::utils::convert(syncFence.getSharedHandle()));
     auto hidlExecuteFencedCallback = sp<FencedExecutionCallback>::make(executeFencedCallback);
diff --git a/neuralnetworks/utils/common/Android.bp b/neuralnetworks/utils/common/Android.bp
index 39927a3..bfba24f 100644
--- a/neuralnetworks/utils/common/Android.bp
+++ b/neuralnetworks/utils/common/Android.bp
@@ -39,20 +39,12 @@
     srcs: ["test/*.cpp"],
     static_libs: [
         "libgmock",
-        "libneuralnetworks_common",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_common",
     ],
     shared_libs: [
-        "android.hidl.allocator@1.0",
-        "android.hidl.memory@1.0",
         "libbase",
         "libcutils",
-        "libfmq",
-        "libhidlbase",
-        "libhidlmemory",
-        "liblog",
-        "libutils",
     ],
     target: {
         android: {
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
index e86edda..1f1245f 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
@@ -33,12 +33,15 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 };
 
 }  // namespace android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h
index 5e62b9a..9582873 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h
@@ -52,8 +52,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h
index de30aae..3f1f290 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h
@@ -31,18 +31,23 @@
   public:
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
             const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
             const nn::OptionalDuration& loopTimeoutDuration,
-            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+            const nn::OptionalDuration& timeoutDurationAfterFence,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
 
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
index fde2486..129431f 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
@@ -48,18 +48,23 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
   private:
     bool isValidInternal() const EXCLUDES(mMutex);
     nn::GeneralResult<nn::SharedExecution> createReusableExecutionInternal(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const;
 
     const Factory kMakeBurst;
     mutable std::mutex mMutex;
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h
index 84ae799..267d634 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h
@@ -65,8 +65,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache,
-            const nn::CacheToken& token) const override;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
@@ -83,7 +84,9 @@
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelInternal(
             const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const;
+            const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const;
     nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCacheInternal(
             nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
             const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const;
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
index 86533ed..bbfc220 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
@@ -49,18 +49,23 @@
 
     nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalTimePoint& deadline,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
             const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
             const nn::OptionalDuration& loopTimeoutDuration,
-            const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+            const nn::OptionalDuration& timeoutDurationAfterFence,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedExecution> createReusableExecution(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const override;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
 
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
 
@@ -70,7 +75,9 @@
     bool isValidInternal() const EXCLUDES(mMutex);
     nn::GeneralResult<nn::SharedExecution> createReusableExecutionInternal(
             const nn::Request& request, nn::MeasureTiming measure,
-            const nn::OptionalDuration& loopTimeoutDuration) const;
+            const nn::OptionalDuration& loopTimeoutDuration,
+            const std::vector<nn::TokenValuePair>& hints,
+            const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const;
     nn::GeneralResult<nn::SharedBurst> configureExecutionBurstInternal() const;
 
     const Factory kMakePreparedModel;
diff --git a/neuralnetworks/utils/common/src/InvalidBurst.cpp b/neuralnetworks/utils/common/src/InvalidBurst.cpp
index 0191533..3fdfb5c 100644
--- a/neuralnetworks/utils/common/src/InvalidBurst.cpp
+++ b/neuralnetworks/utils/common/src/InvalidBurst.cpp
@@ -34,13 +34,17 @@
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> InvalidBurst::execute(
         const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
         const nn::OptionalTimePoint& /*deadline*/,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR() << "InvalidBurst";
 }
 
 nn::GeneralResult<nn::SharedExecution> InvalidBurst::createReusableExecution(
         const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR() << "InvalidBurst";
 }
 
diff --git a/neuralnetworks/utils/common/src/InvalidDevice.cpp b/neuralnetworks/utils/common/src/InvalidDevice.cpp
index 535ccb4..c8cc287 100644
--- a/neuralnetworks/utils/common/src/InvalidDevice.cpp
+++ b/neuralnetworks/utils/common/src/InvalidDevice.cpp
@@ -84,7 +84,9 @@
         const nn::Model& /*model*/, nn::ExecutionPreference /*preference*/,
         nn::Priority /*priority*/, nn::OptionalTimePoint /*deadline*/,
         const std::vector<nn::SharedHandle>& /*modelCache*/,
-        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
+        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR() << "InvalidDevice";
 }
 
diff --git a/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp b/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp
index 8195462..f6f978d 100644
--- a/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp
+++ b/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp
@@ -27,9 +27,12 @@
 namespace android::hardware::neuralnetworks::utils {
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
-InvalidPreparedModel::execute(const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
-                              const nn::OptionalTimePoint& /*deadline*/,
-                              const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+InvalidPreparedModel::execute(
+        const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
+        const nn::OptionalTimePoint& /*deadline*/,
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR() << "InvalidPreparedModel";
 }
 
@@ -38,13 +41,17 @@
         const nn::Request& /*request*/, const std::vector<nn::SyncFence>& /*waitFor*/,
         nn::MeasureTiming /*measure*/, const nn::OptionalTimePoint& /*deadline*/,
         const nn::OptionalDuration& /*loopTimeoutDuration*/,
-        const nn::OptionalDuration& /*timeoutDurationAfterFence*/) const {
+        const nn::OptionalDuration& /*timeoutDurationAfterFence*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR() << "InvalidPreparedModel";
 }
 
 nn::GeneralResult<nn::SharedExecution> InvalidPreparedModel::createReusableExecution(
         const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
-        const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+        const nn::OptionalDuration& /*loopTimeoutDuration*/,
+        const std::vector<nn::TokenValuePair>& /*hints*/,
+        const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
     return NN_ERROR() << "InvalidPreparedModel";
 }
 
diff --git a/neuralnetworks/utils/common/src/ResilientBurst.cpp b/neuralnetworks/utils/common/src/ResilientBurst.cpp
index 79cbe39..bf7a8ea 100644
--- a/neuralnetworks/utils/common/src/ResilientBurst.cpp
+++ b/neuralnetworks/utils/common/src/ResilientBurst.cpp
@@ -105,37 +105,49 @@
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> ResilientBurst::execute(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalTimePoint& deadline,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
-    const auto fn = [&request, measure, deadline, loopTimeoutDuration](const nn::IBurst& burst) {
-        return burst.execute(request, measure, deadline, loopTimeoutDuration);
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+    const auto fn = [&request, measure, deadline, loopTimeoutDuration, &hints,
+                     &extensionNameToPrefix](const nn::IBurst& burst) {
+        return burst.execute(request, measure, deadline, loopTimeoutDuration, hints,
+                             extensionNameToPrefix);
     };
     return protect(*this, fn);
 }
 
 nn::GeneralResult<nn::SharedExecution> ResilientBurst::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
 #if 0
     auto self = shared_from_this();
-    ResilientExecution::Factory makeExecution =
-            [burst = std::move(self), request, measure, loopTimeoutDuration] {
-        return burst->createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+    ResilientExecution::Factory makeExecution = [burst = std::move(self), request, measure,
+                                                 loopTimeoutDuration, &hints,
+                                                 &extensionNameToPrefix] {
+        return burst->createReusableExecutionInternal(request, measure, loopTimeoutDuration, hints,
+                                                      extensionNameToPrefix);
     };
     return ResilientExecution::create(std::move(makeExecution));
 #else
-    return createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+    return createReusableExecutionInternal(request, measure, loopTimeoutDuration, hints,
+                                           extensionNameToPrefix);
 #endif
 }
 
 nn::GeneralResult<nn::SharedExecution> ResilientBurst::createReusableExecutionInternal(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     if (!isValidInternal()) {
         return std::make_shared<const InvalidExecution>();
     }
-    const auto fn = [&request, measure, &loopTimeoutDuration](const nn::IBurst& burst) {
-        return burst.createReusableExecution(request, measure, loopTimeoutDuration);
+    const auto fn = [&request, measure, &loopTimeoutDuration, &hints,
+                     &extensionNameToPrefix](const nn::IBurst& burst) {
+        return burst.createReusableExecution(request, measure, loopTimeoutDuration, hints,
+                                             extensionNameToPrefix);
     };
     return protect(*this, fn);
 }
diff --git a/neuralnetworks/utils/common/src/ResilientDevice.cpp b/neuralnetworks/utils/common/src/ResilientDevice.cpp
index 2023c9a..a5c2640 100644
--- a/neuralnetworks/utils/common/src/ResilientDevice.cpp
+++ b/neuralnetworks/utils/common/src/ResilientDevice.cpp
@@ -179,19 +179,21 @@
 nn::GeneralResult<nn::SharedPreparedModel> ResilientDevice::prepareModel(
         const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
         nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
 #if 0
     auto self = shared_from_this();
     ResilientPreparedModel::Factory makePreparedModel = [device = std::move(self), model,
                                                          preference, priority, deadline, modelCache,
-                                                         dataCache, token] {
+                                                         dataCache, token, hints, extensionNameToPrefix] {
         return device->prepareModelInternal(model, preference, priority, deadline, modelCache,
-                                            dataCache, token);
+                                            dataCache, token, hints, extensionNameToPrefix);
     };
     return ResilientPreparedModel::create(std::move(makePreparedModel));
 #else
-    return prepareModelInternal(model, preference, priority, deadline, modelCache, dataCache,
-                                token);
+    return prepareModelInternal(model, preference, priority, deadline, modelCache, dataCache, token,
+                                hints, extensionNameToPrefix);
 #endif
 }
 
@@ -234,14 +236,16 @@
 nn::GeneralResult<nn::SharedPreparedModel> ResilientDevice::prepareModelInternal(
         const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
         nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
-        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     if (!isValidInternal()) {
         return std::make_shared<const InvalidPreparedModel>();
     }
-    const auto fn = [&model, preference, priority, &deadline, &modelCache, &dataCache,
-                     &token](const nn::IDevice& device) {
+    const auto fn = [&model, preference, priority, &deadline, &modelCache, &dataCache, &token,
+                     &hints, &extensionNameToPrefix](const nn::IDevice& device) {
         return device.prepareModel(model, preference, priority, deadline, modelCache, dataCache,
-                                   token);
+                                   token, hints, extensionNameToPrefix);
     };
     return protect(*this, fn, /*blocking=*/false);
 }
diff --git a/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp b/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
index 1ae19bc..b5843c0 100644
--- a/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
+++ b/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
@@ -104,43 +104,53 @@
 }
 
 nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
-ResilientPreparedModel::execute(const nn::Request& request, nn::MeasureTiming measure,
-                                const nn::OptionalTimePoint& deadline,
-                                const nn::OptionalDuration& loopTimeoutDuration) const {
-    const auto fn = [&request, measure, &deadline,
-                     &loopTimeoutDuration](const nn::IPreparedModel& preparedModel) {
-        return preparedModel.execute(request, measure, deadline, loopTimeoutDuration);
+ResilientPreparedModel::execute(
+        const nn::Request& request, nn::MeasureTiming measure,
+        const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+    const auto fn = [&request, measure, &deadline, &loopTimeoutDuration, &hints,
+                     &extensionNameToPrefix](const nn::IPreparedModel& preparedModel) {
+        return preparedModel.execute(request, measure, deadline, loopTimeoutDuration, hints,
+                                     extensionNameToPrefix);
     };
     return protect(*this, fn);
 }
 
 nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-ResilientPreparedModel::executeFenced(const nn::Request& request,
-                                      const std::vector<nn::SyncFence>& waitFor,
-                                      nn::MeasureTiming measure,
-                                      const nn::OptionalTimePoint& deadline,
-                                      const nn::OptionalDuration& loopTimeoutDuration,
-                                      const nn::OptionalDuration& timeoutDurationAfterFence) const {
+ResilientPreparedModel::executeFenced(
+        const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
+        nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const nn::OptionalDuration& timeoutDurationAfterFence,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     const auto fn = [&request, &waitFor, measure, &deadline, &loopTimeoutDuration,
-                     &timeoutDurationAfterFence](const nn::IPreparedModel& preparedModel) {
+                     &timeoutDurationAfterFence, &hints,
+                     &extensionNameToPrefix](const nn::IPreparedModel& preparedModel) {
         return preparedModel.executeFenced(request, waitFor, measure, deadline, loopTimeoutDuration,
-                                           timeoutDurationAfterFence);
+                                           timeoutDurationAfterFence, hints, extensionNameToPrefix);
     };
     return protect(*this, fn);
 }
 
 nn::GeneralResult<nn::SharedExecution> ResilientPreparedModel::createReusableExecution(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
 #if 0
     auto self = shared_from_this();
-    ResilientExecution::Factory makeExecution =
-            [preparedModel = std::move(self), request, measure, loopTimeoutDuration] {
-        return preparedModel->createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+    ResilientExecution::Factory makeExecution = [preparedModel = std::move(self), request, measure,
+                                                 loopTimeoutDuration, hints,
+                                                 extensionNameToPrefix] {
+        return preparedModel->createReusableExecutionInternal(request, measure, loopTimeoutDuration,
+                                                              hints, extensionNameToPrefix);
     };
     return ResilientExecution::create(std::move(makeExecution));
 #else
-    return createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+    return createReusableExecutionInternal(request, measure, loopTimeoutDuration, hints,
+                                           extensionNameToPrefix);
 #endif
 }
 
@@ -159,13 +169,16 @@
 
 nn::GeneralResult<nn::SharedExecution> ResilientPreparedModel::createReusableExecutionInternal(
         const nn::Request& request, nn::MeasureTiming measure,
-        const nn::OptionalDuration& loopTimeoutDuration) const {
+        const nn::OptionalDuration& loopTimeoutDuration,
+        const std::vector<nn::TokenValuePair>& hints,
+        const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
     if (!isValidInternal()) {
         return std::make_shared<const InvalidExecution>();
     }
-    const auto fn = [&request, measure,
-                     &loopTimeoutDuration](const nn::IPreparedModel& preparedModel) {
-        return preparedModel.createReusableExecution(request, measure, loopTimeoutDuration);
+    const auto fn = [&request, measure, &loopTimeoutDuration, &hints,
+                     &extensionNameToPrefix](const nn::IPreparedModel& preparedModel) {
+        return preparedModel.createReusableExecution(request, measure, loopTimeoutDuration, hints,
+                                                     extensionNameToPrefix);
     };
     return protect(*this, fn);
 }
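
The execute() and createReusableExecution() paths gain the same two parameters. A hedged sketch of a single execution through the resilient wrapper, with timing disabled and no hints (request is assumed to be a previously built nn::Request):

    const auto result = preparedModel->execute(request, nn::MeasureTiming::NO,
                                               /*deadline=*/{}, /*loopTimeoutDuration=*/{},
                                               /*hints=*/{}, /*extensionNameToPrefix=*/{});
    if (result.has_value()) {
        const auto& [outputShapes, timing] = result.value();
        // outputShapes describes any dynamically sized outputs; timing is unmeasured here.
    }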
diff --git a/neuralnetworks/utils/common/test/MockDevice.h b/neuralnetworks/utils/common/test/MockDevice.h
index a9428bc..a0fc5c3 100644
--- a/neuralnetworks/utils/common/test/MockDevice.h
+++ b/neuralnetworks/utils/common/test/MockDevice.h
@@ -39,7 +39,9 @@
     MOCK_METHOD(GeneralResult<SharedPreparedModel>, prepareModel,
                 (const Model& model, ExecutionPreference preference, Priority priority,
                  OptionalTimePoint deadline, const std::vector<SharedHandle>& modelCache,
-                 const std::vector<SharedHandle>& dataCache, const CacheToken& token),
+                 const std::vector<SharedHandle>& dataCache, const CacheToken& token,
+                 const std::vector<TokenValuePair>& hints,
+                 const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
                 (const, override));
     MOCK_METHOD(GeneralResult<SharedPreparedModel>, prepareModelFromCache,
                 (OptionalTimePoint deadline, const std::vector<SharedHandle>& modelCache,
diff --git a/neuralnetworks/utils/common/test/MockPreparedModel.h b/neuralnetworks/utils/common/test/MockPreparedModel.h
index c8ce006..b8613b2 100644
--- a/neuralnetworks/utils/common/test/MockPreparedModel.h
+++ b/neuralnetworks/utils/common/test/MockPreparedModel.h
@@ -27,17 +27,23 @@
   public:
     MOCK_METHOD((ExecutionResult<std::pair<std::vector<OutputShape>, Timing>>), execute,
                 (const Request& request, MeasureTiming measure, const OptionalTimePoint& deadline,
-                 const OptionalDuration& loopTimeoutDuration),
+                 const OptionalDuration& loopTimeoutDuration,
+                 const std::vector<TokenValuePair>& hints,
+                 const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
                 (const, override));
     MOCK_METHOD((GeneralResult<std::pair<SyncFence, ExecuteFencedInfoCallback>>), executeFenced,
                 (const Request& request, const std::vector<SyncFence>& waitFor,
                  MeasureTiming measure, const OptionalTimePoint& deadline,
                  const OptionalDuration& loopTimeoutDuration,
-                 const OptionalDuration& timeoutDurationAfterFence),
+                 const OptionalDuration& timeoutDurationAfterFence,
+                 const std::vector<TokenValuePair>& hints,
+                 const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
                 (const, override));
     MOCK_METHOD((GeneralResult<SharedExecution>), createReusableExecution,
-                (const nn::Request& request, nn::MeasureTiming measure,
-                 const nn::OptionalDuration& loopTimeoutDuration),
+                (const Request& request, MeasureTiming measure,
+                 const OptionalDuration& loopTimeoutDuration,
+                 const std::vector<TokenValuePair>& hints,
+                 const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
                 (const, override));
     MOCK_METHOD(GeneralResult<SharedBurst>, configureExecutionBurst, (), (const, override));
     MOCK_METHOD(std::any, getUnderlyingResource, (), (const, override));
diff --git a/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp b/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
index 0488b63..d9b8505 100644
--- a/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
+++ b/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
@@ -309,12 +309,12 @@
     // setup call
     const auto [mockDevice, mockDeviceFactory, device] = setup();
     const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
-    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(Return(mockPreparedModel));
 
     // run test
-    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -324,12 +324,12 @@
 TEST(ResilientDeviceTest, prepareModelError) {
     // setup call
     const auto [mockDevice, mockDeviceFactory, device] = setup();
-    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnGeneralFailure);
 
     // run test
-    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -339,13 +339,13 @@
 TEST(ResilientDeviceTest, prepareModelDeadObjectFailedRecovery) {
     // setup call
     const auto [mockDevice, mockDeviceFactory, device] = setup();
-    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnDeadObject);
     EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(kReturnGeneralFailure);
 
     // run test
-    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -355,18 +355,18 @@
 TEST(ResilientDeviceTest, prepareModelDeadObjectSuccessfulRecovery) {
     // setup call
     const auto [mockDevice, mockDeviceFactory, device] = setup();
-    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnDeadObject);
     const auto recoveredMockDevice = createConfiguredMockDevice();
     const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
-    EXPECT_CALL(*recoveredMockDevice, prepareModel(_, _, _, _, _, _, _))
+    EXPECT_CALL(*recoveredMockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(Return(mockPreparedModel));
     EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
 
     // run test
-    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -679,7 +679,7 @@
     device->recover(mockDevice.get(), /*blocking=*/false);
 
     // run test
-    auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+    auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
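
The test updates above simply widen the wildcard lists to the new nine-argument arity. If a test ever needs to pin the new arguments rather than ignore them, a hedged gMock variant could look like this (testing::IsEmpty is the stock container matcher; this sketch is not part of the change):

    using ::testing::IsEmpty;
    // Expect prepareModel to be called with no hints and no extension prefixes.
    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, IsEmpty(), IsEmpty()))
            .Times(1)
            .WillOnce(Return(mockPreparedModel));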
diff --git a/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
index d396ca8..276bfba 100644
--- a/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
+++ b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
@@ -104,12 +104,12 @@
 TEST(ResilientPreparedModelTest, execute) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _))
             .Times(1)
             .WillOnce(Return(kNoExecutionError));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -119,10 +119,12 @@
 TEST(ResilientPreparedModelTest, executeError) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnGeneralFailure);
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnGeneralFailure);
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -132,12 +134,12 @@
 TEST(ResilientPreparedModelTest, executeDeadObjectFailedRecovery) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
     constexpr auto ret = [] { return nn::error(nn::ErrorStatus::GENERAL_FAILURE); };
     EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(ret);
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -147,9 +149,9 @@
 TEST(ResilientPreparedModelTest, executeDeadObjectSuccessfulRecovery) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
     const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
-    EXPECT_CALL(*recoveredMockPreparedModel, execute(_, _, _, _))
+    EXPECT_CALL(*recoveredMockPreparedModel, execute(_, _, _, _, _, _))
             .Times(1)
             .WillOnce(Return(kNoExecutionError));
     EXPECT_CALL(*mockPreparedModelFactory, Call())
@@ -157,7 +159,7 @@
             .WillOnce(Return(recoveredMockPreparedModel));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -167,12 +169,12 @@
 TEST(ResilientPreparedModelTest, executeFenced) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(Return(kNoFencedExecutionError));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -182,12 +184,12 @@
 TEST(ResilientPreparedModelTest, executeFencedError) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnGeneralFailure);
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -197,13 +199,13 @@
 TEST(ResilientPreparedModelTest, executeFencedDeadObjectFailedRecovery) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnDeadObject);
     EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -213,11 +215,11 @@
 TEST(ResilientPreparedModelTest, executeFencedDeadObjectSuccessfulRecovery) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnDeadObject);
     const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
-    EXPECT_CALL(*recoveredMockPreparedModel, executeFenced(_, _, _, _, _, _))
+    EXPECT_CALL(*recoveredMockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(Return(kNoFencedExecutionError));
     EXPECT_CALL(*mockPreparedModelFactory, Call())
@@ -225,7 +227,7 @@
             .WillOnce(Return(recoveredMockPreparedModel));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -235,12 +237,12 @@
 TEST(ResilientPreparedModelTest, createReusableExecution) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _, _))
             .Times(1)
             .WillOnce(Return(kNoCreateReusableExecutionError));
 
     // run test
-    const auto result = preparedModel->createReusableExecution({}, {}, {});
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -250,12 +252,12 @@
 TEST(ResilientPreparedModelTest, createReusableExecutionError) {
     // setup call
     const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
-    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _, _))
             .Times(1)
             .WillOnce(kReturnGeneralFailure);
 
     // run test
-    const auto result = preparedModel->createReusableExecution({}, {}, {});
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/utils/service/Android.bp b/neuralnetworks/utils/service/Android.bp
index c3272ae..452078b 100644
--- a/neuralnetworks/utils/service/Android.bp
+++ b/neuralnetworks/utils/service/Android.bp
@@ -33,6 +33,10 @@
     local_include_dirs: ["include/nnapi/hal"],
     export_include_dirs: ["include"],
     static_libs: [
+        "android.hardware.neuralnetworks@1.0",
+        "android.hardware.neuralnetworks@1.1",
+        "android.hardware.neuralnetworks@1.2",
+        "android.hardware.neuralnetworks@1.3",
         "neuralnetworks_types",
         "neuralnetworks_utils_hal_1_0",
         "neuralnetworks_utils_hal_1_1",
@@ -40,10 +44,4 @@
         "neuralnetworks_utils_hal_1_3",
         "neuralnetworks_utils_hal_common",
     ],
-    shared_libs: [
-        "android.hardware.neuralnetworks@1.0",
-        "android.hardware.neuralnetworks@1.1",
-        "android.hardware.neuralnetworks@1.2",
-        "android.hardware.neuralnetworks@1.3",
-    ],
 }
diff --git a/radio/1.5/vts/functional/radio_hidl_hal_api.cpp b/radio/1.5/vts/functional/radio_hidl_hal_api.cpp
index d108951..152858f 100644
--- a/radio/1.5/vts/functional/radio_hidl_hal_api.cpp
+++ b/radio/1.5/vts/functional/radio_hidl_hal_api.cpp
@@ -1271,8 +1271,12 @@
     EXPECT_EQ(serial, radioRsp_v1_5->rspInfo.serial);
 
     int32_t firstApiLevel = android::base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+    int32_t boardApiLevel = android::base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
     // Allow devices shipping with Radio::1_5 and Android 11 to not support barring info.
-    if (firstApiLevel > 0 && firstApiLevel <= 30) {
+    // b/212384410 Some GRF targets launched with the S release but with an R vendor image
+    // do not support the getBarringInfo API. Allow these devices to not support barring info.
+    if ((firstApiLevel > 0 && firstApiLevel <= 30) ||
+        (boardApiLevel > 0 && boardApiLevel <= 30)) {
         ASSERT_TRUE(CheckAnyOfErrors(radioRsp_v1_5->rspInfo.error,
                                      {RadioError::NONE, RadioError::REQUEST_NOT_SUPPORTED}));
         // Early exit for devices that don't support barring info.
diff --git a/radio/aidl/aidl_api/android.hardware.radio.data/current/android/hardware/radio/data/IRadioDataIndication.aidl b/radio/aidl/aidl_api/android.hardware.radio.data/current/android/hardware/radio/data/IRadioDataIndication.aidl
index b0cc1eb..0ffa1f7 100644
--- a/radio/aidl/aidl_api/android.hardware.radio.data/current/android/hardware/radio/data/IRadioDataIndication.aidl
+++ b/radio/aidl/aidl_api/android.hardware.radio.data/current/android/hardware/radio/data/IRadioDataIndication.aidl
@@ -38,4 +38,5 @@
   oneway void keepaliveStatus(in android.hardware.radio.RadioIndicationType type, in android.hardware.radio.data.KeepaliveStatus status);
   oneway void pcoData(in android.hardware.radio.RadioIndicationType type, in android.hardware.radio.data.PcoDataInfo pco);
   oneway void unthrottleApn(in android.hardware.radio.RadioIndicationType type, in android.hardware.radio.data.DataProfileInfo dataProfileInfo);
+  oneway void slicingConfigChanged(in android.hardware.radio.RadioIndicationType type, in android.hardware.radio.data.SlicingConfig slicingConfig);
 }
diff --git a/radio/aidl/android/hardware/radio/data/IRadioDataIndication.aidl b/radio/aidl/android/hardware/radio/data/IRadioDataIndication.aidl
index 1772c88..938c695 100644
--- a/radio/aidl/android/hardware/radio/data/IRadioDataIndication.aidl
+++ b/radio/aidl/android/hardware/radio/data/IRadioDataIndication.aidl
@@ -21,6 +21,7 @@
 import android.hardware.radio.data.KeepaliveStatus;
 import android.hardware.radio.data.PcoDataInfo;
 import android.hardware.radio.data.SetupDataCallResult;
+import android.hardware.radio.data.SlicingConfig;
 
 /**
  * Interface declaring unsolicited radio indications for data APIs.
@@ -72,4 +73,17 @@
      * @param dataProfileInfo Data profile info.
      */
     void unthrottleApn(in RadioIndicationType type, in DataProfileInfo dataProfileInfo);
+
+    /**
+     * Indicates the current slicing configuration including URSP rules and NSSAIs
+     * (configured, allowed and rejected). URSP stands for UE route selection policy and is defined
+     * in 3GPP TS 24.526 Section 4.2. An NSSAI is a collection of network slices. Each network slice
+     * is identified by an S-NSSAI and is represented by the struct SliceInfo. NSSAI and S-NSSAI
+     * are defined in 3GPP TS 24.501.
+     *
+     * @param type Type of radio indication
+     * @param slicingConfig Current slicing configuration
+     *
+     */
+    void slicingConfigChanged(in RadioIndicationType type, in SlicingConfig slicingConfig);
 }
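
On the HAL side, a vendor radio service raises this unsolicited indication through whatever IRadioDataIndication callback was registered with it. A minimal sketch for the NDK backend (the helper name and null check are illustrative; the indication is oneway, so no response is expected):

    using ::aidl::android::hardware::radio::RadioIndicationType;
    using ::aidl::android::hardware::radio::data::IRadioDataIndication;
    using ::aidl::android::hardware::radio::data::SlicingConfig;

    void notifySlicingConfigChanged(const std::shared_ptr<IRadioDataIndication>& indication,
                                    const SlicingConfig& config) {
        if (indication == nullptr) return;  // no listener registered yet
        indication->slicingConfigChanged(RadioIndicationType::UNSOLICITED, config);
    }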
diff --git a/radio/aidl/compat/libradiocompat/data/RadioIndication-data.cpp b/radio/aidl/compat/libradiocompat/data/RadioIndication-data.cpp
index 1d367d2..602bb39 100644
--- a/radio/aidl/compat/libradiocompat/data/RadioIndication-data.cpp
+++ b/radio/aidl/compat/libradiocompat/data/RadioIndication-data.cpp
@@ -85,4 +85,11 @@
     return {};
 }
 
+Return<void> RadioIndication::slicingConfigChanged(V1_0::RadioIndicationType type,
+                                                   const V1_6::SlicingConfig& slicingConfig) {
+    LOG_CALL << type;
+    dataCb()->slicingConfigChanged(toAidl(type), toAidl(slicingConfig));
+    return {};
+}
+
 }  // namespace android::hardware::radio::compat
diff --git a/radio/aidl/compat/libradiocompat/include/libradiocompat/RadioIndication.h b/radio/aidl/compat/libradiocompat/include/libradiocompat/RadioIndication.h
index c668af5..6cfd59c 100644
--- a/radio/aidl/compat/libradiocompat/include/libradiocompat/RadioIndication.h
+++ b/radio/aidl/compat/libradiocompat/include/libradiocompat/RadioIndication.h
@@ -186,6 +186,8 @@
             V1_0::RadioIndicationType type,
             const hidl_vec<V1_6::SetupDataCallResult>& dcList) override;
     Return<void> unthrottleApn(V1_0::RadioIndicationType type, const hidl_string& apn) override;
+    Return<void> slicingConfigChanged(V1_0::RadioIndicationType type,
+                                      const V1_6::SlicingConfig& slicingConfig) override;
     Return<void> currentLinkCapacityEstimate_1_6(V1_0::RadioIndicationType type,
                                                  const V1_6::LinkCapacityEstimate& lce) override;
     Return<void> currentSignalStrength_1_6(V1_0::RadioIndicationType type,
diff --git a/radio/aidl/vts/radio_data_indication.cpp b/radio/aidl/vts/radio_data_indication.cpp
index 4d3c539..61e079e 100644
--- a/radio/aidl/vts/radio_data_indication.cpp
+++ b/radio/aidl/vts/radio_data_indication.cpp
@@ -37,3 +37,8 @@
                                                       const DataProfileInfo& /*dataProfileInfo*/) {
     return ndk::ScopedAStatus::ok();
 }
+
+ndk::ScopedAStatus RadioDataIndication::slicingConfigChanged(
+        RadioIndicationType /*type*/, const SlicingConfig& /*slicingConfig*/) {
+    return ndk::ScopedAStatus::ok();
+}
diff --git a/radio/aidl/vts/radio_data_utils.h b/radio/aidl/vts/radio_data_utils.h
index 50c7878..fb91ef6 100644
--- a/radio/aidl/vts/radio_data_utils.h
+++ b/radio/aidl/vts/radio_data_utils.h
@@ -95,6 +95,8 @@
 
     virtual ndk::ScopedAStatus unthrottleApn(RadioIndicationType type,
                                              const DataProfileInfo& dataProfile) override;
+    virtual ndk::ScopedAStatus slicingConfigChanged(RadioIndicationType type,
+                                                    const SlicingConfig& slicingConfig) override;
 };
 
 // The main test class for Radio AIDL Data.
diff --git a/security/dice/aidl/Android.bp b/security/dice/aidl/Android.bp
index af9dd33..01bc91e 100644
--- a/security/dice/aidl/Android.bp
+++ b/security/dice/aidl/Android.bp
@@ -41,6 +41,10 @@
         },
         rust: {
             enabled: true,
+            apex_available: [
+                "//apex_available:platform",
+                "com.android.compos",
+            ],
         },
     },
     //     versions: ["1"],
diff --git a/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/BccHandover.aidl b/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/BccHandover.aidl
index ab50c36..8baca94 100644
--- a/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/BccHandover.aidl
+++ b/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/BccHandover.aidl
@@ -35,7 +35,7 @@
 /* @hide */
 @RustDerive(Clone=true, Eq=true, Hash=true, Ord=true, PartialEq=true, PartialOrd=true) @VintfStability
 parcelable BccHandover {
-  byte[] cdiAttest;
-  byte[] cdiSeal;
+  byte[32] cdiAttest;
+  byte[32] cdiSeal;
   android.hardware.security.dice.Bcc bcc;
 }
diff --git a/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/InputValues.aidl b/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/InputValues.aidl
index 79583fb..e43c429 100644
--- a/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/InputValues.aidl
+++ b/security/dice/aidl/aidl_api/android.hardware.security.dice/current/android/hardware/security/dice/InputValues.aidl
@@ -35,10 +35,10 @@
 /* @hide */
 @RustDerive(Clone=true, Eq=true, Hash=true, Ord=true, PartialEq=true, PartialOrd=true) @VintfStability
 parcelable InputValues {
-  byte[] codeHash;
+  byte[64] codeHash;
   android.hardware.security.dice.Config config;
-  byte[] authorityHash;
+  byte[64] authorityHash;
   @nullable byte[] authorityDescriptor;
   android.hardware.security.dice.Mode mode = android.hardware.security.dice.Mode.NOT_INITIALIZED;
-  byte[] hidden;
+  byte[64] hidden;
 }
diff --git a/security/dice/aidl/android/hardware/security/dice/BccHandover.aidl b/security/dice/aidl/android/hardware/security/dice/BccHandover.aidl
index d522cef..6ca862c 100644
--- a/security/dice/aidl/android/hardware/security/dice/BccHandover.aidl
+++ b/security/dice/aidl/android/hardware/security/dice/BccHandover.aidl
@@ -27,13 +27,13 @@
 @RustDerive(Clone=true, Eq=true, PartialEq=true, Ord=true, PartialOrd=true, Hash=true)
 parcelable BccHandover {
     /**
-     * CDI_attest. Must a exactly 32 bytes of data.
+     * CDI_attest. Must be exactly 32 bytes of data.
      */
-    byte[] cdiAttest;
+    byte[32] cdiAttest;
     /**
-     * CDI_seal. Must a exactly 32 bytes of data.
+     * CDI_seal. Must be exactly 32 bytes of data.
      */
-    byte[] cdiSeal;
+    byte[32] cdiSeal;
     /**
      * CBOR encoded BCC.
      *
diff --git a/security/dice/aidl/android/hardware/security/dice/InputValues.aidl b/security/dice/aidl/android/hardware/security/dice/InputValues.aidl
index e44ef22..711d523 100644
--- a/security/dice/aidl/android/hardware/security/dice/InputValues.aidl
+++ b/security/dice/aidl/android/hardware/security/dice/InputValues.aidl
@@ -34,7 +34,7 @@
     /**
      * The target code hash. Must be exactly 64 bytes.
      */
-    byte[] codeHash;
+    byte[64] codeHash;
     /**
      * The configuration data.
      */
@@ -42,7 +42,7 @@
     /**
      * The authority hash. Must be exactly 64 bytes. Must be all zero if unused.
      */
-    byte[] authorityHash;
+    byte[64] authorityHash;
     /**
      * Optional free form authorityDescriptor.
      */
@@ -54,5 +54,5 @@
     /**
      * Optional hidden values. Must be exactly 64 bytes. Must be all zero if unused.
      */
-    byte[] hidden;
+    byte[64] hidden;
 }
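
Switching these fields to fixed-size arrays moves the length requirement from prose into the type system: the generated backends use fixed-length containers, so a caller can no longer hand over 63 or 65 bytes. A hedged sketch for the NDK backend, assuming the usual mapping of byte[64] to std::array<uint8_t, 64> (the helper and the Mode choice are illustrative):

    using ::aidl::android::hardware::security::dice::InputValues;
    using ::aidl::android::hardware::security::dice::Mode;

    InputValues makeInputValues(const std::array<uint8_t, 64>& codeHash,
                                const std::array<uint8_t, 64>& authorityHash) {
        InputValues values;
        values.codeHash = codeHash;        // wrong-sized input no longer compiles
        values.authorityHash = authorityHash;
        values.hidden.fill(0);             // "must be all zero if unused"
        values.mode = Mode::NORMAL;
        // values.config and values.authorityDescriptor are left at their defaults here.
        return values;
    }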
diff --git a/security/keymint/aidl/android/hardware/security/keymint/IKeyMintOperation.aidl b/security/keymint/aidl/android/hardware/security/keymint/IKeyMintOperation.aidl
index ce83044..ca89555 100644
--- a/security/keymint/aidl/android/hardware/security/keymint/IKeyMintOperation.aidl
+++ b/security/keymint/aidl/android/hardware/security/keymint/IKeyMintOperation.aidl
@@ -227,7 +227,8 @@
      * o PaddingMode::RSA_PSS.  For PSS-padded signature operations, the PSS salt length must match
      *   the size of the PSS digest selected.  The digest specified with Tag::DIGEST in params
      *   on begin() must be used as the PSS digest algorithm, MGF1 must be used as the mask
-     *   generation function and SHA1 must be used as the MGF1 digest algorithm.
+     *   generation function and the digest specified with Tag::DIGEST in params on begin() must
+     *   also be used as the MGF1 digest algorithm.
      *
      * -- ECDSA keys --
      *
diff --git a/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp b/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp
index 02462fc..374f2da 100644
--- a/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp
+++ b/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp
@@ -812,6 +812,7 @@
         if (padding == PaddingMode::RSA_PSS) {
             EXPECT_GT(EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, RSA_PKCS1_PSS_PADDING), 0);
             EXPECT_GT(EVP_PKEY_CTX_set_rsa_pss_saltlen(pkey_ctx, EVP_MD_size(md)), 0);
+            EXPECT_GT(EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, md), 0);
         }
 
         ASSERT_EQ(1, EVP_DigestVerifyUpdate(&digest_ctx,
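
For context, the one-line addition above completes a PSS verification context in which the salt length and the MGF1 digest are both derived from the same digest as the signature itself. A hedged BoringSSL sketch of the full setup (function name and error handling are illustrative; assumes bssl::UniquePtr from BoringSSL's C++ helpers):

    #include <openssl/evp.h>
    #include <openssl/rsa.h>

    bool VerifyRsaPss(EVP_PKEY* pub_key, const EVP_MD* md,
                      const std::string& message, const std::string& signature) {
        bssl::UniquePtr<EVP_MD_CTX> ctx(EVP_MD_CTX_new());
        EVP_PKEY_CTX* pkey_ctx = nullptr;
        if (EVP_DigestVerifyInit(ctx.get(), &pkey_ctx, md, /*engine=*/nullptr, pub_key) != 1) {
            return false;
        }
        // Salt length and MGF1 digest both follow the message digest, matching the KeyMint rule.
        if (EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, RSA_PKCS1_PSS_PADDING) <= 0 ||
            EVP_PKEY_CTX_set_rsa_pss_saltlen(pkey_ctx, EVP_MD_size(md)) <= 0 ||
            EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, md) <= 0) {
            return false;
        }
        return EVP_DigestVerify(ctx.get(),
                                reinterpret_cast<const uint8_t*>(signature.data()),
                                signature.size(),
                                reinterpret_cast<const uint8_t*>(message.data()),
                                message.size()) == 1;
    }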
diff --git a/security/keymint/aidl/vts/functional/KeyMintTest.cpp b/security/keymint/aidl/vts/functional/KeyMintTest.cpp
index 5b80b6f..d109058 100644
--- a/security/keymint/aidl/vts/functional/KeyMintTest.cpp
+++ b/security/keymint/aidl/vts/functional/KeyMintTest.cpp
@@ -4608,8 +4608,10 @@
     auto params = AuthorizationSetBuilder().BlockMode(BlockMode::ECB).Padding(PaddingMode::PKCS7);
 
     // Try various message lengths; all should work.
-    for (size_t i = 0; i < 32; ++i) {
-        string message(i, 'a');
+    for (size_t i = 0; i <= 48; i++) {
+        SCOPED_TRACE(testing::Message() << "i = " << i);
+        // Edge case: '\t' (0x09) is also a valid PKCS7 padding character.
+        string message(i, '\t');
         string ciphertext = EncryptMessage(message, params);
         EXPECT_EQ(i + 16 - (i % 16), ciphertext.size());
         string plaintext = DecryptMessage(ciphertext, params);
@@ -4633,7 +4635,7 @@
     auto params = AuthorizationSetBuilder().BlockMode(BlockMode::ECB).Padding(PaddingMode::PKCS7);
 
     // Try various message lengths; all should fail
-    for (size_t i = 0; i < 32; ++i) {
+    for (size_t i = 0; i <= 48; i++) {
         string message(i, 'a');
         EXPECT_EQ(ErrorCode::INCOMPATIBLE_PADDING_MODE, Begin(KeyPurpose::ENCRYPT, params));
     }
@@ -5775,8 +5777,8 @@
 
     ASSERT_GT(key_blob_.size(), 0U);
 
-    // Two-block message.
-    string message = "1234567890123456";
+    // Four-block message.
+    string message = "12345678901234561234567890123456";
     vector<uint8_t> iv1;
     string ciphertext1 = EncryptMessage(message, BlockMode::CBC, PaddingMode::NONE, &iv1);
     EXPECT_EQ(message.size(), ciphertext1.size());
@@ -5936,8 +5938,10 @@
                                                  .Padding(PaddingMode::PKCS7)));
 
     // Try various message lengths; all should work.
-    for (size_t i = 0; i < 32; ++i) {
-        string message(i, 'a');
+    for (size_t i = 0; i <= 32; i++) {
+        SCOPED_TRACE(testing::Message() << "i = " << i);
+        // Edge case: '\t' (0x09) is also a valid PKCS7 padding character, albeit not for 3DES.
+        string message(i, '\t');
         vector<uint8_t> iv;
         string ciphertext = EncryptMessage(message, BlockMode::CBC, PaddingMode::PKCS7, &iv);
         EXPECT_EQ(i + 8 - (i % 8), ciphertext.size());
@@ -5959,7 +5963,7 @@
                                                  .Padding(PaddingMode::NONE)));
 
     // Try various message lengths; all should fail.
-    for (size_t i = 0; i < 32; ++i) {
+    for (size_t i = 0; i <= 32; i++) {
         auto begin_params =
                 AuthorizationSetBuilder().BlockMode(BlockMode::CBC).Padding(PaddingMode::PKCS7);
         EXPECT_EQ(ErrorCode::INCOMPATIBLE_PADDING_MODE, Begin(KeyPurpose::ENCRYPT, begin_params));
@@ -5990,6 +5994,7 @@
                                 .Authorization(TAG_NONCE, iv);
 
     for (size_t i = 0; i < kMaxPaddingCorruptionRetries; ++i) {
+        SCOPED_TRACE(testing::Message() << "i = " << i);
         ++ciphertext[ciphertext.size() / 2];
         EXPECT_EQ(ErrorCode::OK, Begin(KeyPurpose::DECRYPT, begin_params));
         string plaintext;
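
The switch to '\t' (0x09) plaintext in the PKCS#7 round-trip tests targets a classic unpadding pitfall: a correct implementation reads the pad length only from the final byte, so data bytes that happen to equal a plausible pad value must survive the round trip. A hedged sketch of the rule being exercised, for a 16-byte block cipher (assumes <cstdint>, <optional> and <vector>):

    std::optional<std::vector<uint8_t>> Pkcs7Unpad(const std::vector<uint8_t>& padded) {
        constexpr size_t kBlockSize = 16;
        if (padded.empty() || padded.size() % kBlockSize != 0) return std::nullopt;
        const uint8_t pad = padded.back();          // pad length comes from the last byte only
        if (pad == 0 || pad > kBlockSize) return std::nullopt;
        for (size_t i = padded.size() - pad; i < padded.size(); ++i) {
            if (padded[i] != pad) return std::nullopt;  // every padding byte must equal pad
        }
        // A 9-byte message of 0x09 bytes is padded with seven 0x07 bytes and is stripped
        // back to exactly nine bytes, even though its data bytes look like padding.
        return std::vector<uint8_t>(padded.begin(), padded.end() - pad);
    }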
diff --git a/wifi/hostapd/aidl/vts/functional/VtsHalHostapdTargetTest.cpp b/wifi/hostapd/aidl/vts/functional/VtsHalHostapdTargetTest.cpp
index 41c54b3..8f88196 100644
--- a/wifi/hostapd/aidl/vts/functional/VtsHalHostapdTargetTest.cpp
+++ b/wifi/hostapd/aidl/vts/functional/VtsHalHostapdTargetTest.cpp
@@ -431,6 +431,7 @@
     EXPECT_TRUE(status.isOk());
 }
 
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(HostapdAidl);
 INSTANTIATE_TEST_SUITE_P(
     Hostapd, HostapdAidl,
     testing::ValuesIn(android::getAidlHalInstanceNames(IHostapd::descriptor)),
diff --git a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/AuthAlgMask.aidl b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/AuthAlgMask.aidl
index e8101ea..039d41d 100644
--- a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/AuthAlgMask.aidl
+++ b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/AuthAlgMask.aidl
@@ -19,7 +19,7 @@
 /**
  * Possible mask of values for AuthAlg param.
  * See /external/wpa_supplicant_8/src/common/defs.h for
- * all possible values (starting at WPA_AUTH_ALG_OPEN).
+ * the historical values (starting at WPA_AUTH_ALG_OPEN).
  */
 @VintfStability
 @Backing(type="int")
diff --git a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/GroupCipherMask.aidl b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/GroupCipherMask.aidl
index d5b26ad..99e9da0 100644
--- a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/GroupCipherMask.aidl
+++ b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/GroupCipherMask.aidl
@@ -19,7 +19,7 @@
 /**
  * Possible mask of values for GroupCipher param.
  * See /external/wpa_supplicant_8/src/common/defs.h for
- * all possible values (starting at WPA_CIPHER_WEP40).
+ * the historical values (starting at WPA_CIPHER_WEP40).
  */
 @VintfStability
 @Backing(type="int")
diff --git a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/KeyMgmtMask.aidl b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/KeyMgmtMask.aidl
index 3225585..f0c3345 100644
--- a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/KeyMgmtMask.aidl
+++ b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/KeyMgmtMask.aidl
@@ -19,7 +19,7 @@
 /**
  * Possible mask of values for KeyMgmt param.
  * See /external/wpa_supplicant_8/src/common/defs.h for
- * all possible values (starting at WPA_KEY_MGMT_IEEE8021X).
+ * the historical values (starting at WPA_KEY_MGMT_IEEE8021X).
  */
 @VintfStability
 @Backing(type="int")
diff --git a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/PairwiseCipherMask.aidl b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/PairwiseCipherMask.aidl
index ad134fa..7179fea 100644
--- a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/PairwiseCipherMask.aidl
+++ b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/PairwiseCipherMask.aidl
@@ -19,7 +19,7 @@
 /**
  * Possible mask of values for PairwiseCipher param.
  * See /external/wpa_supplicant_8/src/common/defs.h for
- * all possible values (starting at WPA_CIPHER_NONE).
+ * the historical values (starting at WPA_CIPHER_NONE).
  */
 @VintfStability
 @Backing(type="int")
diff --git a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/ProtoMask.aidl b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/ProtoMask.aidl
index 65c832b..dc3d80b 100644
--- a/wifi/supplicant/aidl/android/hardware/wifi/supplicant/ProtoMask.aidl
+++ b/wifi/supplicant/aidl/android/hardware/wifi/supplicant/ProtoMask.aidl
@@ -19,7 +19,7 @@
 /**
  * Possible mask of values for Proto param.
  * See /external/wpa_supplicant_8/src/common/defs.h for
- * all possible values (starting at WPA_PROTO_WPA).
+ * the historical values (starting at WPA_PROTO_WPA).
  */
 @VintfStability
 @Backing(type="int")