Merge "AAudio: Scale capacity by device sample rate" into main
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index d0a4774..4e9b547 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -46,6 +46,7 @@
 using ::aidl::android::hardware::audio::core::IStreamOut;
 using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
 using ::aidl::android::hardware::audio::core::StreamDescriptor;
+using ::aidl::android::hardware::audio::core::VendorParameter;
 using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
 using ::aidl::android::media::audio::IHalAdapterVendorExtension;
 
@@ -73,7 +74,15 @@
 namespace android {
 
 using HalCommand = StreamDescriptor::Command;
+
 namespace {
+
+static constexpr int32_t kAidlVersion1 = 1;
+static constexpr int32_t kAidlVersion2 = 2;
+static constexpr int32_t kAidlVersion3 = 3;
+
+static constexpr const char* kCreateMmapBuffer = "aosp.createMmapBuffer";
+
 template<HalCommand::Tag cmd> HalCommand makeHalCommand() {
     return HalCommand::make<cmd>(::aidl::android::media::audio::common::Void{});
 }
@@ -135,15 +144,27 @@
         mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
 
-    if (mStream != nullptr) {
-        mContext.getCommandMQ()->setErrorHandler(
-                fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
-        mContext.getReplyMQ()->setErrorHandler(
-                fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
-        if (mContext.getDataMQ() != nullptr) {
-            mContext.getDataMQ()->setErrorHandler(
-                    fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
+    if (mStream == nullptr) return;
+
+    mContext.getCommandMQ()->setErrorHandler(
+            fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
+    mContext.getReplyMQ()->setErrorHandler(
+            fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
+    if (mContext.getDataMQ() != nullptr) {
+        mContext.getDataMQ()->setErrorHandler(
+                fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
+    }
+
+    if (auto status = mStream->getInterfaceVersion(&mAidlInterfaceVersion); status.isOk()) {
+        if (mAidlInterfaceVersion > kAidlVersion3) {
+            mSupportsCreateMmapBuffer = true;
+        } else {
+            VendorParameter createMmapBuffer{.id = kCreateMmapBuffer};
+            mSupportsCreateMmapBuffer =
+                    mStream->setVendorParameters({createMmapBuffer}, false).isOk();
         }
+    } else {
+        AUGMENT_LOG(E, "failed to retrieve stream interface version: %s", status.getMessage());
     }
 }
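At construction time, the stream now probes whether the HAL supports on-demand (re)creation of MMAP buffers: an interface version above 3 implies native support, while a V3 HAL is probed with a synchronous setVendorParameters call for the "aosp.createMmapBuffer" id. A minimal standalone sketch of that decision follows; HalStream and acceptsVendorParameter are hypothetical stand-ins for the AIDL stream proxy, and only the version threshold and the parameter id come from the patch.

    // Sketch only: HalStream stands in for the IStreamCommon proxy used above.
    #include <cstdint>
    #include <string>

    struct HalStream {
        int32_t interfaceVersion = 3;
        // Pretend probe: returns true if the HAL accepts the given vendor parameter id.
        bool acceptsVendorParameter(const std::string& id) const {
            return id == "aosp.createMmapBuffer";
        }
    };

    bool supportsCreateMmapBuffer(const HalStream& stream) {
        constexpr int32_t kAidlVersion3 = 3;
        if (stream.interfaceVersion > kAidlVersion3) {
            return true;  // V4+ HALs support MMAP buffer re-creation natively.
        }
        // Older HALs opt in by accepting the "aosp.createMmapBuffer" parameter.
        return stream.acceptsVendorParameter("aosp.createMmapBuffer");
    }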
 
@@ -400,12 +421,17 @@
     AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, &statePositions));
     if (reply.hardware.frames == StreamDescriptor::Position::UNKNOWN ||
         reply.hardware.timeNs == StreamDescriptor::Position::UNKNOWN) {
+        AUGMENT_LOG(W, "No position was reported by the HAL");
         return INVALID_OPERATION;
     }
-    *frames = reply.hardware.frames;
+    int64_t mostRecentResetPoint = std::max(statePositions.hardware.framesAtStandby,
+                                            statePositions.hardware.framesAtFlushOrDrain);
+    int64_t aidlFrames = reply.hardware.frames;
+    *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     *timestamp = reply.hardware.timeNs;
     return OK;
 }
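getHardwarePosition() now reports frames relative to the most recent reset point of the hardware counters (standby or flush/drain), mirroring what is already done for the observable counters further down in this file's diff. The arithmetic, isolated as a small sketch with a worked example:

    #include <algorithm>
    #include <cstdint>

    // Rebase a HAL frame count against the most recent reset point, clamping at zero so
    // a position captured just before the reset cannot come out negative.
    int64_t rebaseFrames(int64_t halFrames, int64_t framesAtStandby, int64_t framesAtFlushOrDrain) {
        const int64_t mostRecentResetPoint = std::max(framesAtStandby, framesAtFlushOrDrain);
        return halFrames <= mostRecentResetPoint ? 0 : halFrames - mostRecentResetPoint;
    }
    // Example: rebaseFrames(48000, 8000, 10000) == 38000; rebaseFrames(9000, 8000, 10000) == 0.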
@@ -627,7 +653,7 @@
                                 || mStatePositions.drainState == StatePositions::DrainState::ALL))) {
             AUGMENT_LOG(D, "setting position %lld as clip end",
                     (long long)mLastReply.observable.frames);
-            mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+            mStatePositions.observable.framesAtFlushOrDrain = mLastReply.observable.frames;
         }
         mStatePositions.drainState = mStatePositions.drainState == StatePositions::DrainState::EN ?
                 StatePositions::DrainState::EN_RECEIVED : StatePositions::DrainState::NONE;
@@ -650,12 +676,25 @@
     if (!mContext.isMmapped()) {
         return BAD_VALUE;
     }
+    if (mSupportsCreateMmapBuffer && (mAidlInterfaceVersion <= kAidlVersion3)) {
+        std::vector<VendorParameter> parameters;
+        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                        mStream->getVendorParameters({kCreateMmapBuffer}, &parameters)));
+        if (parameters.size() == 1) {
+            std::optional<MmapBufferDescriptor> result;
+            RETURN_STATUS_IF_ERROR(parameters[0].ext.getParcelable(&result));
+            mContext.updateMmapBufferDescriptor(std::move(*result));
+        } else {
+            AUGMENT_LOG(E, "invalid output from 'createMmapBuffer' via 'getVendorParameters': %s",
+                        internal::ToString(parameters).c_str());
+            return INVALID_OPERATION;
+        }
+    }
     const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
     info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
     info->buffer_size_frames = mContext.getBufferSizeFrames();
     info->burst_size_frames = bufferDescriptor.burstSizeFrames;
     info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
-
     return OK;
 }
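For a V3 HAL that opted in, createMmapBuffer() fetches a fresh buffer descriptor through getVendorParameters() and unpacks it from the VendorParameter's parcelable holder. The HAL-side counterpart is not part of this patch; below is a hedged sketch of what such a handler could look like, where createFreshMmapBuffer() is a hypothetical helper that re-creates the shared memory region.

    #include <string>
    #include <vector>

    #include <aidl/android/hardware/audio/core/MmapBufferDescriptor.h>
    #include <aidl/android/hardware/audio/core/VendorParameter.h>
    #include <android/binder_auto_utils.h>
    #include <android/binder_status.h>

    using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
    using ::aidl::android::hardware::audio::core::VendorParameter;

    // Hypothetical helper: re-creates the shared memory region and fills in burst size and flags.
    MmapBufferDescriptor createFreshMmapBuffer();

    ndk::ScopedAStatus handleGetVendorParameters(const std::vector<std::string>& ids,
                                                 std::vector<VendorParameter>* results) {
        for (const auto& id : ids) {
            if (id != "aosp.createMmapBuffer") continue;
            VendorParameter reply{.id = id};
            // Pack the descriptor into the ParcelableHolder; the framework unpacks it
            // with 'ext.getParcelable' as shown in the hunk above.
            if (reply.ext.setParcelable(createFreshMmapBuffer()) != STATUS_OK) {
                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
            }
            results->push_back(std::move(reply));
        }
        return ndk::ScopedAStatus::ok();
    }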
 
@@ -727,15 +766,18 @@
                 if (reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
                     if (command.getTag() == StreamDescriptor::Command::standby &&
                             reply->state == StreamDescriptor::State::STANDBY) {
-                        mStatePositions.framesAtStandby = reply->observable.frames;
+                        mStatePositions.observable.framesAtStandby = reply->observable.frames;
+                        mStatePositions.hardware.framesAtStandby = reply->hardware.frames;
                     } else if (command.getTag() == StreamDescriptor::Command::flush &&
                             reply->state == StreamDescriptor::State::IDLE) {
-                        mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                        mStatePositions.observable.framesAtFlushOrDrain = reply->observable.frames;
+                        mStatePositions.hardware.framesAtFlushOrDrain = reply->hardware.frames;
                     } else if (!mContext.isAsynchronous() &&
                             command.getTag() == StreamDescriptor::Command::drain &&
                             (reply->state == StreamDescriptor::State::IDLE ||
                                     reply->state == StreamDescriptor::State::DRAINING)) {
-                        mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                        mStatePositions.observable.framesAtFlushOrDrain = reply->observable.frames;
+                        mStatePositions.hardware.framesAtFlushOrDrain = reply->hardware.frames;
                     } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
                 }
                 if (mContext.isAsynchronous() &&
@@ -767,15 +809,19 @@
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
         StatePositions* statePositions) {
     bool doUpdate = false;
+    HalCommand cmd;
     {
         std::lock_guard l(mLock);
         doUpdate = uptimeNanos() > mLastReplyExpirationNs;
+        cmd = mContext.isMmapped() && mSupportsCreateMmapBuffer
+                && mLastReply.state == StreamDescriptor::State::ACTIVE
+                ? makeHalCommand<HalCommand::Tag::burst>(0)
+                : makeHalCommand<HalCommand::Tag::getStatus>();
     }
     if (doUpdate) {
         // Since updates are paced, it is OK to perform them from any thread, they should
         // not interfere with I/O operations of the worker.
-        return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                reply, true /*safeFromNonWorkerThread */, statePositions);
+        return sendCommand(cmd, reply, true /*safeFromNonWorkerThread */, statePositions);
     } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
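updateCountersIfNeeded() now polls an ACTIVE MMAP stream (when buffer re-creation is supported) with a zero-length burst instead of getStatus, presumably so that the reply carries freshly refreshed observable and hardware positions. The command selection, restated as a standalone sketch:

    #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
    #include <aidl/android/media/audio/common/Void.h>

    using HalCommand = ::aidl::android::hardware::audio::core::StreamDescriptor::Command;
    using StreamState = ::aidl::android::hardware::audio::core::StreamDescriptor::State;

    HalCommand pickPollCommand(bool mmapWithCreateMmapBuffer, StreamState state) {
        return mmapWithCreateMmapBuffer && state == StreamState::ACTIVE
                // A zero-length burst asks the HAL to report current positions in its reply.
                ? HalCommand::make<HalCommand::Tag::burst>(0)
                // Otherwise a plain status query is sufficient.
                : HalCommand::make<HalCommand::Tag::getStatus>(
                          ::aidl::android::media::audio::common::Void{});
    }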
@@ -882,10 +928,10 @@
     // See the table at the start of 'StreamHalInterface' on when it needs to reset.
     int64_t mostRecentResetPoint;
     if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
-        mostRecentResetPoint = statePositions.framesAtStandby;
+        mostRecentResetPoint = statePositions.observable.framesAtStandby;
     } else {
-        mostRecentResetPoint =
-                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        mostRecentResetPoint = std::max(statePositions.observable.framesAtStandby,
+                statePositions.observable.framesAtFlushOrDrain);
     }
     *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
@@ -961,8 +1007,8 @@
     if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
         *frames = aidlFrames;
     } else {
-        const int64_t mostRecentResetPoint =
-                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        const int64_t mostRecentResetPoint = std::max(statePositions.observable.framesAtStandby,
+                statePositions.observable.framesAtFlushOrDrain);
         *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index 32566e9..a026f52 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -37,9 +37,6 @@
 #include "ConversionHelperAidl.h"
 #include "StreamPowerLog.h"
 
-using ::aidl::android::hardware::audio::common::AudioOffloadMetadata;
-using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
-
 namespace android {
 
 class StreamContextAidl {
@@ -87,10 +84,14 @@
     ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
-    const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
+    const ::aidl::android::hardware::audio::core::MmapBufferDescriptor&
+            getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
     size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
     int getIoHandle() const { return mIoHandle; }
     bool hasClipTransitionSupport() const { return mHasClipTransitionSupport; }
+    void updateMmapBufferDescriptor(
+            ::aidl::android::hardware::audio::core::MmapBufferDescriptor&& desc) {
+        mMmapBufferDescriptor = std::move(desc); }
 
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
@@ -106,7 +107,7 @@
         using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
         return descriptor.audio.getTag() == Tag::mmap;
     }
-    static MmapBufferDescriptor maybeGetMmapBuffer(
+    static ::aidl::android::hardware::audio::core::MmapBufferDescriptor maybeGetMmapBuffer(
             ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
         using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
         if (descriptor.audio.getTag() == Tag::mmap) {
@@ -122,7 +123,7 @@
     std::unique_ptr<DataMQ> mDataMQ;
     bool mIsAsynchronous;
     bool mIsMmapped;
-    MmapBufferDescriptor mMmapBufferDescriptor;
+    ::aidl::android::hardware::audio::core::MmapBufferDescriptor mMmapBufferDescriptor;
     int mIoHandle;
     bool mHasClipTransitionSupport;
 };
@@ -183,9 +184,13 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
-    struct StatePositions {
+    struct FrameCounters {
         int64_t framesAtFlushOrDrain;
         int64_t framesAtStandby;
+    };
+    struct StatePositions {
+        FrameCounters observable;
+        FrameCounters hardware;
         enum DrainState : int32_t { NONE, ALL, EN /*early notify*/, EN_RECEIVED };
         DrainState drainState;
     };
@@ -288,7 +293,7 @@
 
     const bool mIsInput;
     const audio_config_base_t mConfig;
-    const StreamContextAidl mContext;
+    StreamContextAidl mContext;
     // This lock is used to make sending of a command and receiving a reply an atomic
     // operation. Otherwise, when two threads are trying to send a command, they may both advance to
     // reading of the reply once the HAL has consumed the command from the MQ, and that creates a
@@ -340,6 +345,8 @@
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
+    int32_t mAidlInterfaceVersion = -1;
+    bool mSupportsCreateMmapBuffer = false;
 };
 
 class CallbackBroker;
@@ -446,7 +453,7 @@
     const wp<CallbackBroker> mCallbackBroker;
     mediautils::atomic_wp<StreamOutHalInterfaceCallback> mClientCallback;
 
-    AudioOffloadMetadata mOffloadMetadata;
+    ::aidl::android::hardware::audio::common::AudioOffloadMetadata mOffloadMetadata;
 
     // Can not be constructed directly by clients.
     StreamOutHalAidl(
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 0a72dfa..1730bfa 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -61,6 +61,10 @@
 
 class VendorParameterMock {
   public:
+    void clearParameters() {
+        mAsyncParameters.clear();
+        mSyncParameters.clear();
+    }
     const std::vector<std::string>& getRetrievedParameterIds() const { return mGetParameterIds; }
     const std::vector<VendorParameter>& getAsyncParameters() const { return mAsyncParameters; }
     const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
@@ -995,6 +999,8 @@
                                   false /*hasClipTransitionSupport*/);
         mStream = sp<StreamHalAidl>::make("test", false /*isInput*/, config, 0 /*nominalLatency*/,
                                           std::move(context), mStreamCommon, mVendorExt);
+        // The stream may probe for some properties during creation.
+        mStreamCommon->clearParameters();
     }
     void TearDown() override {
         mStream.clear();
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index c381ed2..5e8ad80 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -93,6 +93,7 @@
         AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES;
 const char * const AudioParameter::keyClipTransitionSupport =
         AUDIO_PARAMETER_CLIP_TRANSITION_SUPPORT;
+const char * const AudioParameter::keyCreateMmapBuffer = AUDIO_PARAMETER_CREATE_MMAP_BUFFER;
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index d93974d..7b83fd2 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -152,6 +152,7 @@
     static const char * const keyOffloadCodecPaddingSamples;
 
     static const char * const keyClipTransitionSupport;
+    static const char * const keyCreateMmapBuffer;
 
     String8 toString() const { return toStringImpl(true); }
     String8 keysToString() const { return toStringImpl(false); }
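The new AudioParameter key mirrors the AIDL vendor parameter on the legacy key/value path. A hedged sketch of how a caller might build such a key/value pair; the value 1 and the helper function are illustrative and not part of this patch:

    #include <media/AudioParameter.h>

    using ::android::AudioParameter;
    using ::android::String8;

    String8 buildCreateMmapBufferRequest() {
        AudioParameter param;
        // Request MMAP buffer (re)creation via the legacy parameter path.
        param.addInt(String8(AudioParameter::keyCreateMmapBuffer), 1 /* illustrative value */);
        return param.toString();  // key/value string keyed by AUDIO_PARAMETER_CREATE_MMAP_BUFFER
    }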