blob: cf4b849e810db17e905f1269944bea06c257e0a7 [file] [log] [blame]
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#include "hidl/HidlSupport.h"
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>
#include <set>
#include <stdlib.h>
#include <inttypes.h>
#include <stdlib.h>
#include <dlfcn.h>
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"
#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
#include <aidl/android/media/BnResourceManagerClient.h>
#include <aidl/android/media/IResourceManagerService.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/dlext.h>
#include <binder/IMemory.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <hidlmemory/FrameworkUtils.h>
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
#include <media/MediaCodecInfo.h>
#include <media/MediaMetricsItem.h>
#include <media/MediaResource.h>
#include <media/NdkMediaErrorPriv.h>
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/formatshaper/FormatShaper.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BatteryChecker.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
#include <utils/Singleton.h>
namespace android {
using Status = ::ndk::ScopedAStatus;
using aidl::android::media::BnResourceManagerClient;
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;
// key for media statistics
static const char *kCodecKeyName = "codec";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
// in frameworks/base/media/java/android/media/MediaCodec.java
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
static const char *kCodecModeAudio = "audio";
static const char *kCodecModeImage = "image";
static const char *kCodecModeUnknown = "unknown";
static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure"; /* 0, 1 */
static const char *kCodecWidth = "android.media.mediacodec.width"; /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height"; /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees"; /* 0/90/180/270 */
static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";
// color aspects as configured by the app vs. as parsed from the bitstream
static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";
// Min/Max QP after shaping
static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";
// NB: These are not yet exposed as public Java API constants.
static const char *kCodecCrypto = "android.media.mediacodec.crypto"; /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile"; /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level"; /* 0..n */
static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode"; /* CQ/VBR/CBR */
static const char *kCodecBitrate = "android.media.mediacodec.bitrate"; /* 0..n */
static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate"; /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth"; /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs"; /* 0..n ms*/
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
// frame-latency histogram attributes
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg"; /* in us */
static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
// low-latency mode transition counters (see updateMediametrics)
static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame"; /* 0..n */
static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
// video encode input/output byte and frame counters
static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min"; /* in us */
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
static const char *kCodecPlaybackDurationSec =
"android.media.mediacodec.playback-duration-sec"; /* in sec */
/* -1: shaper disabled
>=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
// XXX suppress until we get our representation right
static bool kEmitHistogram = false;
// Derives the resource-manager client id from the client object's address.
// The id is only used as an opaque, process-unique token.
static int64_t getId(IResourceManagerClient const * client) {
    // Use named casts instead of the old C-style cast: uintptr_t is guaranteed
    // to hold the pointer value, which is then widened/converted to int64_t.
    return static_cast<int64_t>(reinterpret_cast<uintptr_t>(client));
}
// Convenience overload: derive the client id from the raw pointer managed by
// the shared_ptr.
static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
return getId(client.get());
}
// Returns true when |err| indicates resource exhaustion (out of memory).
static bool isResourceError(status_t err) {
    return err == NO_MEMORY;
}
static const int kMaxRetry = 2;
// How long ResourceManagerClient::reclaimResource waits for a client to
// voluntarily release a codec before reclaiming forcibly.
static const int kMaxReclaimWaitTimeInUs = 500000; // 0.5s
static const int kNumBuffersAlign = 16;
// Codec2 buffer usage requesting CPU read and write access.
static const C2MemoryUsage kDefaultReadWriteUsage{
C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
////////////////////////////////////////////////////////////////////////////////
// Binder object registered with ResourceManagerService on behalf of a
// MediaCodec. Holds only a weak reference so it never keeps a released
// codec alive.
struct ResourceManagerClient : public BnResourceManagerClient {
    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid) :
            mMediaCodec(codec), mPid(pid) {}

    // Called by ResourceManagerService when it wants this codec's resources
    // back. Sets *_aidl_return to true when the resources are (or already
    // were) released.
    Status reclaimResource(bool* _aidl_return) override {
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // Codec is already gone, so remove the resources as well
            ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
            std::shared_ptr<IResourceManagerService> service =
                    IResourceManagerService::fromBinder(binder);
            if (service == nullptr) {
                ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
            } else {
                // BUGFIX: only call into the service when it was actually
                // found; the previous code fell through and dereferenced a
                // null |service| here.
                service->removeClient(mPid, getId(this));
            }
            *_aidl_return = true;
            return Status::ok();
        }
        status_t err = codec->reclaim();
        if (err == WOULD_BLOCK) {
            // Give the client a grace period to release the codec itself,
            // then reclaim forcibly.
            ALOGD("Wait for the client to release codec.");
            usleep(kMaxReclaimWaitTimeInUs);
            ALOGD("Try to reclaim again.");
            err = codec->reclaim(true /* force */);
        }
        if (err != OK) {
            ALOGW("ResourceManagerClient failed to release codec with err %d", err);
        }
        *_aidl_return = (err == OK);
        return Status::ok();
    }

    // Reports the codec's component name; leaves *_aidl_return empty when the
    // codec is already gone or has no name yet.
    Status getName(::std::string* _aidl_return) override {
        _aidl_return->clear();
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // codec is already gone.
            return Status::ok();
        }
        AString name;
        if (codec->getName(&name) == OK) {
            *_aidl_return = name.c_str();
        }
        return Status::ok();
    }

    virtual ~ResourceManagerClient() {}

private:
    wp<MediaCodec> mMediaCodec;
    int32_t mPid;

    DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};
// Proxy that mediates all communication between a MediaCodec instance and
// ResourceManagerService: resource registration/removal, reclaim requests,
// and handling of service death.
struct MediaCodec::ResourceManagerServiceProxy : public RefBase {
ResourceManagerServiceProxy(pid_t pid, uid_t uid,
const std::shared_ptr<IResourceManagerClient> &client);
virtual ~ResourceManagerServiceProxy();
// Connects to the service, checks caller permissions, and registers for
// death notifications. Returns OK on success.
status_t init();
// implements DeathRecipient
static void BinderDiedCallback(void* cookie);
void binderDied();
// Set of live proxy cookies; a death notification is dispatched only when
// its cookie is still present here (guarded by sLockCookies).
static Mutex sLockCookies;
static std::set<void*> sCookies;
static void addCookie(void* cookie);
static void removeCookie(void* cookie);
void addResource(const MediaResourceParcel &resource);
void removeResource(const MediaResourceParcel &resource);
void removeClient();
void markClientForPendingRemoval();
// Returns true if the service successfully reclaimed |resources|.
bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
private:
Mutex mLock; // guards mService
pid_t mPid;
uid_t mUid;
std::shared_ptr<IResourceManagerService> mService;
std::shared_ptr<IResourceManagerClient> mClient;
::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
};
// When the caller did not supply a real identity (kNoUid/kNoPid), fall back
// to the binder calling identity.
MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
: mPid(pid), mUid(uid), mClient(client),
mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
if (mUid == MediaCodec::kNoUid) {
mUid = AIBinder_getCallingUid();
}
if (mPid == MediaCodec::kNoPid) {
mPid = AIBinder_getCallingPid();
}
}
MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
// remove the cookie, so any in-flight death notification will get dropped
// by our handler.
removeCookie(this);
Mutex::Autolock _l(mLock);
if (mService != nullptr) {
// Stop listening for service death before dropping our reference.
AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
mService = nullptr;
}
}
// Connects to ResourceManagerService, verifies the caller is allowed to act
// on behalf of (mPid, mUid), and registers for service-death notifications.
status_t MediaCodec::ResourceManagerServiceProxy::init() {
::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
mService = IResourceManagerService::fromBinder(binder);
if (mService == nullptr) {
ALOGE("Failed to get ResourceManagerService");
return UNKNOWN_ERROR;
}
int callerPid = AIBinder_getCallingPid();
int callerUid = AIBinder_getCallingUid();
if (mPid != callerPid || mUid != callerUid) {
// Media processes don't need special permissions to act on behalf of other processes.
if (callerUid != AID_MEDIA) {
char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
if (!checkCallingPermission(String16(permission))) {
ALOGW("%s is required to override the caller's PID for media resource management.",
permission);
return PERMISSION_DENIED;
}
}
}
// Kill clients pending removal.
mService->reclaimResourcesFromClientsPendingRemoval(mPid);
// so our handler will process the death notifications
addCookie(this);
// after this, require mLock whenever using mService
AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
return OK;
}
//static
Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
//static
void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
Mutex::Autolock _l(sLockCookies);
sCookies.insert(cookie);
}
//static
void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
Mutex::Autolock _l(sLockCookies);
sCookies.erase(cookie);
}
//static
// A death notification can race with proxy destruction; only dispatch it when
// the cookie still refers to a live proxy (i.e. it is present in sCookies).
void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
Mutex::Autolock _l(sLockCookies);
if (sCookies.find(cookie) != sCookies.end()) {
auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
thiz->binderDied();
}
}
// Drops the service reference so subsequent proxy calls become no-ops.
void MediaCodec::ResourceManagerServiceProxy::binderDied() {
ALOGW("ResourceManagerService died.");
Mutex::Autolock _l(mLock);
mService = nullptr;
}
// Registers a single resource with the ResourceManagerService (no-op if the
// service is not available).
void MediaCodec::ResourceManagerServiceProxy::addResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources{resource};
    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        mService->addResource(mPid, mUid, getId(mClient), mClient, resources);
    }
}
// Unregisters a single resource from the ResourceManagerService (no-op if the
// service is not available).
void MediaCodec::ResourceManagerServiceProxy::removeResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources{resource};
    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        mService->removeResource(mPid, getId(mClient), resources);
    }
}
// Tells the ResourceManagerService to forget this client entirely.
void MediaCodec::ResourceManagerServiceProxy::removeClient() {
    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        mService->removeClient(mPid, getId(mClient));
    }
}
// Flags this client in the ResourceManagerService as pending removal.
void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        mService->markClientForPendingRemoval(mPid, getId(mClient));
    }
}
// Asks the ResourceManagerService to reclaim |resources| from other clients.
// Returns true only when the call succeeded and the service reports success.
bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
        const std::vector<MediaResourceParcel> &resources) {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {  // nullptr for consistency with the rest of the file
        return false;
    }
    // Initialize the out-parameter so we never read an indeterminate value if
    // the remote call fails without writing it.
    bool success = false;
    Status status = mService->reclaimResource(mPid, resources, &success);
    return status.isOk() && success;
}
////////////////////////////////////////////////////////////////////////////////
MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
////////////////////////////////////////////////////////////////////////////////
// Stand-in surface (consumer name "MediaCodec.release") whose listener
// immediately acquires and releases every queued buffer, so a codec attached
// to it never stalls waiting for buffer space.
class MediaCodec::ReleaseSurface {
public:
explicit ReleaseSurface(uint64_t usage) {
BufferQueue::createBufferQueue(&mProducer, &mConsumer);
mSurface = new Surface(mProducer, false /* controlledByApp */);
struct ConsumerListener : public BnConsumerListener {
ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
mConsumer = consumer;
}
// Drain each frame as soon as it arrives.
void onFrameAvailable(const BufferItem&) override {
BufferItem buffer;
// consume buffer
sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
}
}
// Non-owning reference to the consumer; promoted on each frame.
wp<IGraphicBufferConsumer> mConsumer;
void onBuffersReleased() override {}
void onSidebandStreamChanged() override {}
};
sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
mConsumer->consumerConnect(listener, false);
mConsumer->setConsumerName(String8{"MediaCodec.release"});
mConsumer->setConsumerUsageBits(usage);
}
const sp<Surface> &getSurface() {
return mSurface;
}
private:
sp<IGraphicBufferProducer> mProducer;
sp<IGraphicBufferConsumer> mConsumer;
sp<Surface> mSurface;
};
////////////////////////////////////////////////////////////////////////////////
namespace {
// FourCC message ids used by BufferCallback/CodecCallback below; each is
// carried in the "what" field of the notify message forwarded to MediaCodec.
enum {
kWhatFillThisBuffer = 'fill',
kWhatDrainThisBuffer = 'drai',
kWhatEOS = 'eos ',
kWhatStartCompleted = 'Scom',
kWhatStopCompleted = 'scom',
kWhatReleaseCompleted = 'rcom',
kWhatFlushCompleted = 'fcom',
kWhatError = 'erro',
kWhatComponentAllocated = 'cAll',
kWhatComponentConfigured = 'cCon',
kWhatInputSurfaceCreated = 'isfc',
kWhatInputSurfaceAccepted = 'isfa',
kWhatSignaledInputEOS = 'seos',
kWhatOutputFramesRendered = 'outR',
kWhatOutputBuffersChanged = 'outC',
kWhatFirstTunnelFrameReady = 'ftfR',
};
// Adapts CodecBase's buffer-availability callbacks into AMessage
// notifications posted via a copy of |notify|.
class BufferCallback : public CodecBase::BufferCallback {
public:
explicit BufferCallback(const sp<AMessage> &notify);
virtual ~BufferCallback() = default;
virtual void onInputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) override;
virtual void onOutputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) override;
private:
// Template message duplicated for each notification.
const sp<AMessage> mNotify;
};
BufferCallback::BufferCallback(const sp<AMessage> &notify)
: mNotify(notify) {}
void BufferCallback::onInputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFillThisBuffer);
notify->setSize("index", index);
notify->setObject("buffer", buffer);
notify->post();
}
void BufferCallback::onOutputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatDrainThisBuffer);
notify->setSize("index", index);
notify->setObject("buffer", buffer);
notify->post();
}
// Adapts CodecBase's lifecycle/event callbacks into AMessage notifications
// posted via a copy of |notify|; the "what" field identifies the event.
class CodecCallback : public CodecBase::CodecCallback {
public:
explicit CodecCallback(const sp<AMessage> &notify);
virtual ~CodecCallback() = default;
virtual void onEos(status_t err) override;
virtual void onStartCompleted() override;
virtual void onStopCompleted() override;
virtual void onReleaseCompleted() override;
virtual void onFlushCompleted() override;
virtual void onError(status_t err, enum ActionCode actionCode) override;
virtual void onComponentAllocated(const char *componentName) override;
virtual void onComponentConfigured(
const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
virtual void onInputSurfaceCreated(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat,
const sp<BufferProducerWrapper> &inputSurface) override;
virtual void onInputSurfaceCreationFailed(status_t err) override;
virtual void onInputSurfaceAccepted(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat) override;
virtual void onInputSurfaceDeclined(status_t err) override;
virtual void onSignaledInputEOS(status_t err) override;
virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
virtual void onOutputBuffersChanged() override;
virtual void onFirstTunnelFrameReady() override;
private:
// Template message duplicated for each notification.
const sp<AMessage> mNotify;
};
CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}
void CodecCallback::onEos(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatEOS);
notify->setInt32("err", err);
notify->post();
}
void CodecCallback::onStartCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatStartCompleted);
notify->post();
}
void CodecCallback::onStopCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatStopCompleted);
notify->post();
}
void CodecCallback::onReleaseCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatReleaseCompleted);
notify->post();
}
void CodecCallback::onFlushCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFlushCompleted);
notify->post();
}
void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatError);
notify->setInt32("err", err);
notify->setInt32("actionCode", actionCode);
notify->post();
}
void CodecCallback::onComponentAllocated(const char *componentName) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatComponentAllocated);
notify->setString("componentName", componentName);
notify->post();
}
void CodecCallback::onComponentConfigured(
const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatComponentConfigured);
notify->setMessage("input-format", inputFormat);
notify->setMessage("output-format", outputFormat);
notify->post();
}
void CodecCallback::onInputSurfaceCreated(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat,
const sp<BufferProducerWrapper> &inputSurface) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceCreated);
notify->setMessage("input-format", inputFormat);
notify->setMessage("output-format", outputFormat);
notify->setObject("input-surface", inputSurface);
notify->post();
}
void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceCreated);
notify->setInt32("err", err);
notify->post();
}
void CodecCallback::onInputSurfaceAccepted(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceAccepted);
notify->setMessage("input-format", inputFormat);
notify->setMessage("output-format", outputFormat);
notify->post();
}
void CodecCallback::onInputSurfaceDeclined(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceAccepted);
notify->setInt32("err", err);
notify->post();
}
void CodecCallback::onSignaledInputEOS(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatSignaledInputEOS);
if (err != OK) {
notify->setInt32("err", err);
}
notify->post();
}
void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatOutputFramesRendered);
if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
notify->post();
}
}
void CodecCallback::onOutputBuffersChanged() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatOutputBuffersChanged);
notify->post();
}
void CodecCallback::onFirstTunnelFrameReady() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFirstTunnelFrameReady);
notify->post();
}
// Maps a codec domain onto the resource manager's subtype enumeration.
static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
    if (domain == MediaCodec::DOMAIN_VIDEO) {
        return MediaResourceSubType::kVideoCodec;
    }
    if (domain == MediaCodec::DOMAIN_AUDIO) {
        return MediaResourceSubType::kAudioCodec;
    }
    if (domain == MediaCodec::DOMAIN_IMAGE) {
        return MediaResourceSubType::kImageCodec;
    }
    return MediaResourceSubType::kUnspecifiedSubType;
}
// Maps a codec domain onto the metrics "mode" string (kCodecMode values).
static const char * toCodecMode(MediaCodec::Domain domain) {
    if (domain == MediaCodec::DOMAIN_VIDEO) {
        return kCodecModeVideo;
    }
    if (domain == MediaCodec::DOMAIN_AUDIO) {
        return kCodecModeAudio;
    }
    if (domain == MediaCodec::DOMAIN_IMAGE) {
        return kCodecModeImage;
    }
    return kCodecModeUnknown;
}
} // namespace
////////////////////////////////////////////////////////////////////////////////
// static
// Convenience overload: create by MIME type without a format hint.
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid) {
    sp<AMessage> noFormat;  // null format: no extra matching constraints
    return CreateByType(looper, mime, encoder, err, pid, uid, noFormat);
}
// Creates a codec for |mime|, trying each matching component from
// MediaCodecList in order until one initializes successfully. On failure,
// *err holds the last init error, or NAME_NOT_FOUND if nothing matched.
sp<MediaCodec> MediaCodec::CreateByType(
const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
uid_t uid, sp<AMessage> format) {
Vector<AString> matchingCodecs;
MediaCodecList::findMatchingCodecs(
mime.c_str(),
encoder,
0,
format,
&matchingCodecs);
// Preset the error for the "no matching codec" case; overwritten below on
// each init attempt.
if (err != NULL) {
*err = NAME_NOT_FOUND;
}
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
AString componentName = matchingCodecs[i];
status_t ret = codec->init(componentName);
if (err != NULL) {
*err = ret;
}
if (ret == OK) {
return codec;
}
ALOGD("Allocating component '%s' failed (%d), try next one.",
componentName.c_str(), ret);
}
return NULL;
}
// static
// Creates a codec bound to the exact component |name|; returns NULL (and
// reports the error through *err when provided) if init fails.
sp<MediaCodec> MediaCodec::CreateByComponentName(
        const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
    const status_t ret = codec->init(name);
    if (err != NULL) {
        *err = ret;
    }
    if (ret != OK) {
        return NULL;  // dropping the sp deallocates the codec
    }
    return codec;
}
// static
// Creates a persistent input surface, preferring the Codec2 (CCodec)
// implementation and falling back to an OMX-created surface. Returns NULL if
// neither path succeeds.
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
if (pluginSurface != nullptr) {
return pluginSurface;
}
// Fallback: ask the OMX service for a producer/source pair.
OMXClient client;
if (client.connect() != OK) {
ALOGE("Failed to connect to OMX to create persistent input surface.");
return NULL;
}
sp<IOMX> omx = client.interface();
sp<IGraphicBufferProducer> bufferProducer;
sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;
status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
if (err != OK) {
ALOGE("Failed to create persistent input surface.");
return NULL;
}
return new PersistentSurface(bufferProducer, bufferSource);
}
// Constructs a MediaCodec in UNINITIALIZED state: initializes all state and
// statistics fields, registers a ResourceManagerClient with the resource
// manager proxy, installs default |getCodecBase|/|getCodecInfo| factories
// when the caller supplied none, and creates the metrics record.
MediaCodec::MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
: mState(UNINITIALIZED),
mReleasedByResourceManager(false),
mLooper(looper),
mCodec(NULL),
mReplyID(0),
mFlags(0),
mStickyError(OK),
mSoftRenderer(NULL),
mDomain(DOMAIN_UNKNOWN),
mWidth(0),
mHeight(0),
mRotationDegrees(0),
mConfigColorTransfer(-1),
mHDRStaticInfo(false),
mHDR10PlusInfo(false),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
mDequeueOutputReplyID(0),
mTunneledInputWidth(0),
mTunneledInputHeight(0),
mTunneled(false),
mTunnelPeekState(TunnelPeekState::kLegacyMode),
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
mIsSurfaceToScreen(false),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
mLatestEncodedPtsUs(INT64_MIN),
mFramesEncoded(0),
mNumLowLatencyEnables(0),
mNumLowLatencyDisables(0),
mIsLowLatencyModeOn(false),
mIndexOfFirstFrameWhenLowLatencyOn(-1),
mInputBufferCounter(0),
mGetCodecBase(getCodecBase),
mGetCodecInfo(getCodecInfo) {
// Register a resource-manager client acting on behalf of (pid, uid).
mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid));
if (!mGetCodecBase) {
// Default: resolve the CodecBase implementation via GetCodecBase().
mGetCodecBase = [](const AString &name, const char *owner) {
return GetCodecBase(name, owner);
};
}
if (!mGetCodecInfo) {
// Default: look the name up in the global MediaCodecList.
mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
*info = nullptr;
const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
if (!mcl) {
return NO_INIT; // if called from Java should raise IOException
}
// Also try the name with the ".secure" suffix stripped, in case only
// the non-secure variant is listed.
AString tmp = name;
if (tmp.endsWith(".secure")) {
tmp.erase(tmp.size() - 7, 7);
}
for (const AString &codecName : { name, tmp }) {
ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
if (codecIdx < 0) {
continue;
}
*info = mcl->getCodecInfo(codecIdx);
return OK;
}
return NAME_NOT_FOUND;
};
}
initMediametrics();
}
// The codec must be fully released (state UNINITIALIZED) before destruction;
// unregisters from the resource manager and flushes the metrics record.
MediaCodec::~MediaCodec() {
CHECK_EQ(mState, UNINITIALIZED);
mResourceManagerProxy->removeClient();
flushMediametrics();
}
// Creates the metrics record (once) and resets all latency/low-latency
// statistics; records the codec's lifetime start timestamp.
void MediaCodec::initMediametrics() {
if (mMetricsHandle == 0) {
mMetricsHandle = mediametrics_create(kCodecKeyName);
}
mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
{
// Reset the ring buffer of recent latency samples.
Mutex::Autolock al(mRecentLock);
for (int i = 0; i<kRecentLatencyFrames; i++) {
mRecentSamples[i] = kRecentSampleInvalid;
}
mRecentHead = 0;
}
{
// Reset per-buffer latency tracking and low-latency mode counters.
Mutex::Autolock al(mLatencyLock);
mBuffersInFlight.clear();
mNumLowLatencyEnables = 0;
mNumLowLatencyDisables = 0;
mIsLowLatencyModeOn = false;
mIndexOfFirstFrameWhenLowLatencyOn = -1;
mInputBufferCounter = 0;
}
mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
}
// Copies the current latency, playback-duration, lifetime, encode and
// low-latency statistics into the metrics record (no-op without a handle).
void MediaCodec::updateMediametrics() {
ALOGV("MediaCodec::updateMediametrics");
if (mMetricsHandle == 0) {
return;
}
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());
if (kEmitHistogram) {
// and the histogram itself
std::string hist = mLatencyHist.emit();
mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
}
}
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
if (playbackDurationSec > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
}
if (mLifetimeStartNs > 0) {
nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
lifetime = lifetime / (1000 * 1000); // emitted in ms, truncated not rounded
mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
}
if (mBytesEncoded) {
// Encode-side statistics; the duration spans earliest..latest encoded PTS.
Mutex::Autolock al(mOutputStatsLock);
mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
int64_t duration = 0;
if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
}
mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
}
{
Mutex::Autolock al(mLatencyLock);
mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
mIndexOfFirstFrameWhenLowLatencyOn);
}
mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
#if 0
// enable for short term, only while debugging
updateEphemeralMediametrics(mMetricsHandle);
#endif
}
void MediaCodec::updateHDRFormatMetric() {
int32_t profile = -1;
AString mediaType;
if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
&& mOutputFormat->findString("mime", &mediaType)) {
hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
}
}
// Map (profile, color transfer, mime type) to an hdr_format enum value.
// ST2084 (PQ) content is HDR10 or HDR10+ depending on the codec profile;
// HLG content is reported as HLG except for Dolby Vision.
hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
        const AString &mediaType) {
    if (transfer == COLOR_TRANSFER_ST2084) {
        if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
            if (profile == VP9Profile2HDR) {
                return HDR_FORMAT_HDR10;
            }
            if (profile == VP9Profile2HDR10Plus) {
                return HDR_FORMAT_HDR10PLUS;
            }
            return HDR_FORMAT_NONE;
        }
        if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
            if (profile == AV1ProfileMain10HDR10) {
                return HDR_FORMAT_HDR10;
            }
            if (profile == AV1ProfileMain10HDR10Plus) {
                return HDR_FORMAT_HDR10PLUS;
            }
            return HDR_FORMAT_NONE;
        }
        if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
            if (profile == HEVCProfileMain10HDR10) {
                return HDR_FORMAT_HDR10;
            }
            if (profile == HEVCProfileMain10HDR10Plus) {
                return HDR_FORMAT_HDR10PLUS;
            }
            return HDR_FORMAT_NONE;
        }
        return HDR_FORMAT_NONE;
    }
    if (transfer == COLOR_TRANSFER_HLG) {
        if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
            // TODO: DOLBY format
            return HDR_FORMAT_NONE;
        }
        return HDR_FORMAT_HLG;
    }
    return HDR_FORMAT_NONE;
}
// Build a short-horizon latency histogram from the ring buffer of recent
// samples and record it into the supplied metrics item. Used for debugging
// snapshots rather than the lifetime statistics.
void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
    ALOGD("MediaCodec::updateEphemeralMediametrics()");
    if (item == 0) {
        return;
    }

    Histogram snapshot;
    snapshot.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
    {
        // Copy valid samples out of the ring buffer under the lock.
        Mutex::Autolock lock(mRecentLock);
        for (int idx = 0; idx < kRecentLatencyFrames; ++idx) {
            if (mRecentSamples[idx] != kRecentSampleInvalid) {
                snapshot.insert(mRecentSamples[idx]);
            }
        }
    }

    // Nothing to report for an empty histogram.
    if (snapshot.getCount() == 0) {
        return;
    }
    mediametrics_setInt64(item, kCodecRecentLatencyMax, snapshot.getMax());
    mediametrics_setInt64(item, kCodecRecentLatencyMin, snapshot.getMin());
    mediametrics_setInt64(item, kCodecRecentLatencyAvg, snapshot.getAvg());
    mediametrics_setInt64(item, kCodecRecentLatencyCount, snapshot.getCount());
    if (kEmitHistogram) {
        // and the histogram itself
        std::string hist = snapshot.emit();
        mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
    }
}
// Push the latest statistics into the metrics item, self-record it if it
// carries any data, then destroy the handle so a later init starts fresh.
void MediaCodec::flushMediametrics() {
    updateMediametrics();
    if (mMetricsHandle == 0) {
        return;
    }
    // Avoid recording an empty item.
    if (mediametrics_count(mMetricsHandle) > 0) {
        mediametrics_selfRecord(mMetricsHandle);
    }
    mediametrics_delete(mMetricsHandle);
    mMetricsHandle = 0;
}
// Track requested low-latency mode changes from a (configure/setParameters)
// message for metrics purposes. Negative values are ignored.
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
    int32_t lowLatency = 0;
    if (!msg->findInt32("low-latency", &lowLatency)) {
        return;
    }
    Mutex::Autolock lock(mLatencyLock);
    if (lowLatency > 0) {
        ++mNumLowLatencyEnables;
        // This is just an estimate since low latency mode change happens ONLY at key frame
        mIsLowLatencyModeOn = true;
    } else if (lowLatency == 0) {
        ++mNumLowLatencyDisables;
        // This is just an estimate since low latency mode change happens ONLY at key frame
        mIsLowLatencyModeOn = false;
    }
}
// Human-readable name for a TunnelPeekState; returns default_string for any
// value not covered by the enum (defensive against future additions).
constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
    switch(state) {
        case TunnelPeekState::kLegacyMode:       return "LegacyMode";
        case TunnelPeekState::kEnabledNoBuffer:  return "EnabledNoBuffer";
        case TunnelPeekState::kDisabledNoBuffer: return "DisabledNoBuffer";
        case TunnelPeekState::kBufferDecoded:    return "BufferDecoded";
        case TunnelPeekState::kBufferRendered:   return "BufferRendered";
        case TunnelPeekState::kDisabledQueued:   return "DisabledQueued";
        case TunnelPeekState::kEnabledQueued:    return "EnabledQueued";
        default:                                 return default_string;
    }
}
// Drive the tunnel-peek state machine from a "tunnel-peek" parameter update.
// Disabling moves Enabled* states to their Disabled* counterparts; enabling
// does the reverse, and additionally triggers rendering of an already-decoded
// buffer. Leaving kLegacyMode also informs the codec via the
// "android._tunnel-peek-set-legacy" entry. Transitions not listed are
// ignored (logged at verbose level).
void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
    int32_t tunnelPeek = 0;
    if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
        return;
    }
    TunnelPeekState previousState = mTunnelPeekState;
    if(tunnelPeek == 0){
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                // tell the codec we are leaving legacy mode, then fall through
                // to the normal "enabled -> disabled" transition.
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kEnabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                break;
            case TunnelPeekState::kEnabledQueued:
                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    } else {
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                // tell the codec we are leaving legacy mode, then fall through
                // to the normal "disabled -> enabled" transition.
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kDisabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                break;
            case TunnelPeekState::kDisabledQueued:
                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
                break;
            case TunnelPeekState::kBufferDecoded:
                // a frame is already decoded: ask the codec to render it now.
                msg->setInt32("android._trigger-tunnel-peek", 1);
                mTunnelPeekState = TunnelPeekState::kBufferRendered;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    }
    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}
// Accumulate on-screen playback time from a kWhatOutputFramesRendered
// notification. Each rendered frame carries an "<index>-system-nano" entry
// with its render timestamp; all of them are fed to the accumulator.
// Frames that are not going to the display are not counted.
void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
    int what = 0;
    msg->findInt32("what", &what);
    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
        // Log the unexpected message only once to avoid spam.
        static bool logged = false;
        if (!logged) {
            logged = true;
            // fixed typo: was "kWhatOuputFramesRendered"
            ALOGE("updatePlaybackDuration: expected kWhatOutputFramesRendered (%d)", msg->what());
        }
        return;
    }
    // Playback duration only counts if the buffers are going to the screen.
    if (!mIsSurfaceToScreen) {
        return;
    }
    int64_t renderTimeNs = 0;
    size_t index = 0;
    // Consume every per-frame render timestamp present in the message.
    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
    }
}
// Configure (or reconfigure) the histogram: nbuckets buckets of the given
// width starting at floor. Reuses the existing bucket array when the count
// matches; otherwise reallocates. Returns false (leaving the histogram
// unconfigured) on invalid arguments or allocation failure.
bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
{
    if (nbuckets <= 0 || width <= 0) {
        return false;
    }

    // get histogram buckets
    if (nbuckets == mBucketCount && mBuckets != NULL) {
        // reuse our existing buffer
        memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
    } else {
        // get a new pre-zeroed buffer
        int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof(*mBuckets));
        if (newbuckets == NULL) {
            // Allocation failed: drop any previous buckets and leave the
            // histogram unconfigured (structured replacement for the old
            // goto-based error path; behavior is identical).
            if (mBuckets != NULL) {
                free(mBuckets);
                mBuckets = NULL;
            }
            return false;
        }
        if (mBuckets != NULL)
            free(mBuckets);
        mBuckets = newbuckets;
    }

    mWidth = width;
    mFloor = floor;
    mCeiling = floor + nbuckets * width;
    mBucketCount = nbuckets;

    mMin = INT64_MAX;
    mMax = INT64_MIN;
    mSum = 0;
    mCount = 0;
    mBelow = mAbove = 0;

    return true;
}
// Record one latency sample: update count/sum/min/max, then tally it either
// in the appropriate bucket or in the below/above-range counters.
void MediaCodec::Histogram::insert(int64_t sample)
{
    if (mBuckets == NULL) {
        // histogram is not set up
        return;
    }

    ++mCount;
    mSum += sample;
    mMin = (sample < mMin) ? sample : mMin;
    mMax = (sample > mMax) ? sample : mMax;

    if (sample < mFloor) {
        ++mBelow;
        return;
    }
    if (sample >= mCeiling) {
        ++mAbove;
        return;
    }
    const int64_t slot = (sample - mFloor) / mWidth;
    CHECK(slot < mBucketCount);
    ++mBuckets[slot];
}
// Serialize the histogram as: floor,width,below{bucket0,bucket1,...}above
// An unconfigured histogram emits: 0,0{}0
std::string MediaCodec::Histogram::emit()
{
    char scratch[64];

    snprintf(scratch, sizeof(scratch), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
             mFloor, mWidth, mBelow);
    std::string out(scratch);

    for (int slot = 0; slot < mBucketCount; ++slot) {
        if (slot != 0) {
            out.append(",");
        }
        snprintf(scratch, sizeof(scratch), "%" PRId64, mBuckets[slot]);
        out.append(scratch);
    }

    snprintf(scratch, sizeof(scratch), "}%" PRId64 , mAbove);
    out.append(scratch);
    return out;
}
// when we send a buffer to the codec;
void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
// only enqueue if we have a legitimate time
if (presentationUs <= 0) {
ALOGV("presentation time: %" PRId64, presentationUs);
return;
}
if (mBatteryChecker != nullptr) {
mBatteryChecker->onCodecActivity([this] () {
mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
});
}
if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
mBytesInput += buffer->size();
mFramesInput++;
}
const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
BufferFlightTiming_t startdata = { presentationUs, nowNs };
{
// mutex access to mBuffersInFlight and other stats
Mutex::Autolock al(mLatencyLock);
// XXX: we *could* make sure that the time is later than the end of queue
// as part of a consistency check...
mBuffersInFlight.push_back(startdata);
if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
}
++mInputBufferCounter;
}
}
// Called when a buffer comes back from the codec: pair it (by presentation
// time) with the entry recorded in statsBufferSent(), compute the round-trip
// latency, and feed the lifetime histogram plus the recent-sample ring
// buffer. Also accumulates encoder output statistics.
void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {

    CHECK_NE(mState, UNINITIALIZED);

    // Video encoder output accounting.
    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
        int32_t flags = 0;
        (void) buffer->meta()->findInt32("flags", &flags);

        // some of these frames, we don't want to count
        // standalone EOS.... has an invalid timestamp
        if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
            mBytesEncoded += buffer->size();
            mFramesEncoded++;

            Mutex::Autolock al(mOutputStatsLock);
            int64_t timeUs = 0;
            if (buffer->meta()->findInt64("timeUs", &timeUs)) {
                if (timeUs > mLatestEncodedPtsUs) {
                    mLatestEncodedPtsUs = timeUs;
                }
                // can't chain as an else-if or this never triggers
                if (timeUs < mEarliestEncodedPtsUs) {
                    mEarliestEncodedPtsUs = timeUs;
                }
            }
        }
    }

    // mutex access to mBuffersInFlight and other stats
    Mutex::Autolock al(mLatencyLock);


    // how long this buffer took for the round trip through the codec
    // NB: pipelining can/will make these times larger. e.g., if each packet
    // is always 2 msec and we have 3 in flight at any given time, we're going to
    // see "6 msec" as an answer.

    // ignore stuff with no presentation time
    if (presentationUs <= 0) {
        ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
        mLatencyUnknown++;
        return;
    }

    if (mBatteryChecker != nullptr) {
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
        });
    }

    // Walk the in-flight queue (ordered by enqueue time): drop entries older
    // than this output, match on equality, or give up if the head is newer.
    BufferFlightTiming_t startdata;
    bool valid = false;
    while (mBuffersInFlight.size() > 0) {
        startdata = *mBuffersInFlight.begin();
        ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
              startdata.presentationUs, startdata.startedNs);
        if (startdata.presentationUs == presentationUs) {
            // a match
            ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            valid = true;
            break;
        } else if (startdata.presentationUs < presentationUs) {
            // we must have missed the match for this, drop it and keep looking
            ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            continue;
        } else {
            // head is after, so we don't have a frame for ourselves
            ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
                  " we have nothing to pair with",
                  startdata.presentationUs, presentationUs);
            mLatencyUnknown++;
            return;
        }
    }
    if (!valid) {
        ALOGV("-- empty queue, so ignore that.");
        mLatencyUnknown++;
        return;
    }

    // now start our calculations
    const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
    // round-trip latency in microseconds, rounded to nearest
    int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;

    mLatencyHist.insert(latencyUs);

    // push into the recent samples
    {
        Mutex::Autolock al(mRecentLock);

        if (mRecentHead >= kRecentLatencyFrames) {
            mRecentHead = 0;
        }
        mRecentSamples[mRecentHead++] = latencyUs;
    }
}
// static
// Post a message to the looper and block for its reply. Returns the post
// error if delivery failed; otherwise the "err" value from the reply
// (absence of "err" means success).
status_t MediaCodec::PostAndAwaitResponse(
        const sp<AMessage> &msg, sp<AMessage> *response) {
    const status_t postErr = msg->postAndAwaitResponse(response);
    if (postErr != OK) {
        return postErr;
    }

    status_t replyErr;
    if (!(*response)->findInt32("err", &replyErr)) {
        replyErr = OK;
    }
    return replyErr;
}
void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
PostReplyWithError(replyID, err);
}
void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
int32_t finalErr = err;
if (mReleasedByResourceManager) {
// override the err code if MediaCodec has been released by ResourceManager.
finalErr = DEAD_OBJECT;
}
sp<AMessage> response = new AMessage;
response->setInt32("err", finalErr);
response->postReply(replyID);
}
// Factory for the Codec2 (CCodec) implementation of CodecBase.
static CodecBase *CreateCCodec() {
    return new CCodec;
}
//static
// Choose the CodecBase implementation for a component. The owner string
// from the codec list takes precedence ("default" -> ACodec/OMX,
// "codec2"-prefixed -> CCodec); otherwise fall back to the component-name
// prefix. Returns NULL when neither identifies an implementation.
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
    if (owner) {
        if (strcmp(owner, "default") == 0) {
            return new ACodec;
        }
        if (strncmp(owner, "codec2", 6) == 0) {
            return CreateCCodec();
        }
    }

    if (name.startsWithIgnoreCase("c2.")) {
        return CreateCCodec();
    }
    if (name.startsWithIgnoreCase("omx.")) {
        // at this time only ACodec specifies a mime type.
        return new ACodec;
    }
    return NULL;
}
// One-shot snapshot of the MediaCodecList, keyed by codec name. The map is
// built once in the constructor (via an immediately-invoked lambda) and is
// immutable afterwards, so lookups need no locking.
struct CodecListCache {
    CodecListCache()
        : mCodecInfoMap{[] {
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            size_t count = mcl->countCodecs();
            std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
            for (size_t i = 0; i < count; ++i) {
                sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
                codecInfoMap.emplace(info->getCodecName(), info);
            }
            return codecInfoMap;
        }()} {
    }

    // codec name -> codec info, fixed at construction time.
    const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
};
// Lazily-built singleton codec list snapshot (thread-safe function-local
// static initialization).
static const CodecListCache &GetCodecListCache() {
    static const CodecListCache sTheCache;
    return sTheCache;
}
// First stage of codec setup: resolve the codec info and CodecBase
// implementation for `name`, wire up loopers/callbacks/buffer channel, and
// post kWhatInit -- retrying with resource reclamation when the codec is
// temporarily unavailable. Returns NAME_NOT_FOUND for unknown components.
status_t MediaCodec::init(const AString &name) {
    status_t err = mResourceManagerProxy->init();
    if (err != OK) {
        mCodec = NULL; // remove the codec
        return err;
    }

    // save init parameters for reset
    mInitName = name;

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs, until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.

    mCodecInfo.clear();

    bool secureCodec = false;
    const char *owner = "";
    // "android.filter." components are not in the codec list; skip lookup.
    if (!name.startsWith("android.filter.")) {
        err = mGetCodecInfo(name, &mCodecInfo);
        if (err != OK) {
            mCodec = NULL; // remove the codec.
            return err;
        }
        if (mCodecInfo == nullptr) {
            ALOGE("Getting codec info with name '%s' failed", name.c_str());
            return NAME_NOT_FOUND;
        }
        secureCodec = name.endsWith(".secure");
        // Classify the codec's domain (video/audio/image) from the first
        // recognized media type it supports.
        Vector<AString> mediaTypes;
        mCodecInfo->getSupportedMediaTypes(&mediaTypes);
        for (size_t i = 0; i < mediaTypes.size(); ++i) {
            if (mediaTypes[i].startsWith("video/")) {
                mDomain = DOMAIN_VIDEO;
                break;
            } else if (mediaTypes[i].startsWith("audio/")) {
                mDomain = DOMAIN_AUDIO;
                break;
            } else if (mediaTypes[i].startsWith("image/")) {
                mDomain = DOMAIN_IMAGE;
                break;
            }
        }
        owner = mCodecInfo->getOwnerName();
    }

    mCodec = mGetCodecBase(name, owner);
    if (mCodec == NULL) {
        ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
        return NAME_NOT_FOUND;
    }

    if (mDomain == DOMAIN_VIDEO) {
        // video codec needs dedicated looper
        if (mCodecLooper == NULL) {
            status_t err = OK;
            mCodecLooper = new ALooper;
            mCodecLooper->setName("CodecLooper");
            err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
            if (OK != err) {
                ALOGE("Codec Looper failed to start");
                return err;
            }
        }

        mCodecLooper->registerHandler(mCodec);
    } else {
        mLooper->registerHandler(mCodec);
    }

    mLooper->registerHandler(this);

    // Route codec and buffer-channel notifications back to this handler.
    mCodec->setCallback(
            std::unique_ptr<CodecBase::CodecCallback>(
                    new CodecCallback(new AMessage(kWhatCodecNotify, this))));
    mBufferChannel = mCodec->getBufferChannel();
    mBufferChannel->setCallback(
            std::unique_ptr<CodecBase::BufferCallback>(
                    new BufferCallback(new AMessage(kWhatCodecNotify, this))));

    sp<AMessage> msg = new AMessage(kWhatInit, this);
    if (mCodecInfo) {
        msg->setObject("codecInfo", mCodecInfo);
        // name may be different from mCodecInfo->getCodecName() if we stripped
        // ".secure"
    }
    msg->setString("name", name);

    if (mMetricsHandle != 0) {
        mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
        mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
    }

    if (mDomain == DOMAIN_VIDEO) {
        mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
    }

    // Retry the init, reclaiming codec resources between attempts, in case
    // another process is holding the codec we need.
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}
// Register the async-mode callback message; blocks until the looper thread
// acknowledges the registration.
status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
    sp<AMessage> request = new AMessage(kWhatSetCallback, this);
    request->setMessage("callback", callback);

    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Register a frame-rendered notification target. Fire-and-forget: the
// registration message is posted without awaiting a reply.
status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
    sp<AMessage> request = new AMessage(kWhatSetNotification, this);
    request->setMessage("on-frame-rendered", notify);
    return request->post();
}
// Register a first-tunnel-frame-ready notification target. Fire-and-forget:
// the registration message is posted without awaiting a reply.
status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
    sp<AMessage> request = new AMessage(kWhatSetNotification, this);
    request->setMessage("first-tunnel-frame-ready", notify);
    return request->post();
}
/*
 * MediaFormat Shaping forward declarations
 * including the property name we use for control.
 */
// Shaping is on by default; the sysprop below can turn it off for debugging.
static int enableMediaFormatShapingDefault = 1;
static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
// Defined later in this file; maps format keys per the codec's capability
// tables (forward-declared so configure() can call it).
static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
                      bool reverse);
// Convenience overload: configure without a descrambler (clear content or
// crypto-based protection only). Delegates to the full overload.
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &nativeWindow,
        const sp<ICrypto> &crypto,
        uint32_t flags) {
    // nullptr (instead of NULL) for the absent descrambler, per modern C++.
    return configure(format, nativeWindow, crypto, nullptr, flags);
}
// Full configure: records metrics from the requested format, validates video
// dimensions, optionally runs encoder format shaping, attaches crypto or
// descrambler, and posts kWhatConfigure -- retrying with resource
// reclamation, and resetting the codec on fatal (non-INVALID_OPERATION)
// failures to preserve the historical INITIALIZED-state contract.
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        const sp<IDescrambler> &descrambler,
        uint32_t flags) {

    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
    // TODO: validity check log-session-id: it should be a 32-hex-digit.
    format->findString("log-session-id", &mLogSessionId);

    if (mMetricsHandle != 0) {
        int32_t profile = 0;
        if (format->findInt32("profile", &profile)) {
            mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
        }
        int32_t level = 0;
        if (format->findInt32("level", &level)) {
            mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
        }
        mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
                              (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);

        mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
    }

    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
        // Cache geometry for later validation and metrics.
        format->findInt32("width", &mWidth);
        format->findInt32("height", &mHeight);
        if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
            mRotationDegrees = 0;
        }

        if (mMetricsHandle != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
            mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
            int32_t maxWidth = 0;
            if (format->findInt32("max-width", &maxWidth)) {
                mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
            }
            int32_t maxHeight = 0;
            if (format->findInt32("max-height", &maxHeight)) {
                mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
            }
            int32_t colorFormat = -1;
            if (format->findInt32("color-format", &colorFormat)) {
                mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
            }
            if (mDomain == DOMAIN_VIDEO) {
                float frameRate = -1.0;
                if (format->findFloat("frame-rate", &frameRate)) {
                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
                }
                float captureRate = -1.0;
                if (format->findFloat("capture-rate", &captureRate)) {
                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
                }
                float operatingRate = -1.0;
                if (format->findFloat("operating-rate", &operatingRate)) {
                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
                }
                int32_t priority = -1;
                if (format->findInt32("priority", &priority)) {
                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
                }
            }
            int32_t colorStandard = -1;
            if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
            }
            int32_t colorRange = -1;
            if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
            }
            int32_t colorTransfer = -1;
            if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
                // remembered for HDR-format classification at output time.
                mConfigColorTransfer = colorTransfer;
                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
            }
            HDRStaticInfo info;
            if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
                    && ColorUtils::isHDRStaticInfoValid(&info)) {
                mHDRStaticInfo = true;
            }
        }

        // Prevent possible integer overflow in downstream code.
        if (mWidth < 0 || mHeight < 0 ||
               (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
            ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
            return BAD_VALUE;
        }

    } else {
        // Audio: only channel count and sample rate are interesting here.
        if (mMetricsHandle != 0) {
            int32_t channelCount;
            if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
            }
            int32_t sampleRate;
            if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
            }
        }
    }

    if (flags & CONFIGURE_FLAG_ENCODE) {
        // Encoder format shaping (minimum-quality enforcement), gated by a
        // debug sysprop.
        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
                                                 enableMediaFormatShapingDefault);
        if (!enableShaping) {
            ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
            if (mMetricsHandle != 0) {
                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
            }
        } else {
            (void) shapeMediaFormat(format, flags);
            // XXX: do we want to do this regardless of shaping enablement?
            mapFormat(mComponentName, format, nullptr, false);
        }
    }

    // push min/max QP to MediaMetrics after shaping
    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
        int32_t qpIMin = -1;
        if (format->findInt32("video-qp-i-min", &qpIMin)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
        }
        int32_t qpIMax = -1;
        if (format->findInt32("video-qp-i-max", &qpIMax)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
        }
        int32_t qpPMin = -1;
        if (format->findInt32("video-qp-p-min", &qpPMin)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
        }
        int32_t qpPMax = -1;
        if (format->findInt32("video-qp-p-max", &qpPMax)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
        }
        int32_t qpBMin = -1;
        if (format->findInt32("video-qp-b-min", &qpBMin)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
        }
        int32_t qpBMax = -1;
        if (format->findInt32("video-qp-b-max", &qpBMax)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
        }
    }

    updateLowLatency(format);

    msg->setMessage("format", format);
    msg->setInt32("flags", flags);
    msg->setObject("surface", surface);

    if (crypto != NULL || descrambler != NULL) {
        // crypto and descrambler are mutually exclusive; crypto wins.
        if (crypto != NULL) {
            msg->setPointer("crypto", crypto.get());
        } else {
            msg->setPointer("descrambler", descrambler.get());
        }
        if (mMetricsHandle != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
        }
    } else if (mFlags & kFlagIsSecure) {
        ALOGW("Crypto or descrambler should be given for secure codec");
    }

    // save msg for reset
    mConfigureMsg = msg;

    sp<AMessage> callback = mCallback;

    status_t err;
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
            toMediaResourceSubType(mDomain)));
    // Don't know the buffer size at this point, but it's fine to use 1 because
    // the reclaimResource call doesn't consider the requester's buffer size for now.
    resources.push_back(MediaResource::GraphicMemoryResource(1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (err != OK && err != INVALID_OPERATION) {
            if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
            // MediaCodec now set state to UNINITIALIZED upon any fatal error.
            // To maintain backward-compatibility, do a reset() to put codec
            // back into INITIALIZED state.
            // But don't reset if the err is INVALID_OPERATION, which means
            // the configure failure is due to wrong state.

            ALOGE("configure failed with err 0x%08x, resetting...", err);
            status_t err2 = reset();
            if (err2 != OK) {
                ALOGE("retrying configure: failed to reset codec (%08x)", err2);
                break;
            }
            if (callback != nullptr) {
                // reset() dropped the callback; restore it before retrying.
                err2 = setCallback(callback);
                if (err2 != OK) {
                    ALOGE("retrying configure: failed to set callback (%08x)", err2);
                    break;
                }
            }
        }
        if (!isResourceError(err)) {
            break;
        }
    }

    return err;
}
// Media Format Shaping support
//
// Function table of the dynamically loaded libmediaformatshaper; NULL until
// connectFormatShaper() succeeds (or permanently if loading failed).
static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
// Whether this device looks like a handheld (set once in connectFormatShaper);
// shaping/min-quality enforcement only applies to handhelds.
static bool sIsHandheld = true;
// Load libmediaformatshaper exactly once (std::call_once): prefer the copy
// inside the com.android.media mainline namespace, falling back to the
// system partition. Also determines, via PackageManager feature flags,
// whether this device is a handheld (stored in sIsHandheld). Always returns
// true; failure simply leaves sShaperOps NULL so shaping is skipped.
static bool connectFormatShaper() {
    static std::once_flag sCheckOnce;

    ALOGV("connectFormatShaper...");

    std::call_once(sCheckOnce, [&](){

        void *libHandle = NULL;
        nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);

        // prefer any copy in the mainline module
        //
        android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
        AString libraryName = "libmediaformatshaper.so";

        if (mediaNs != NULL) {
            static const android_dlextinfo dlextinfo = {
                .flags = ANDROID_DLEXT_USE_NAMESPACE,
                .library_namespace = mediaNs,
            };

            AString libraryMainline = "/apex/com.android.media/";
#if __LP64__
            libraryMainline.append("lib64/");
#else
            libraryMainline.append("lib/");
#endif
            libraryMainline.append(libraryName);

            libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
                                                 &dlextinfo);

            if (libHandle != NULL) {
                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
                                dlsym(libHandle, "shaper_ops");
            } else {
                ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
                      libraryMainline.c_str());
            }
        } else {
            ALOGV("connectFormatShaper: couldn't find media namespace.");
        }

        // fall back to the system partition, if present.
        //
        if (sShaperOps == NULL) {

            libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);

            if (libHandle != NULL) {
                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
                                dlsym(libHandle, "shaper_ops");
            } else {
                ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
            }
        }

        // Reject a library whose ABI version we don't understand.
        if (sShaperOps != nullptr
            && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
            ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
                  sShaperOps->version);
            sShaperOps = nullptr;
        }

        if (sShaperOps != nullptr) {
            ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
        }

        nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
        ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
              (loading_finished - loading_started)/1000);


        // we also want to know whether this is a handheld device
        // start with assumption that the device is handheld.
        sIsHandheld = true;
        sp<IServiceManager> serviceMgr = defaultServiceManager();
        sp<content::pm::IPackageManagerNative> packageMgr;
        if (serviceMgr.get() != nullptr) {
            sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
            packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
        }
        // if we didn't get serviceMgr, we'll leave packageMgr as default null
        if (packageMgr != nullptr) {

            // MUST have these
            static const String16 featuresNeeded[] = {
                String16("android.hardware.touchscreen")
            };
            // these must be present to be a handheld
            for (::android::String16 required : featuresNeeded) {
                bool hasFeature = false;
                binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
                if (!status.isOk()) {
                    ALOGE("%s: hasSystemFeature failed: %s",
                          __func__, status.exceptionMessage().c_str());
                    continue;
                }
                ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
                if (!hasFeature) {
                    ALOGV("... which means we are not handheld");
                    sIsHandheld = false;
                    break;
                }
            }

            // MUST NOT have these
            static const String16 featuresDisallowed[] = {
                String16("android.hardware.type.automotive"),
                String16("android.hardware.type.television"),
                String16("android.hardware.type.watch")
            };
            // any of these present -- we aren't a handheld
            for (::android::String16 forbidden : featuresDisallowed) {
                bool hasFeature = false;
                binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
                if (!status.isOk()) {
                    ALOGE("%s: hasSystemFeature failed: %s",
                          __func__, status.exceptionMessage().c_str());
                    continue;
                }
                ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
                if (hasFeature) {
                    ALOGV("... which means we are not handheld");
                    sIsHandheld = false;
                    break;
                }
            }

        }

    });

    return true;
}
#if 0
// a construct to force the above dlopen() to run very early.
// goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
// failure of this means that cold start of those apps is slower by the time to dlopen()
// TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
//
static bool forceEarlyLoadingShaper = connectFormatShaper();
#endif
// parse the codec's properties: mapping, whether it meets min quality, etc
// and pass them into the video quality code
//
// Walks the capability details for mediaType and forwards three kinds of
// entries into the shaper handle:
//   "feature-*" (int)    -> setFeature
//   "tuning-*"  (string) -> setTuning
//   "mapping-<kind>-<key>" (string) -> setMap(kind, key, target)
// Also tags the handle with the handheld/eligibility features when this
// device was classified as a handheld. Caller must have connected sShaperOps.
static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
        sp<MediaCodecInfo> codecInfo, AString mediaType) {

    sp<MediaCodecInfo::Capabilities> capabilities =
                    codecInfo->getCapabilitiesFor(mediaType.c_str());
    if (capabilities == nullptr) {
        ALOGI("no capabilities as part of the codec?");
    } else {
        const sp<AMessage> &details = capabilities->getDetails();
        AString mapTarget;
        int count = details->countEntries();
        for(int ix = 0; ix < count; ix++) {
            AMessage::Type entryType;
            const char *mapSrc = details->getEntryNameAt(ix, &entryType);
            // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
            //
            static const char *featurePrefix = "feature-";
            static const int featurePrefixLen = strlen(featurePrefix);
            static const char *tuningPrefix = "tuning-";
            static const int tuningPrefixLen = strlen(tuningPrefix);
            static const char *mappingPrefix = "mapping-";
            static const int mappingPrefixLen = strlen(mappingPrefix);

            if (mapSrc == NULL) {
                continue;
            } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
                int32_t intValue;
                if (details->findInt32(mapSrc, &intValue)) {
                    ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
                    (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
                                                   intValue);
                }
                continue;
            } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
                AString value;
                if (details->findString(mapSrc, &value)) {
                    ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
                    (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
                                                  value.c_str());
                }
                continue;
            } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
                AString target;
                if (details->findString(mapSrc, &target)) {
                    ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
                          target.c_str());
                    // key is really "kind-key"
                    // separate that, so setMap() sees the triple  kind, key, value
                    const char *kind = &mapSrc[mappingPrefixLen];
                    const char *sep = strchr(kind, '-');
                    if (sep != NULL) {
                        // FIX: only derive `key` once we know sep != NULL;
                        // previously `sep+1` was computed unconditionally,
                        // which is undefined behavior when strchr returns NULL.
                        const char *key = sep + 1;
                        std::string xkind = std::string(kind, sep - kind);
                        (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
                                                   key, target.c_str());
                    }
                }
            }
        }
    }

    // we also carry in the codec description whether we are on a handheld device.
    // this info is eventually used by both the Codec and the C2 machinery to inform
    // the underlying codec whether to do any shaping.
    //
    if (sIsHandheld) {
        // set if we are indeed a handheld device (or in future 'any eligible device'
        // missing on devices that aren't eligible for minimum quality enforcement.
        (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
        // strictly speaking, it's a tuning, but those are strings and feature stores int
        (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
    }
}
// Find or build+register the shaper node for this codec+mediaType pair.
// Idempotent: if another thread already registered a shaper, we use theirs.
// Always returns OK — shaping is best-effort and never blocks configure().
status_t MediaCodec::setupFormatShaper(AString mediaType) {
    ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
          mComponentName.c_str(), mediaType.c_str());
    nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
    // someone might have beaten us to it.
    mediaformatshaper::shaperHandle_t shaperHandle;
    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
    if (shaperHandle != nullptr) {
        ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
        return OK;
    }
    // we get to build & register one
    shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
    if (shaperHandle == nullptr) {
        // fixed typo: "cocodec" -> "codec"
        ALOGW("unable to create a shaper for codec %s mediaType %s",
              mComponentName.c_str(), mediaType.c_str());
        return OK;
    }
    (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
    // registerShaper may hand back a different handle if we raced with another
    // registration; use whatever it returns.
    shaperHandle = sShaperOps->registerShaper(shaperHandle,
                                              mComponentName.c_str(), mediaType.c_str());
    nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
    ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
          mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
    return OK;
}
// Format Shaping
//    Mapping and Manipulation of encoding parameters
//
// All of these decisions are pushed into the shaper instead of here within MediaCodec.
// this includes decisions based on whether the codec implements minimum quality bars
// itself or needs to be shaped outside of the codec.
// This keeps all those decisions in one place.
// It also means that we push some extra decision information (is this a handheld device
// or one that is otherwise eligible for minimum quality manipulation, which generational
// quality target is in force, etc).  This allows those values to be cached in the
// per-codec structures that are done 1 time within a process instead of for each
// codec instantiation.
//
// Run the configured format through the shaper and fold any deltas it
// produced back into 'format'.  Records shaping metrics (count of changes
// plus the pre-shaping values of bitrate and QP bounds).  Best-effort:
// every early-out returns OK so configure() proceeds unshaped.
status_t MediaCodec::shapeMediaFormat(
            const sp<AMessage> &format,
            uint32_t flags) {
    ALOGV("shapeMediaFormat entry");
    if (!(flags & CONFIGURE_FLAG_ENCODE)) {
        ALOGW("shapeMediaFormat: not encoder");
        return OK;
    }
    if (mCodecInfo == NULL) {
        ALOGW("shapeMediaFormat: no codecinfo");
        return OK;
    }
    AString mediaType;
    if (!format->findString("mime", &mediaType)) {
        ALOGW("shapeMediaFormat: no mediaType information");
        return OK;
    }
    // make sure we have the function entry points for the shaper library
    //
    connectFormatShaper();
    if (sShaperOps == nullptr) {
        ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
        return OK;
    }
    // find the shaper information for this codec+mediaType pair
    //
    mediaformatshaper::shaperHandle_t shaperHandle;
    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
    if (shaperHandle == nullptr) {
        setupFormatShaper(mediaType);
        shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
    }
    if (shaperHandle == nullptr) {
        ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
              mComponentName.c_str(), mediaType.c_str());
        return OK;
    }
    // run the shaper
    //
    ALOGV("Shaping input: %s", format->debugString(0).c_str());
    sp<AMessage> updatedFormat = format->dup();
    AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
    int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
    if (result == 0) {
        AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
        sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
        size_t changeCount = deltas->countEntries();
        ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
        if (mMetricsHandle != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
        }
        if (changeCount > 0) {
            if (mMetricsHandle != 0) {
                // save some old properties before we fold in the new ones;
                // table-driven to avoid seven copy-pasted stanzas
                struct OriginalField {
                    const char *formatKey;
                    const char *metricsKey;
                };
                const OriginalField kOriginalFields[] = {
                    { KEY_BIT_RATE,              kCodecOriginalBitrate },
                    { "original-video-qp-i-min", kCodecOriginalVideoQPIMin },
                    { "original-video-qp-i-max", kCodecOriginalVideoQPIMax },
                    { "original-video-qp-p-min", kCodecOriginalVideoQPPMin },
                    { "original-video-qp-p-max", kCodecOriginalVideoQPPMax },
                    { "original-video-qp-b-min", kCodecOriginalVideoQPBMin },
                    { "original-video-qp-b-max", kCodecOriginalVideoQPBMax },
                };
                for (const OriginalField &field : kOriginalFields) {
                    int32_t value;
                    if (format->findInt32(field.formatKey, &value)) {
                        mediametrics_setInt32(mMetricsHandle, field.metricsKey, value);
                    }
                }
            }
            // NB: for any field in both format and deltas, the deltas copy wins
            format->extend(deltas);
        }
    }
    AMediaFormat_delete(updatedNdkFormat);
    return OK;
}
// Rename format entries per the shaper's key mappings for this codec.
// 'kind' selects a mapping group (nullptr means all); 'reverse' applies the
// inverse mapping (shaper-name back to public name).  Mutates 'format' in
// place; silently does nothing if shaper hooks / mappings are unavailable.
static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
                      bool reverse) {
    AString mediaType;
    if (!format->findString("mime", &mediaType)) {
        ALOGV("mapFormat: no mediaType information");
        return;
    }
    ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
          mediaType.c_str(), kind ? kind : "<all>", reverse);
    // make sure we have the function entry points for the shaper library
    //
#if 0
    // let's play the faster "only do mapping if we've already loaded the library
    connectFormatShaper();
#endif
    if (sShaperOps == nullptr) {
        ALOGV("mapFormat: no MediaFormatShaper hooks available");
        return;
    }
    // find the shaper information for this codec+mediaType pair
    //
    mediaformatshaper::shaperHandle_t shaperHandle;
    shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
    if (shaperHandle == nullptr) {
        ALOGV("mapFormat: no shaper handle");
        return;
    }
    // mappings is a heap-allocated, null-terminated array of (from, to) pairs
    const char **mappings;
    if (reverse)
        mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
    else
        mappings = sShaperOps->getMappings(shaperHandle, kind);
    if (mappings == nullptr) {
        ALOGV("no mappings returned");
        return;
    }
    ALOGV("Pre-mapping: %s", format->debugString(2).c_str());
    // do the mapping
    //
    // size_t, not int: findEntryByName/countEntries are size_t, and a
    // signed/unsigned comparison here invites conversion surprises
    const size_t numEntries = format->countEntries();
    for (int i = 0; ; i += 2) {
        if (mappings[i] == nullptr) {
            break;
        }
        size_t ix = format->findEntryByName(mappings[i]);
        if (ix < numEntries) {
            ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
            status_t status = format->setEntryNameAt(ix, mappings[i+1]);
            if (status != OK) {
                ALOGW("Unable to map from '%s' to '%s': status %d",
                      mappings[i], mappings[i+1], status);
            }
        }
    }
    ALOGV("Post-mapping: %s", format->debugString(2).c_str());
    // reclaim the mapping memory
    for (int i = 0; ; i += 2) {
        if (mappings[i] == nullptr) {
            break;
        }
        free((void*)mappings[i]);
        free((void*)mappings[i + 1]);
    }
    free(mappings);
    mappings = nullptr;
}
//
// end of Format Shaping hooks within MediaCodec
//
// Ask the handler thread to drop our crypto (DRM) object and wait for the
// result; returns the handler's status, or the post error if the message
// could not be delivered.
status_t MediaCodec::releaseCrypto()
{
    ALOGV("releaseCrypto");
    sp<AMessage> request = new AMessage(kWhatDrmReleaseCrypto, this);
    sp<AMessage> response;
    status_t status = request->postAndAwaitResponse(&response);
    if (status != OK || response == NULL) {
        ALOGE("releaseCrypto err: %d", status);
        return status;
    }
    CHECK(response->findInt32("status", &status));
    ALOGV("releaseCrypto ret: %d ", status);
    return status;
}
// Handler-side counterpart of releaseCrypto(): detach the crypto object
// from the buffer channel, clear our reference, and reply with the status
// (OK, or INVALID_OPERATION if there was no crypto object to release).
void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
{
    status_t status = INVALID_OPERATION;
    if (mCrypto != NULL) {
        ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
        mBufferChannel->setCrypto(NULL);
        // demoted from ALOGD per the long-standing TODO
        ALOGV("onReleaseCrypto: [before clear] mCrypto: %p (%d)",
              mCrypto.get(), mCrypto->getStrongCount());
        mCrypto.clear();
        status = OK;
    }
    else {
        ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
    }
    sp<AMessage> response = new AMessage;
    response->setInt32("status", status);
    sp<AReplyToken> replyID;
    CHECK(msg->senderAwaitsResponse(&replyID));
    response->postReply(replyID);
}
// Attach a persistent input surface; synchronous round-trip to the handler.
status_t MediaCodec::setInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> request = new AMessage(kWhatSetInputSurface, this);
    request->setObject("input-surface", surface.get());
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Replace the output surface; synchronous round-trip to the handler.
status_t MediaCodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> request = new AMessage(kWhatSetSurface, this);
    request->setObject("surface", surface);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Ask the codec to create an input surface; on success unwrap the
// IGraphicBufferProducer carried back in the reply for the caller.
status_t MediaCodec::createInputSurface(
        sp<IGraphicBufferProducer>* bufferProducer) {
    sp<AMessage> request = new AMessage(kWhatCreateInputSurface, this);
    sp<AMessage> response;
    const status_t err = PostAndAwaitResponse(request, &response);
    if (err != NO_ERROR) {
        ALOGW("createInputSurface failed, err=%d", err);
        return err;
    }
    // unwrap the sp<IGraphicBufferProducer>
    sp<RefBase> obj;
    CHECK(response->findObject("input-surface", &obj));
    sp<BufferProducerWrapper> wrapper(
            static_cast<BufferProducerWrapper*>(obj.get()));
    *bufferProducer = wrapper->getBufferProducer();
    return err;
}
uint64_t MediaCodec::getGraphicBufferSize() {
if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
return 0;
}
uint64_t size = 0;
size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
for (size_t i = 0; i < portNum; ++i) {
// TODO: this is just an estimation, we should get the real buffer size from ACodec.
size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
}
return size;
}
// Start the codec.  On a resource error this retries up to kMaxRetry times:
// each retry asks the resource manager to reclaim codec and graphic-memory
// resources from other clients, then resets and re-configures this codec
// (re-installing the async callback if one was set) before posting
// kWhatStart again.
status_t MediaCodec::start() {
    sp<AMessage> msg = new AMessage(kWhatStart, this);
    // saved async-callback message, restored after a reset-for-reclaim
    sp<AMessage> callback;
    status_t err;
    // resources we may ask the ResourceManager to reclaim on our behalf
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
            toMediaResourceSubType(mDomain)));
    // Don't know the buffer size at this point, but it's fine to use 1 because
    // the reclaimResource call doesn't consider the requester's buffer size for now.
    resources.push_back(MediaResource::GraphicMemoryResource(1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
            // Recover codec from previous error before retry start.
            err = reset();
            if (err != OK) {
                ALOGE("retrying start: failed to reset codec");
                break;
            }
            sp<AMessage> response;
            err = PostAndAwaitResponse(mConfigureMsg, &response);
            if (err != OK) {
                ALOGE("retrying start: failed to configure codec");
                break;
            }
            if (callback != nullptr) {
                err = setCallback(callback);
                if (err != OK) {
                    ALOGE("retrying start: failed to set callback");
                    break;
                }
                ALOGD("succeed to set callback for reclaim");
            }
        }
        // Keep callback message after the first iteration if necessary.
        if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
            callback = mCallback;
            ALOGD("keep callback message for reclaim");
        }
        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        // only resource errors are worth a reclaim-and-retry cycle
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}
// Synchronously stop the codec via the handler thread.
status_t MediaCodec::stop() {
    sp<AMessage> request = new AMessage(kWhatStop, this);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
bool MediaCodec::hasPendingBuffer(int portIndex) {
return std::any_of(
mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
[](const BufferInfo &info) { return info.mOwnedByClient; });
}
// True if the client still owns a buffer on either port.
bool MediaCodec::hasPendingBuffer() {
    if (hasPendingBuffer(kPortIndexInput)) {
        return true;
    }
    return hasPendingBuffer(kPortIndexOutput);
}
// Release the codec on behalf of the resource manager.  -ENOENT from the
// post means the looper already went away, which counts as success.
status_t MediaCodec::reclaim(bool force) {
    ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
    sp<AMessage> request = new AMessage(kWhatRelease, this);
    request->setInt32("reclaimed", 1);
    request->setInt32("force", force ? 1 : 0);
    sp<AMessage> response;
    status_t err = PostAndAwaitResponse(request, &response);
    if (err == -ENOENT) {
        ALOGD("MediaCodec looper is gone, skip reclaim");
        err = OK;
    }
    return err;
}
// Synchronously release the codec and its resources.
status_t MediaCodec::release() {
    sp<AMessage> request = new AMessage(kWhatRelease, this);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Release the codec, delivering completion via 'notify' instead of blocking
// the caller on the actual teardown.
status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
    sp<AMessage> request = new AMessage(kWhatRelease, this);
    request->setMessage("async", notify);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Return the codec to its freshly-initialized state.
status_t MediaCodec::reset() {
    /* When external-facing MediaCodec object is created,
       it is already initialized. Thus, reset is essentially
       release() followed by init(), plus clearing the state */
    status_t err = release();
    // unregister handlers
    if (mCodec != NULL) {
        // the codec may run on its own looper or share ours
        if (mCodecLooper != NULL) {
            mCodecLooper->unregisterHandler(mCodec->id());
        } else {
            mLooper->unregisterHandler(mCodec->id());
        }
        mCodec = NULL;
    }
    mLooper->unregisterHandler(id());
    mFlags = 0; // clear all flags
    mStickyError = OK;
    // reset state not reset by setState(UNINITIALIZED)
    mDequeueInputReplyID = 0;
    mDequeueOutputReplyID = 0;
    mDequeueInputTimeoutGeneration = 0;
    mDequeueOutputTimeoutGeneration = 0;
    mHaveInputSurface = false;
    // re-init with the same component only if the release succeeded;
    // otherwise surface the release error to the caller
    if (err == OK) {
        err = init(mInitName);
    }
    return err;
}
// Queue a clear (non-secure) input buffer.  errorDetailMsg, when supplied,
// is cleared here and filled in by the handler on failure.
status_t MediaCodec::queueInputBuffer(
        size_t index,
        size_t offset,
        size_t size,
        int64_t presentationTimeUs,
        uint32_t flags,
        AString *errorDetailMsg) {
    if (errorDetailMsg != NULL) {
        errorDetailMsg->clear();
    }
    sp<AMessage> request = new AMessage(kWhatQueueInputBuffer, this);
    request->setSize("index", index);
    request->setSize("offset", offset);
    request->setSize("size", size);
    request->setInt64("timeUs", presentationTimeUs);
    request->setInt32("flags", flags);
    request->setPointer("errorDetailMsg", errorDetailMsg);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Queue an encrypted input buffer described by subsamples, key/iv, crypto
// mode and pattern.  The pointer arguments must stay valid until the
// handler consumes the message (synchronous: we wait for the reply).
status_t MediaCodec::queueSecureInputBuffer(
        size_t index,
        size_t offset,
        const CryptoPlugin::SubSample *subSamples,
        size_t numSubSamples,
        const uint8_t key[16],
        const uint8_t iv[16],
        CryptoPlugin::Mode mode,
        const CryptoPlugin::Pattern &pattern,
        int64_t presentationTimeUs,
        uint32_t flags,
        AString *errorDetailMsg) {
    if (errorDetailMsg != NULL) {
        errorDetailMsg->clear();
    }
    sp<AMessage> request = new AMessage(kWhatQueueInputBuffer, this);
    request->setSize("index", index);
    request->setSize("offset", offset);
    request->setPointer("subSamples", (void *)subSamples);
    request->setSize("numSubSamples", numSubSamples);
    request->setPointer("key", (void *)key);
    request->setPointer("iv", (void *)iv);
    request->setInt32("mode", mode);
    request->setInt32("encryptBlocks", pattern.mEncryptBlocks);
    request->setInt32("skipBlocks", pattern.mSkipBlocks);
    request->setInt64("timeUs", presentationTimeUs);
    request->setInt32("flags", flags);
    request->setPointer("errorDetailMsg", errorDetailMsg);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Queue a C2Buffer directly (block-model path).  Optional per-frame tunings
// ride along only when non-empty.
status_t MediaCodec::queueBuffer(
        size_t index,
        const std::shared_ptr<C2Buffer> &buffer,
        int64_t presentationTimeUs,
        uint32_t flags,
        const sp<AMessage> &tunings,
        AString *errorDetailMsg) {
    if (errorDetailMsg != NULL) {
        errorDetailMsg->clear();
    }
    sp<AMessage> request = new AMessage(kWhatQueueInputBuffer, this);
    request->setSize("index", index);
    // wrap the shared_ptr so it can travel inside an AMessage
    sp<WrapperObject<std::shared_ptr<C2Buffer>>> wrapped{
        new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
    request->setObject("c2buffer", wrapped);
    request->setInt64("timeUs", presentationTimeUs);
    request->setInt32("flags", flags);
    if (tunings && tunings->countEntries() > 0) {
        request->setMessage("tunings", tunings);
    }
    request->setPointer("errorDetailMsg", errorDetailMsg);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Queue encrypted data held in HIDL shared memory (block-model secure
// path).  Mirrors queueSecureInputBuffer but carries the memory object and
// optional per-frame tunings.
status_t MediaCodec::queueEncryptedBuffer(
        size_t index,
        const sp<hardware::HidlMemory> &buffer,
        size_t offset,
        const CryptoPlugin::SubSample *subSamples,
        size_t numSubSamples,
        const uint8_t key[16],
        const uint8_t iv[16],
        CryptoPlugin::Mode mode,
        const CryptoPlugin::Pattern &pattern,
        int64_t presentationTimeUs,
        uint32_t flags,
        const sp<AMessage> &tunings,
        AString *errorDetailMsg) {
    if (errorDetailMsg != NULL) {
        errorDetailMsg->clear();
    }
    sp<AMessage> request = new AMessage(kWhatQueueInputBuffer, this);
    request->setSize("index", index);
    // wrap the HidlMemory handle so it can travel inside an AMessage
    sp<WrapperObject<sp<hardware::HidlMemory>>> wrapped{
        new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
    request->setObject("memory", wrapped);
    request->setSize("offset", offset);
    request->setPointer("subSamples", (void *)subSamples);
    request->setSize("numSubSamples", numSubSamples);
    request->setPointer("key", (void *)key);
    request->setPointer("iv", (void *)iv);
    request->setInt32("mode", mode);
    request->setInt32("encryptBlocks", pattern.mEncryptBlocks);
    request->setInt32("skipBlocks", pattern.mSkipBlocks);
    request->setInt64("timeUs", presentationTimeUs);
    request->setInt32("flags", flags);
    if (tunings && tunings->countEntries() > 0) {
        request->setMessage("tunings", tunings);
    }
    request->setPointer("errorDetailMsg", errorDetailMsg);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Block up to timeoutUs for an available input buffer; on success *index
// receives its slot.
status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
    sp<AMessage> request = new AMessage(kWhatDequeueInputBuffer, this);
    request->setInt64("timeoutUs", timeoutUs);
    sp<AMessage> response;
    const status_t err = PostAndAwaitResponse(request, &response);
    if (err != OK) {
        return err;
    }
    CHECK(response->findSize("index", index));
    return OK;
}
// Block up to timeoutUs for output; on success fills in the buffer slot,
// data window (offset/size), timestamp and flags from the reply.
status_t MediaCodec::dequeueOutputBuffer(
        size_t *index,
        size_t *offset,
        size_t *size,
        int64_t *presentationTimeUs,
        uint32_t *flags,
        int64_t timeoutUs) {
    sp<AMessage> request = new AMessage(kWhatDequeueOutputBuffer, this);
    request->setInt64("timeoutUs", timeoutUs);
    sp<AMessage> response;
    const status_t err = PostAndAwaitResponse(request, &response);
    if (err != OK) {
        return err;
    }
    CHECK(response->findSize("index", index));
    CHECK(response->findSize("offset", offset));
    CHECK(response->findSize("size", size));
    CHECK(response->findInt64("timeUs", presentationTimeUs));
    CHECK(response->findInt32("flags", (int32_t *)flags));
    return OK;
}
// Render the output buffer to the surface (no explicit timestamp), then
// return it to the codec.
status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
    sp<AMessage> request = new AMessage(kWhatReleaseOutputBuffer, this);
    request->setSize("index", index);
    request->setInt32("render", true);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Render the output buffer at the given media timestamp (ns), then return
// it to the codec.
status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
    sp<AMessage> request = new AMessage(kWhatReleaseOutputBuffer, this);
    request->setSize("index", index);
    request->setInt32("render", true);
    request->setInt64("timestampNs", timestampNs);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Return an output buffer to the codec without rendering it.
status_t MediaCodec::releaseOutputBuffer(size_t index) {
    sp<AMessage> request = new AMessage(kWhatReleaseOutputBuffer, this);
    request->setSize("index", index);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Signal EOS on the input-surface path (no EOS-flagged buffer to queue).
status_t MediaCodec::signalEndOfInputStream() {
    sp<AMessage> request = new AMessage(kWhatSignalEndOfInputStream, this);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Fetch the current output format from the handler thread.
status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
    sp<AMessage> request = new AMessage(kWhatGetOutputFormat, this);
    sp<AMessage> response;
    const status_t err = PostAndAwaitResponse(request, &response);
    if (err != OK) {
        return err;
    }
    CHECK(response->findMessage("format", format));
    return OK;
}
status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
sp<AMessage> response;
status_t err;
if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
return err;
}