// blob: 7d478371cd4e1d79c432ada92babdfa2ec185854
/*
* Copyright 2012, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#include "hidl/HidlSupport.h"
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>
#include <dlfcn.h>
#include <inttypes.h>
#include <future>
#include <random>
#include <set>
#include <string>
#include <C2Buffer.h>
#include "include/SoftwareRenderer.h"
#include <android_media_codec.h>
#include <android/api-level.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
#include <aidl/android/media/BnResourceManagerClient.h>
#include <aidl/android/media/IResourceManagerService.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/dlext.h>
#include <android-base/stringprintf.h>
#include <binder/IMemory.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <hidlmemory/FrameworkUtils.h>
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
#include <media/MediaCodecInfo.h>
#include <media/MediaMetricsItem.h>
#include <media/MediaResource.h>
#include <media/NdkMediaErrorPriv.h>
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/formatshaper/FormatShaper.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BatteryChecker.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/CryptoAsync.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/RenderedFrameInfo.h>
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
#include <server_configurable_flags/get_flags.h>
#include <utils/Singleton.h>
namespace android {
using Status = ::ndk::ScopedAStatus;
using aidl::android::media::BnResourceManagerClient;
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;
using aidl::android::media::ClientInfoParcel;
using server_configurable_flags::GetServerConfigurableFlag;
using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
using JudderEvent = VideoRenderQualityTracker::JudderEvent;
// key for media statistics
static const char *kCodecKeyName = "codec";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
// in frameworks/base/media/java/android/media/MediaCodec.java
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
static const char *kCodecId = "android.media.mediacodec.id";
static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
static const char *kCodecModeAudio = "audio";
static const char *kCodecModeImage = "image";
static const char *kCodecModeUnknown = "unknown";
static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure"; /* 0, 1 */
static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
static const char *kCodecWidth = "android.media.mediacodec.width"; /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height"; /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees"; /* 0/90/180/270 */
static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";
// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";
// Min/Max QP after shaping
static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";
// NB: These are not yet exposed as public Java API constants.
static const char *kCodecCrypto = "android.media.mediacodec.crypto"; /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile"; /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level"; /* 0..n */
static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode"; /* CQ/VBR/CBR */
static const char *kCodecBitrate = "android.media.mediacodec.bitrate"; /* 0..n */
static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate"; /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth"; /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs"; /* 0..n ms*/
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg"; /* in us */
static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
// Low-latency mode transition counters
static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame"; /* 0..n */
static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
// HDR metrics
static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
// array/sync/async/block modes
static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
// max size configured by the app
static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
// max size actually used
static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
// max size suggested by the codec
static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";
// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min"; /* in us */
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
/* -1: shaper disabled
   >=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
// Render metrics
static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
// Freeze
static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
static const char *kCodecFreezeDurationMsHistogram =
        "android.media.mediacodec.freeze-duration-ms-histogram";
static const char *kCodecFreezeDurationMsHistogramBuckets =
        "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
static const char *kCodecFreezeDistanceMsHistogram =
        "android.media.mediacodec.freeze-distance-ms-histogram";
static const char *kCodecFreezeDistanceMsHistogramBuckets =
        "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
// Judder
static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
static const char *kCodecJudderScoreHistogramBuckets =
        "android.media.mediacodec.judder-score-histogram-buckets";
// Freeze event
static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
static const char *kFreezeEventKeyName = "videofreeze";
static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
static const char *kFreezeEventDetailsDurationMs =
        "android.media.mediacodec.freeze.details-duration-ms";
static const char *kFreezeEventDetailsDistanceMs =
        "android.media.mediacodec.freeze.details-distance-ms";
// Judder event
static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
static const char *kJudderEventKeyName = "videojudder";
static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
static const char *kJudderEventDetailsActualDurationUs =
        "android.media.mediacodec.judder.details-actual-duration-us";
static const char *kJudderEventDetailsContentDurationUs =
        "android.media.mediacodec.judder.details-content-duration-us";
static const char *kJudderEventDetailsDistanceMs =
        "android.media.mediacodec.judder.details-distance-ms";
// XXX suppress until we get our representation right
static bool kEmitHistogram = false;
// Alias for a vector of per-access-unit info wrapped so it can travel
// inside an AMessage (see generateFlagsFromAccessUnitInfo below).
typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
// Multi access unit helpers
/**
 * Populate |msg| with the buffer-level "timeUs" and "flags" entries derived
 * from per-access-unit info in |bufferInfos|.
 *
 * The timestamp and initial flags come from the first access unit.  When the
 * buffer carries multiple access units, flags that are only meaningful if set
 * on every access unit (DECODE_ONLY, CODEC_CONFIG) are kept only when all
 * units carry them; all other flags are OR-ed together.  END_OF_STREAM is
 * only valid on the last access unit.
 *
 * Returns OK on success, -EINVAL when |bufferInfos| is empty or when
 * END_OF_STREAM appears before the last access unit.
 */
static status_t generateFlagsFromAccessUnitInfo(
        sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
    if (bufferInfos->value.empty()) {
        ALOGE("Error: no access-unit info");
        return -EINVAL;
    }
    msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
    msg->setInt32("flags", bufferInfos->value[0].mFlags);
    // A single access unit needs no flag merging; the early values above
    // also prevent any access-unit info copy.
    if (bufferInfos->value.size() > 1) {
        uint32_t bufferFlags = 0;
        // Flags that must be present on *all* access units to survive.
        const uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
        uint32_t andFlags = flagsInAllAU;
        size_t infoIdx = 0;  // size_t: avoid signed/unsigned comparison with size()
        bool foundEndOfStream = false;
        for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
            bufferFlags |= bufferInfos->value[infoIdx].mFlags;
            andFlags &= bufferInfos->value[infoIdx].mFlags;
            if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
                foundEndOfStream = true;
            }
        }
        // Drop "all-AU" flags unless every access unit carried them.
        bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
        // If the loop stopped early, EOS was seen before the last access unit.
        if (infoIdx != bufferInfos->value.size()) {
            ALOGE("Error: incorrect access-units");
            return -EINVAL;
        }
        msg->setInt32("flags", bufferFlags);
    }
    return OK;
}
// Derive a process-unique 64-bit client id from the client's address.
// reinterpret_cast makes the pointer-to-integer conversion explicit
// (the original used a C-style cast).
static int64_t getId(IResourceManagerClient const * client) {
    return reinterpret_cast<int64_t>(client);
}
static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
return getId(client.get());
}
// A codec error counts as a resource error only when it signals memory
// exhaustion (NO_MEMORY), which is what triggers resource reclaim.
static bool isResourceError(status_t err) {
    switch (err) {
        case NO_MEMORY:
            return true;
        default:
            return false;
    }
}
// Render metrics are gated behind a server-configurable flag; they are off
// unless the flag is explicitly set to "true".
static bool areRenderMetricsEnabled() {
    const std::string flag =
            GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
    return flag == "true";
}
// Number of times codec allocation is retried after a resource reclaim attempt.
static const int kMaxRetry = 2;
static const int kMaxReclaimWaitTimeInUs = 500000; // 0.5s
static const int kNumBuffersAlign = 16;
// Default memory usage for buffers: CPU readable and writable.
static const C2MemoryUsage kDefaultReadWriteUsage{
        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
////////////////////////////////////////////////////////////////////////////////
/*
 * Implementation of IResourceManagerClient interface that facilitates
 * MediaCodec reclaim for the ResourceManagerService.
 */
struct ResourceManagerClient : public BnResourceManagerClient {
    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
            mMediaCodec(codec), mPid(pid), mUid(uid) {}

    // Invoked by the ResourceManagerService when it wants this codec's
    // resources back; reports success via *_aidl_return.
    Status reclaimResource(bool* _aidl_return) override {
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // Codec is already gone, so remove the resources as well
            ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
            std::shared_ptr<IResourceManagerService> service =
                    IResourceManagerService::fromBinder(binder);
            if (service == nullptr) {
                ALOGE("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
                *_aidl_return = false;
                return Status::fromStatus(STATUS_INVALID_OPERATION);
            }
            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                        .uid = static_cast<int32_t>(mUid),
                                        .id = getId(this)};
            service->removeClient(clientInfo);
            *_aidl_return = true;
            return Status::ok();
        }
        status_t err = codec->reclaim();
        if (err == WOULD_BLOCK) {
            // Give the client a short grace period to release the codec
            // before forcing the reclaim.
            ALOGD("Wait for the client to release codec.");
            usleep(kMaxReclaimWaitTimeInUs);
            ALOGD("Try to reclaim again.");
            err = codec->reclaim(true /* force */);
        }
        if (err != OK) {
            ALOGW("ResourceManagerClient failed to release codec with err %d", err);
        }
        *_aidl_return = (err == OK);
        return Status::ok();
    }

    // Returns the codec's component name; leaves *_aidl_return empty when
    // the codec has already been destroyed or the name is unavailable.
    Status getName(::std::string* _aidl_return) override {
        _aidl_return->clear();
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // codec is already gone.
            return Status::ok();
        }
        AString name;
        if (codec->getName(&name) == OK) {
            *_aidl_return = name.c_str();
        }
        return Status::ok();
    }

    virtual ~ResourceManagerClient() {}

private:
    wp<MediaCodec> mMediaCodec;  // weak: must not keep the codec alive
    int32_t mPid;
    int32_t mUid;

    DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};
/*
 * Proxy for ResourceManagerService that communicates with the
 * ResourceManagerService for MediaCodec
 */
struct MediaCodec::ResourceManagerServiceProxy :
        public std::enable_shared_from_this<ResourceManagerServiceProxy> {
    // BinderDiedContext defines the cookie that is passed as DeathRecipient.
    // Since this can maintain more context than a raw pointer, we can
    // validate the scope of ResourceManagerServiceProxy,
    // before dereferencing it upon the binder death.
    struct BinderDiedContext {
        std::weak_ptr<ResourceManagerServiceProxy> mRMServiceProxy;
    };

    ResourceManagerServiceProxy(pid_t pid, uid_t uid,
            const std::shared_ptr<IResourceManagerClient> &client);
    ~ResourceManagerServiceProxy();
    status_t init();

    // Resource bookkeeping, forwarded to the ResourceManagerService.
    void addResource(const MediaResourceParcel &resource);
    void removeResource(const MediaResourceParcel &resource);
    void removeClient();
    void markClientForPendingRemoval();
    bool reclaimResource(const std::vector<MediaResourceParcel> &resources);

    // Client lifecycle notifications, forwarded to the ResourceManagerService.
    void notifyClientCreated();
    void notifyClientStarted(ClientConfigParcel& clientConfig);
    void notifyClientStopped(ClientConfigParcel& clientConfig);
    void notifyClientConfigChanged(ClientConfigParcel& clientConfig);

    // Codec name reported in getClientInfo().
    inline void setCodecName(const char* name) {
        mCodecName = name;
    }

    // Client importance reported in getClientInfo().
    inline void setImportance(int importance) {
        mImportance = importance;
    }

private:
    // To get the binder interface to ResourceManagerService.
    void getService() {
        std::scoped_lock lock{mLock};
        getService_l();
    }

    std::shared_ptr<IResourceManagerService> getService_l();

    // To add/register all the resources currently added/registered with
    // the ResourceManagerService.
    // This function will be called right after the death of the Resource
    // Manager to make sure that the newly started ResourceManagerService
    // knows about the current resource usage.
    void reRegisterAllResources_l();

    // Unlink from death notifications and drop the cached service handle.
    void deinit() {
        std::scoped_lock lock{mLock};
        // Unregistering from DeathRecipient notification.
        if (mService != nullptr) {
            AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
            mService = nullptr;
        }
    }

    // For binder death handling
    static void BinderDiedCallback(void* cookie);
    static void BinderUnlinkedCallback(void* cookie);

    void binderDied() {
        std::scoped_lock lock{mLock};
        ALOGE("ResourceManagerService died.");
        mService = nullptr;
        mBinderDied = true;
        // start an async operation that will reconnect with the RM and
        // re-registers all the resources.
        mGetServiceFuture = std::async(std::launch::async, [this] { getService(); });
    }

    /**
     * Get the ClientInfo to communicate with the ResourceManager.
     *
     * ClientInfo includes:
     *   - {pid, uid} of the process
     *   - identifier for the client
     *   - name of the client/codec
     *   - importance associated with the client
     */
    inline ClientInfoParcel getClientInfo() const {
        ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                    .uid = static_cast<int32_t>(mUid),
                                    .id = getId(mClient),
                                    .name = mCodecName,
                                    .importance = mImportance};
        return clientInfo;
    }

private:
    std::mutex mLock;   // guards all state below
    bool mBinderDied = false;
    pid_t mPid;
    uid_t mUid;
    int mImportance = 0;
    std::string mCodecName;

    /**
     * Reconnecting with the ResourceManagerService, after its binder interface dies,
     * is done asynchronously. It will also make sure that, all the resources
     * associated with this Proxy (MediaCodec) is added with the new instance
     * of the ResourceManagerService to persist the state of resources.
     * We must store the reference of the future to guarantee real asynchronous operation.
     */
    std::future<void> mGetServiceFuture;

    // To maintain the list of all the resources currently added/registered with
    // the ResourceManagerService.
    std::set<MediaResourceParcel> mMediaResourceParcel;

    std::shared_ptr<IResourceManagerClient> mClient;
    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
    std::shared_ptr<IResourceManagerService> mService;

    // Cookie registered with AIBinder_linkToDeath; allocated in getService_l()
    // and deleted in BinderUnlinkedCallback.
    BinderDiedContext* mCookie;
};
// Constructor: substitutes the binder calling pid/uid when the caller passed
// kNoPid/kNoUid, and registers the unlink callback that frees the death cookie.
MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client) :
        mPid(pid), mUid(uid), mClient(client),
        mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
                AIBinder_DeathRecipient_new(BinderDiedCallback))),
        mCookie(nullptr) {
    if (mUid == MediaCodec::kNoUid) {
        mUid = AIBinder_getCallingUid();
    }
    if (mPid == MediaCodec::kNoPid) {
        mPid = AIBinder_getCallingPid();
    }
    // Setting callback notification when DeathRecipient gets deleted.
    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
}
// Destructor: unlink from death notifications before the proxy goes away.
MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
    deinit();
}
// Verifies the caller may act on behalf of {mPid, mUid}, connects to the
// ResourceManagerService, and asks it to reclaim resources from clients
// already marked for pending removal.
// Returns PERMISSION_DENIED on a failed override check, DEAD_OBJECT when the
// service is unreachable, OK otherwise.
status_t MediaCodec::ResourceManagerServiceProxy::init() {
    std::scoped_lock lock{mLock};
    int callerPid = AIBinder_getCallingPid();
    int callerUid = AIBinder_getCallingUid();
    if (mPid != callerPid || mUid != callerUid) {
        // Media processes don't need special permissions to act on behalf of other processes.
        if (callerUid != AID_MEDIA) {
            char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
            if (!checkCallingPermission(String16(permission))) {
                ALOGW("%s is required to override the caller's PID for media resource management.",
                        permission);
                return PERMISSION_DENIED;
            }
        }
    }
    mService = getService_l();
    if (mService == nullptr) {
        return DEAD_OBJECT;
    }
    // Kill clients pending removal.
    mService->reclaimResourcesFromClientsPendingRemoval(mPid);
    return OK;
}
// Returns the cached ResourceManagerService handle, (re)connecting and
// linking to death notifications when there is none.  Re-registers all
// resources after a service restart.  Caller must hold mLock.
std::shared_ptr<IResourceManagerService> MediaCodec::ResourceManagerServiceProxy::getService_l() {
    if (mService != nullptr) {
        return mService;
    }
    // Get binder interface to resource manager.
    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
    mService = IResourceManagerService::fromBinder(binder);
    if (mService == nullptr) {
        ALOGE("Failed to get ResourceManagerService");
        return mService;
    }
    // Create the context that is passed as cookie to the binder death notification.
    // The context gets deleted at BinderUnlinkedCallback.
    mCookie = new BinderDiedContext{.mRMServiceProxy = weak_from_this()};
    // Register for the callbacks by linking to death notification.
    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
    // If the RM was restarted, re-register all the resources.
    if (mBinderDied) {
        reRegisterAllResources_l();
        mBinderDied = false;
    }
    return mService;
}
void MediaCodec::ResourceManagerServiceProxy::reRegisterAllResources_l() {
if (mMediaResourceParcel.empty()) {
ALOGV("No resources to add");
return;
}
if (mService == nullptr) {
ALOGW("Service isn't available");
return;
}
std::vector<MediaResourceParcel> resources;
std::copy(mMediaResourceParcel.begin(), mMediaResourceParcel.end(),
std::back_inserter(resources));
mService->addResource(getClientInfo(), mClient, resources);
}
void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
// Validate the context and check if the ResourceManagerServiceProxy object is still in scope.
if (context != nullptr) {
std::shared_ptr<ResourceManagerServiceProxy> thiz = context->mRMServiceProxy.lock();
if (thiz != nullptr) {
thiz->binderDied();
} else {
ALOGI("ResourceManagerServiceProxy is out of scope already");
}
}
}
// Called once the death recipient is unlinked; the cookie (allocated in
// getService_l) is no longer referenced by the binder framework.
void MediaCodec::ResourceManagerServiceProxy::BinderUnlinkedCallback(void* cookie) {
    // static_cast is the idiomatic (and sufficient) cast from void*.
    BinderDiedContext* context = static_cast<BinderDiedContext*>(cookie);
    // Since we don't need the context anymore, we are deleting it now.
    delete context;
}
// Register one resource with the ResourceManagerService and mirror it
// locally so it can be re-registered if the service restarts.
void MediaCodec::ResourceManagerServiceProxy::addResource(
        const MediaResourceParcel &resource) {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    const std::vector<MediaResourceParcel> resources{resource};
    rm->addResource(getClientInfo(), mClient, resources);
    mMediaResourceParcel.emplace(resource);
}
// Unregister one resource from the ResourceManagerService and drop it from
// the local mirror.
void MediaCodec::ResourceManagerServiceProxy::removeResource(
        const MediaResourceParcel &resource) {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    const std::vector<MediaResourceParcel> resources{resource};
    rm->removeResource(getClientInfo(), resources);
    mMediaResourceParcel.erase(resource);
}
// Unregister this client entirely; the service drops all of its resources,
// so the local mirror is cleared as well.
void MediaCodec::ResourceManagerServiceProxy::removeClient() {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    rm->removeClient(getClientInfo());
    mMediaResourceParcel.clear();
}
// Flag this client for deferred removal by the ResourceManagerService and
// clear the locally tracked resources.
void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    rm->markClientForPendingRemoval(getClientInfo());
    mMediaResourceParcel.clear();
}
// Ask the ResourceManagerService to reclaim the given resources from other
// clients.  Returns true only when the binder call succeeded AND the service
// reported a successful reclaim.
bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
        const std::vector<MediaResourceParcel> &resources) {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> service = getService_l();
    if (service == nullptr) {
        ALOGW("Service isn't available");
        return false;
    }
    // Initialize to false: if the binder call fails without writing the out
    // parameter, reading an uninitialized bool below would be undefined.
    bool success = false;
    Status status = service->reclaimResource(getClientInfo(), resources, &success);
    return status.isOk() && success;
}
// Tell the ResourceManagerService that a new codec client came into existence.
void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    rm->notifyClientCreated(getClientInfo());
}
// Tell the ResourceManagerService that this codec client has started.
void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
        ClientConfigParcel& clientConfig) {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    // Stamp the config with this client's identity before sending it out.
    clientConfig.clientInfo = getClientInfo();
    rm->notifyClientStarted(clientConfig);
}
// Tell the ResourceManagerService that this codec client has stopped.
void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
        ClientConfigParcel& clientConfig) {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    // Stamp the config with this client's identity before sending it out.
    clientConfig.clientInfo = getClientInfo();
    rm->notifyClientStopped(clientConfig);
}
// Tell the ResourceManagerService that this client's configuration changed.
void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
        ClientConfigParcel& clientConfig) {
    std::scoped_lock lock{mLock};
    std::shared_ptr<IResourceManagerService> rm = getService_l();
    if (rm == nullptr) {
        ALOGW("Service isn't available");
        return;
    }
    // Stamp the config with this client's identity before sending it out.
    clientConfig.clientInfo = getClientInfo();
    rm->notifyClientConfigChanged(clientConfig);
}
////////////////////////////////////////////////////////////////////////////////
// A buffer is initially not owned by the client.
MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
////////////////////////////////////////////////////////////////////////////////
// Owns a BufferQueue-backed Surface (consumer name "MediaCodec.release")
// whose listener immediately acquires and releases every incoming frame,
// i.e. drops anything queued to it.
// NOTE(review): presumably used as a stand-in surface while the codec is
// being released — confirm against callers elsewhere in this file.
class MediaCodec::ReleaseSurface {
public:
    explicit ReleaseSurface(uint64_t usage) {
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        mSurface = new Surface(mProducer, false /* controlledByApp */);
        struct ConsumerListener : public BnConsumerListener {
            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
                mConsumer = consumer;
            }
            void onFrameAvailable(const BufferItem&) override {
                BufferItem buffer;
                // consume buffer
                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
                }
            }
            // weak: the enclosing ReleaseSurface owns the consumer.
            wp<IGraphicBufferConsumer> mConsumer;

            void onBuffersReleased() override {}
            void onSidebandStreamChanged() override {}
        };
        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
        mConsumer->consumerConnect(listener, false);
        mConsumer->setConsumerName(String8{"MediaCodec.release"});
        mConsumer->setConsumerUsageBits(usage);
    }

    const sp<Surface> &getSurface() {
        return mSurface;
    }

private:
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    sp<Surface> mSurface;
};
////////////////////////////////////////////////////////////////////////////////
namespace {
// FourCC message codes posted from the codec/buffer callbacks to MediaCodec's
// looper thread. Values are message identifiers only; do not change them
// independently of the handlers that switch on them.
enum {
kWhatFillThisBuffer = 'fill',
kWhatDrainThisBuffer = 'drai',
kWhatEOS = 'eos ',
kWhatStartCompleted = 'Scom',
kWhatStopCompleted = 'scom',
kWhatReleaseCompleted = 'rcom',
kWhatFlushCompleted = 'fcom',
kWhatError = 'erro',
kWhatCryptoError = 'ercp',
kWhatComponentAllocated = 'cAll',
kWhatComponentConfigured = 'cCon',
kWhatInputSurfaceCreated = 'isfc',
kWhatInputSurfaceAccepted = 'isfa',
kWhatSignaledInputEOS = 'seos',
kWhatOutputFramesRendered = 'outR',
kWhatOutputBuffersChanged = 'outC',
kWhatFirstTunnelFrameReady = 'ftfR',
kWhatPollForRenderedBuffers = 'plrb',
kWhatMetricsUpdated = 'mtru',
};
// Relays CryptoAsync decrypt outcomes back to MediaCodec by posting copies of
// the supplied notify message.
class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
public:
    explicit CryptoAsyncCallback(const sp<AMessage> &notify) : mNotify(notify) {
    }

    ~CryptoAsyncCallback() {}

    // Successful decrypts need no notification here.
    void onDecryptComplete(const sp<AMessage> &result) override {
        (void)result;
    }

    // Posts one notification per failed buffer; the error may originate from
    // either the decrypt or the queue step, so classify it per message.
    void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
        for (const auto &errorMsg : errorMsgs) {
            int32_t errorCode;
            if (!errorMsg->findInt32("err", &errorCode)) {
                ALOGW("Buffers with no errorCode are not expected");
                continue;
            }
            sp<AMessage> notification(mNotify->dup());
            notification->setInt32(
                    "what", isCryptoError(errorCode) ? kWhatCryptoError : kWhatError);
            notification->extend(errorMsg);
            notification->post();
        }
    }
private:
    const sp<AMessage> mNotify;
};
// Producer-side listener for the output surface: forwards buffer release and
// (when the libgui flag is enabled) attach events to the BufferChannel, tagged
// with the surface generation so stale surfaces can be ignored downstream.
class OnBufferReleasedListener : public ::android::BnProducerListener{
private:
uint32_t mGeneration;
// Weak: the buffer channel's lifetime is managed elsewhere.
std::weak_ptr<BufferChannelBase> mBufferChannel;
void notifyBufferReleased() {
auto p = mBufferChannel.lock();
if (p) {
p->onBufferReleasedFromOutputSurface(mGeneration);
}
}
void notifyBufferAttached() {
auto p = mBufferChannel.lock();
if (p) {
p->onBufferAttachedToOutputSurface(mGeneration);
}
}
public:
explicit OnBufferReleasedListener(
uint32_t generation,
const std::shared_ptr<BufferChannelBase> &bufferChannel)
: mGeneration(generation), mBufferChannel(bufferChannel) {}
virtual ~OnBufferReleasedListener() = default;
void onBufferReleased() override {
notifyBufferReleased();
}
// A detached buffer is treated the same as a released one.
void onBufferDetached([[maybe_unused]] int slot) override {
notifyBufferReleased();
}
bool needsReleaseNotify() override { return true; }
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(BQ_CONSUMER_ATTACH_CALLBACK)
void onBufferAttached() override {
notifyBufferAttached();
}
bool needsAttachNotify() override { return true; }
#endif
};
// Translates CodecBase buffer-availability callbacks into messages posted to
// MediaCodec's looper thread (see implementations below).
class BufferCallback : public CodecBase::BufferCallback {
public:
explicit BufferCallback(const sp<AMessage> &notify);
virtual ~BufferCallback() = default;
virtual void onInputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) override;
virtual void onOutputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) override;
private:
// Template message duplicated for every notification.
const sp<AMessage> mNotify;
};
BufferCallback::BufferCallback(const sp<AMessage> &notify)
: mNotify(notify) {}
void BufferCallback::onInputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFillThisBuffer);
notify->setSize("index", index);
notify->setObject("buffer", buffer);
notify->post();
}
void BufferCallback::onOutputBufferAvailable(
size_t index, const sp<MediaCodecBuffer> &buffer) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatDrainThisBuffer);
notify->setSize("index", index);
notify->setObject("buffer", buffer);
notify->post();
}
// Translates CodecBase lifecycle/event callbacks into messages posted to
// MediaCodec's looper thread (see implementations below).
class CodecCallback : public CodecBase::CodecCallback {
public:
explicit CodecCallback(const sp<AMessage> &notify);
virtual ~CodecCallback() = default;
virtual void onEos(status_t err) override;
virtual void onStartCompleted() override;
virtual void onStopCompleted() override;
virtual void onReleaseCompleted() override;
virtual void onFlushCompleted() override;
virtual void onError(status_t err, enum ActionCode actionCode) override;
virtual void onComponentAllocated(const char *componentName) override;
virtual void onComponentConfigured(
const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
virtual void onInputSurfaceCreated(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat,
const sp<BufferProducerWrapper> &inputSurface) override;
virtual void onInputSurfaceCreationFailed(status_t err) override;
virtual void onInputSurfaceAccepted(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat) override;
virtual void onInputSurfaceDeclined(status_t err) override;
virtual void onSignaledInputEOS(status_t err) override;
virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
virtual void onOutputBuffersChanged() override;
virtual void onFirstTunnelFrameReady() override;
virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
private:
// Template message duplicated for every notification.
const sp<AMessage> mNotify;
};
CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}
// Each callback below duplicates the notify template, tags it with the
// matching kWhat* code plus payload, and posts it to the looper thread.
void CodecCallback::onEos(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatEOS);
notify->setInt32("err", err);
notify->post();
}
void CodecCallback::onStartCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatStartCompleted);
notify->post();
}
void CodecCallback::onStopCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatStopCompleted);
notify->post();
}
void CodecCallback::onReleaseCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatReleaseCompleted);
notify->post();
}
void CodecCallback::onFlushCompleted() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFlushCompleted);
notify->post();
}
void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatError);
notify->setInt32("err", err);
notify->setInt32("actionCode", actionCode);
notify->post();
}
void CodecCallback::onComponentAllocated(const char *componentName) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatComponentAllocated);
notify->setString("componentName", componentName);
notify->post();
}
void CodecCallback::onComponentConfigured(
const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatComponentConfigured);
notify->setMessage("input-format", inputFormat);
notify->setMessage("output-format", outputFormat);
notify->post();
}
void CodecCallback::onInputSurfaceCreated(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat,
const sp<BufferProducerWrapper> &inputSurface) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceCreated);
notify->setMessage("input-format", inputFormat);
notify->setMessage("output-format", outputFormat);
notify->setObject("input-surface", inputSurface);
notify->post();
}
// Note: failure reuses kWhatInputSurfaceCreated; the handler distinguishes
// success from failure by the presence of the "err" field.
void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceCreated);
notify->setInt32("err", err);
notify->post();
}
void CodecCallback::onInputSurfaceAccepted(
const sp<AMessage> &inputFormat,
const sp<AMessage> &outputFormat) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceAccepted);
notify->setMessage("input-format", inputFormat);
notify->setMessage("output-format", outputFormat);
notify->post();
}
// Note: declined likewise reuses kWhatInputSurfaceAccepted with an "err" field.
void CodecCallback::onInputSurfaceDeclined(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatInputSurfaceAccepted);
notify->setInt32("err", err);
notify->post();
}
void CodecCallback::onSignaledInputEOS(status_t err) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatSignaledInputEOS);
// "err" is only attached on failure.
if (err != OK) {
notify->setInt32("err", err);
}
notify->post();
}
void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatOutputFramesRendered);
// Only post when at least one frame made it into the message.
if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
notify->post();
}
}
void CodecCallback::onOutputBuffersChanged() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatOutputBuffersChanged);
notify->post();
}
void CodecCallback::onFirstTunnelFrameReady() {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatFirstTunnelFrameReady);
notify->post();
}
void CodecCallback::onMetricsUpdated(const sp<AMessage> &updatedMetrics) {
sp<AMessage> notify(mNotify->dup());
notify->setInt32("what", kWhatMetricsUpdated);
notify->setMessage("updated-metrics", updatedMetrics);
notify->post();
}
// Maps a codec domain plus the hardware/software flag onto the resource
// manager's MediaResourceSubType enum; unknown domains map to unspecified.
static MediaResourceSubType toMediaResourceSubType(bool isHardware, MediaCodec::Domain domain) {
    if (domain == MediaCodec::DOMAIN_VIDEO) {
        return isHardware ? MediaResourceSubType::kHwVideoCodec
                          : MediaResourceSubType::kSwVideoCodec;
    }
    if (domain == MediaCodec::DOMAIN_AUDIO) {
        return isHardware ? MediaResourceSubType::kHwAudioCodec
                          : MediaResourceSubType::kSwAudioCodec;
    }
    if (domain == MediaCodec::DOMAIN_IMAGE) {
        return isHardware ? MediaResourceSubType::kHwImageCodec
                          : MediaResourceSubType::kSwImageCodec;
    }
    return MediaResourceSubType::kUnspecifiedSubType;
}
// Returns the metrics "codec mode" string for a codec domain.
static const char * toCodecMode(MediaCodec::Domain domain) {
    if (domain == MediaCodec::DOMAIN_VIDEO) {
        return kCodecModeVideo;
    }
    if (domain == MediaCodec::DOMAIN_AUDIO) {
        return kCodecModeAudio;
    }
    if (domain == MediaCodec::DOMAIN_IMAGE) {
        return kCodecModeImage;
    }
    return kCodecModeUnknown;
}
} // namespace
////////////////////////////////////////////////////////////////////////////////
// static
// static
// Convenience overload: create by MIME type with no format hint.
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid) {
    return CreateByType(looper, mime, encoder, err, pid, uid, sp<AMessage>());
}
// Creates a codec for `mime`, trying each matching component in order until
// one initializes. On success returns the codec (and writes OK to *err if
// provided); on failure returns NULL with *err holding the last init status,
// or NAME_NOT_FOUND when no component matched at all.
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid, sp<AMessage> format) {
    Vector<AString> candidates;
    MediaCodecList::findMatchingCodecs(
            mime.c_str(),
            encoder,
            0,
            format,
            &candidates);
    if (err != NULL) {
        *err = NAME_NOT_FOUND;
    }
    for (size_t ix = 0; ix < candidates.size(); ++ix) {
        const AString &componentName = candidates[ix];
        sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
        const status_t initStatus = codec->init(componentName);
        if (err != NULL) {
            *err = initStatus;
        }
        if (initStatus == OK) {
            return codec;
        }
        ALOGD("Allocating component '%s' failed (%d), try next one.",
                componentName.c_str(), initStatus);
    }
    return NULL;
}
// static
// Creates a codec bound to the exact component `name`. Writes the init
// status to *err when provided; returns NULL on failure (dropping the last
// strong reference deallocates the codec).
sp<MediaCodec> MediaCodec::CreateByComponentName(
        const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
    const status_t ret = codec->init(name);
    if (err != NULL) {
        *err = ret;
    }
    if (ret != OK) {
        return NULL;
    }
    return codec;
}
// static
// Creates a persistent input surface, preferring the Codec2 (CCodec) path and
// falling back to OMX when Codec2 does not provide one. Returns NULL when
// neither path succeeds.
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
if (pluginSurface != nullptr) {
return pluginSurface;
}
// Fall back to the OMX input surface.
OMXClient client;
if (client.connect() != OK) {
ALOGE("Failed to connect to OMX to create persistent input surface.");
return NULL;
}
sp<IOMX> omx = client.interface();
sp<IGraphicBufferProducer> bufferProducer;
sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;
status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
if (err != OK) {
ALOGE("Failed to create persistent input surface.");
return NULL;
}
return new PersistentSurface(bufferProducer, bufferSource);
}
// GenerateCodecId produces a 64-bit ID for each codec instance created in
// this process:
// - the high 32 bits are a per-process random value chosen once, lazily
// - the low 32 bits come from an atomic, monotonically increasing sequence
static uint64_t GenerateCodecId() {
    static std::atomic_uint64_t sId = [] {
        std::random_device seeder;
        std::mt19937 engine(seeder());
        std::uniform_int_distribution<uint32_t> dist(0, UINT32_MAX);
        return static_cast<uint64_t>(dist(engine)) << 32;
    }();
    return sId++;
}
// Constructor: initializes all bookkeeping/metrics fields, registers with the
// resource manager, and installs default factories for the codec base and
// codec info lookups when none are injected (tests inject their own).
MediaCodec::MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
: mState(UNINITIALIZED),
mReleasedByResourceManager(false),
mLooper(looper),
mCodec(NULL),
mReplyID(0),
mFlags(0),
mStickyError(OK),
mSoftRenderer(NULL),
mDomain(DOMAIN_UNKNOWN),
mWidth(0),
mHeight(0),
mRotationDegrees(0),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
mDequeueOutputReplyID(0),
mTunneledInputWidth(0),
mTunneledInputHeight(0),
mTunneled(false),
mTunnelPeekState(TunnelPeekState::kLegacyMode),
mTunnelPeekEnabled(false),
mHaveInputSurface(false),
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
mIsSurfaceToDisplay(false),
mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
mVideoRenderQualityTracker(
VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
GetServerConfigurableFlag)),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
mLatestEncodedPtsUs(INT64_MIN),
mFramesEncoded(0),
mNumLowLatencyEnables(0),
mNumLowLatencyDisables(0),
mIsLowLatencyModeOn(false),
mIndexOfFirstFrameWhenLowLatencyOn(-1),
mInputBufferCounter(0),
mGetCodecBase(getCodecBase),
mGetCodecInfo(getCodecInfo) {
mCodecId = GenerateCodecId();
mResourceManagerProxy = std::make_shared<ResourceManagerServiceProxy>(pid, uid,
::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
if (!mGetCodecBase) {
// Default factory: resolve a codec base implementation by name/owner.
mGetCodecBase = [](const AString &name, const char *owner) {
return GetCodecBase(name, owner);
};
}
if (!mGetCodecInfo) {
// Default lookup against the global MediaCodecList; captures only the
// error log so it stays valid for the lifetime of this object.
mGetCodecInfo = [&log = mErrorLog](const AString &name,
sp<MediaCodecInfo> *info) -> status_t {
*info = nullptr;
const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
if (!mcl) {
log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
return NO_INIT; // if called from Java should raise IOException
}
// Also try the non-secure variant of a ".secure" component name.
AString tmp = name;
if (tmp.endsWith(".secure")) {
tmp.erase(tmp.size() - 7, 7);
}
for (const AString &codecName : { name, tmp }) {
ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
if (codecIdx < 0) {
continue;
}
*info = mcl->getCodecInfo(codecIdx);
return OK;
}
log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
name.c_str()));
return NAME_NOT_FOUND;
};
}
// we want an empty metrics record for any early getMetrics() call
// this should be the *only* initMediametrics() call that's not on the Looper thread
initMediametrics();
}
// Destructor: requires the codec to already be in UNINITIALIZED state
// (i.e. released); unregisters from the resource manager and flushes/cleans
// up any remaining metrics records.
MediaCodec::~MediaCodec() {
CHECK_EQ(mState, UNINITIALIZED);
mResourceManagerProxy->removeClient();
flushMediametrics();
// clean any saved metrics info we stored as part of configure()
if (mConfigureMsg != nullptr) {
mediametrics_handle_t metricsHandle;
if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
mediametrics_delete(metricsHandle);
}
}
}
// except for in constructor, called from the looper thread (and therefore mutexed)
// Creates the metrics record (if needed) and resets all latency/low-latency
// tracking state and the lifetime start timestamp.
void MediaCodec::initMediametrics() {
if (mMetricsHandle == 0) {
mMetricsHandle = mediametrics_create(kCodecKeyName);
}
mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
{
// Invalidate the recent-latency ring buffer.
Mutex::Autolock al(mRecentLock);
for (int i = 0; i<kRecentLatencyFrames; i++) {
mRecentSamples[i] = kRecentSampleInvalid;
}
mRecentHead = 0;
}
{
// Reset latency and low-latency-mode counters.
Mutex::Autolock al(mLatencyLock);
mBuffersInFlight.clear();
mNumLowLatencyEnables = 0;
mNumLowLatencyDisables = 0;
mIsLowLatencyModeOn = false;
mIndexOfFirstFrameWhenLowLatencyOn = -1;
mInputBufferCounter = 0;
}
mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
resetMetricsFields();
}
// Resets per-session metrics aggregates (HDR flags, API usage, reliability
// context) to their default-constructed values.
void MediaCodec::resetMetricsFields() {
mHdrInfoFlags = 0;
mApiUsageMetrics = ApiUsageMetrics();
mReliabilityContextMetrics = ReliabilityContextMetrics();
}
// Copies the current in-memory statistics (API usage, render quality,
// latency, encoder output, low-latency mode) into the mediametrics record.
// Holds mMetricsLock for the duration; nested locks guard the stats sources.
void MediaCodec::updateMediametrics() {
if (mMetricsHandle == 0) {
ALOGV("no metrics handle found");
return;
}
Mutex::Autolock _lock(mMetricsLock);
mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
// Derive the operation mode from the async/block-model flags.
mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
: ApiUsageMetrics::kAsynchronousMode)
: ApiUsageMetrics::kSynchronousMode;
mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);
mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
mApiUsageMetrics.inputBufferSize.appMax);
mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
mApiUsageMetrics.inputBufferSize.usedMax);
mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
mApiUsageMetrics.inputBufferSize.codecMax);
mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
mReliabilityContextMetrics.setOutputSurfaceCount);
mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
mReliabilityContextMetrics.resolutionChangeCount);
// Video rendering quality metrics
{
const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
if (m.frameReleasedCount > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
}
// Freeze/judder histograms are only emitted when they have samples.
if (m.freezeDurationMsHistogram.getCount() >= 1) {
const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
h.emitBuckets());
}
if (m.freezeDistanceMsHistogram.getCount() >= 1) {
const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
h.emitBuckets());
}
if (m.judderScoreHistogram.getCount() >= 1) {
const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
h.emitBuckets());
}
if (m.freezeEventCount != 0) {
mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
}
if (m.judderEventCount != 0) {
mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
}
}
if (mLatencyHist.getCount() != 0 ) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());
if (kEmitHistogram) {
// and the histogram itself
std::string hist = mLatencyHist.emit();
mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
}
}
if (mLatencyUnknown > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
}
int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
if (playbackDurationSec > 0) {
mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
}
if (mLifetimeStartNs > 0) {
nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
lifetime = lifetime / (1000 * 1000); // emitted in ms, truncated not rounded
mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
}
// Encoder output statistics (only when something was encoded).
if (mBytesEncoded) {
Mutex::Autolock al(mOutputStatsLock);
mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
int64_t duration = 0;
if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
}
mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
}
{
Mutex::Autolock al(mLatencyLock);
mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
mIndexOfFirstFrameWhenLowLatencyOn);
}
#if 0
// enable for short term, only while debugging
updateEphemeralMediametrics(mMetricsHandle);
#endif
}
// Records HDR-related metrics (color aspects, static/dynamic HDR info, HDR
// format) from the current formats. `isConfig` selects the "config" vs
// "parsed" metric keys. Only meaningful for video/image codecs.
void MediaCodec::updateHdrMetrics(bool isConfig) {
if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
return;
}
int32_t colorStandard = -1;
if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
mediametrics_setInt32(mMetricsHandle,
isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
}
int32_t colorRange = -1;
if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
mediametrics_setInt32(mMetricsHandle,
isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
}
int32_t colorTransfer = -1;
if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
mediametrics_setInt32(mMetricsHandle,
isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
}
// mHdrInfoFlags accumulates: once static or HDR10+ info has been seen, the
// corresponding flag stays set for the session.
HDRStaticInfo info;
if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
&& ColorUtils::isHDRStaticInfoValid(&info)) {
mHdrInfoFlags |= kFlagHasHdrStaticInfo;
}
mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
(mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
sp<ABuffer> hdr10PlusInfo;
if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
&& hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
}
mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
(mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
// hdr format
// For encoders the coded stream is the output; for decoders it is the input.
sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
AString mime;
int32_t profile = -1;
if (codedFormat->findString("mime", &mime)
&& codedFormat->findInt32(KEY_PROFILE, &profile)
&& colorTransfer != -1) {
hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
}
}
// Classifies the HDR format for the coded stream, dispatching to the
// encoder or decoder variant based on this instance's encoder flag.
hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
        const int32_t colorTransfer) {
    if (mFlags & kFlagIsEncoder) {
        return getHdrFormatForEncoder(mime, profile, colorTransfer);
    }
    return getHdrFormatForDecoder(mime, profile, colorTransfer);
}
// Encoder-side HDR classification: maps (mime, profile, transfer) to an HDR
// format. ST2084 implies HDR10/HDR10+ for the known 10-bit profiles; HLG is
// reported for everything except Dolby Vision.
hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
const int32_t colorTransfer) {
switch (colorTransfer) {
case COLOR_TRANSFER_ST2084:
if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
switch (profile) {
case VP9Profile2HDR:
return HDR_FORMAT_HDR10;
case VP9Profile2HDR10Plus:
return HDR_FORMAT_HDR10PLUS;
default:
return HDR_FORMAT_NONE;
}
} else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
switch (profile) {
case AV1ProfileMain10HDR10:
return HDR_FORMAT_HDR10;
case AV1ProfileMain10HDR10Plus:
return HDR_FORMAT_HDR10PLUS;
default:
return HDR_FORMAT_NONE;
}
} else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
switch (profile) {
case HEVCProfileMain10HDR10:
return HDR_FORMAT_HDR10;
case HEVCProfileMain10HDR10Plus:
return HDR_FORMAT_HDR10PLUS;
default:
return HDR_FORMAT_NONE;
}
} else {
return HDR_FORMAT_NONE;
}
case COLOR_TRANSFER_HLG:
if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
return HDR_FORMAT_HLG;
} else {
// TODO: DOLBY format
return HDR_FORMAT_NONE;
}
default:
return HDR_FORMAT_NONE;
}
}
// Decoder-side HDR classification: relies on the accumulated mHdrInfoFlags
// (set in updateHdrMetrics) plus 10-bit profile support. For HLG, Dolby
// Vision deliberately falls through to HDR_FORMAT_NONE (TODO below).
hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
const int32_t colorTransfer) {
switch (colorTransfer) {
case COLOR_TRANSFER_ST2084:
if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
return HDR_FORMAT_NONE;
}
return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
case COLOR_TRANSFER_HLG:
if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
return HDR_FORMAT_HLG;
}
// TODO: DOLBY format
}
return HDR_FORMAT_NONE;
}
// Returns whether the given (mime, profile) pair can carry 10-bit content:
// all AV1 profiles, the 10-bit VP9 profiles, and the HEVC Main10 family.
bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
return true;
} else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
switch (profile) {
case VP9Profile2:
case VP9Profile3:
case VP9Profile2HDR:
case VP9Profile3HDR:
case VP9Profile2HDR10Plus:
case VP9Profile3HDR10Plus:
return true;
}
} else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
switch (profile) {
case HEVCProfileMain10:
case HEVCProfileMain10HDR10:
case HEVCProfileMain10HDR10Plus:
return true;
}
}
return false;
}
// called to update info being passed back via getMetrics(), which is a
// unique copy for that call, no concurrent access worries.
// Builds a histogram from the recent-latency ring buffer and writes its
// summary (and optionally the full histogram) into the supplied record.
void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
ALOGD("MediaCodec::updateEphemeralMediametrics()");
if (item == 0) {
return;
}
// build an empty histogram
MediaHistogram<int64_t> recentHist;
recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
// stuff it with the samples in the ring buffer
{
Mutex::Autolock al(mRecentLock);
for (int i = 0; i < kRecentLatencyFrames; i++) {
if (mRecentSamples[i] != kRecentSampleInvalid) {
recentHist.insert(mRecentSamples[i]);
}
}
}
// spit the data (if any) into the supplied analytics record
if (recentHist.getCount() != 0 ) {
mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());
if (kEmitHistogram) {
// and the histogram itself
std::string hist = recentHist.emit();
mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
}
}
}
// Serializes a vector of ints into a comma-separated string, e.g. "1,2,3"
// (empty string for an empty vector). Takes the vector by const reference:
// the original passed by value, copying the caller's data on every call.
static std::string emitVector(const std::vector<int32_t> &vector) {
    std::ostringstream sstr;
    for (size_t i = 0; i < vector.size(); ++i) {
        if (i != 0) {
            sstr << ',';
        }
        sstr << vector[i];
    }
    return sstr.str();
}
// Uploads a completed freeze event as its own mediametrics record, then
// deletes the handle. No-op when the event is not valid.
// NOTE(review): the averages divide by e.count — assumes a valid event has
// count > 0; confirm in VideoRenderQualityTracker.
static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
if (e.valid) {
mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
mediametrics_setInt64(handle, kFreezeEventCount, e.count);
mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
emitVector(e.details.durationMs));
mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
emitVector(e.details.distanceMs));
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
}
}
// Uploads a completed judder event as its own mediametrics record, then
// deletes the handle. No-op when the event is not valid.
// NOTE(review): averages divide by e.count — assumes count > 0 when valid.
static void reportToMediaMetricsIfValid(const JudderEvent &e) {
if (e.valid) {
mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
mediametrics_setInt64(handle, kJudderEventCount, e.count);
mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
emitVector(e.details.actualRenderDurationUs));
mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
emitVector(e.details.contentRenderDurationUs));
mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
emitVector(e.details.distanceMs));
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
}
}
// Finalizes the current metrics record: refreshes it, uploads it if anything
// is pending, deletes the handle, and separately reports any accumulated
// freeze/judder events.
void MediaCodec::flushMediametrics() {
ALOGV("flushMediametrics");
// update does its own mutex locking
updateMediametrics();
resetMetricsFields();
// ensure mutex while we do our own work
Mutex::Autolock _lock(mMetricsLock);
if (mMetricsHandle != 0) {
if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
mediametrics_selfRecord(mMetricsHandle);
}
mediametrics_delete(mMetricsHandle);
mMetricsHandle = 0;
}
// we no longer have anything pending upload
mMetricsToUpload = false;
// Freeze and judder events are reported separately
reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
}
// Tracks low-latency mode toggles requested via the "low-latency" parameter.
// Negative values are ignored (neither counter changes).
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
int32_t lowLatency = 0;
if (msg->findInt32("low-latency", &lowLatency)) {
Mutex::Autolock al(mLatencyLock);
if (lowLatency > 0) {
++mNumLowLatencyEnables;
// This is just an estimate since low latency mode change happens ONLY at key frame
mIsLowLatencyModeOn = true;
} else if (lowLatency == 0) {
++mNumLowLatencyDisables;
// This is just an estimate since low latency mode change happens ONLY at key frame
mIsLowLatencyModeOn = false;
}
}
}
// Forwards a non-negative KEY_IMPORTANCE value from the parameter message to
// the resource manager and re-sends the client configuration.
void MediaCodec::updateCodecImportance(const sp<AMessage>& msg) {
// Update the codec importance.
int32_t importance = 0;
if (msg->findInt32(KEY_IMPORTANCE, &importance)) {
// Ignoring the negative importance.
if (importance >= 0) {
// Notify RM about the change in the importance.
mResourceManagerProxy->setImportance(importance);
ClientConfigParcel clientConfig;
initClientConfigParcel(clientConfig);
mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
}
}
}
// Human-readable name for a TunnelPeekState, used in logs; returns
// default_string for unrecognized values.
constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
switch(state) {
case TunnelPeekState::kLegacyMode:
return "LegacyMode";
case TunnelPeekState::kEnabledNoBuffer:
return "EnabledNoBuffer";
case TunnelPeekState::kDisabledNoBuffer:
return "DisabledNoBuffer";
case TunnelPeekState::kBufferDecoded:
return "BufferDecoded";
case TunnelPeekState::kBufferRendered:
return "BufferRendered";
case TunnelPeekState::kDisabledQueued:
return "DisabledQueued";
case TunnelPeekState::kEnabledQueued:
return "EnabledQueued";
default:
return default_string;
}
}
// Applies an optional "tunnel-peek" toggle from |msg| to the tunnel-peek
// state machine. Disabling only takes effect in the NoBuffer/Queued enabled
// states; enabling from kBufferDecoded additionally asks the codec to render
// the already-decoded frame via "android._trigger-tunnel-peek". From
// kLegacyMode the request first clears the legacy flag with
// "android._tunnel-peek-set-legacy" and falls through. All other
// transitions are ignored (logged at verbose level).
void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
    int32_t tunnelPeek = 0;
    if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
        return;
    }
    TunnelPeekState previousState = mTunnelPeekState;
    if(tunnelPeek == 0){
        mTunnelPeekEnabled = false;
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                // Leaving legacy mode: tell the codec, then treat as enabled-no-buffer.
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kEnabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                break;
            case TunnelPeekState::kEnabledQueued:
                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
                break;
            default:
                // Disabling has no effect in the remaining states.
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    } else {
        mTunnelPeekEnabled = true;
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                // Leaving legacy mode: tell the codec, then treat as disabled-no-buffer.
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kDisabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                break;
            case TunnelPeekState::kDisabledQueued:
                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
                break;
            case TunnelPeekState::kBufferDecoded:
                // A frame is already decoded and waiting: request an immediate render.
                msg->setInt32("android._trigger-tunnel-peek", 1);
                mTunnelPeekState = TunnelPeekState::kBufferRendered;
                break;
            default:
                // Enabling has no effect in the remaining states.
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    }
    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}
// Handles a kWhatOutputFramesRendered notification. The message carries
// parallel entries "<i>-system-nano" (render time) and "<i>-media-time-us"
// for each rendered frame, indexed from 0 until the first missing entry.
// Only frames actually going to the display are counted; tunneled EOS
// markers (media time INT64_MAX) are excluded from quality tracking.
void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
    int what = 0;
    msg->findInt32("what", &what);
    // Accept either a codec notification wrapping the event, or the event
    // posted directly; on anything else, log once per process and bail.
    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
        static bool logged = false;
        if (!logged) {
            logged = true;
            ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
        }
        return;
    }
    // Rendered frames only matter if they're being sent to the display
    if (mIsSurfaceToDisplay) {
        int64_t renderTimeNs;
        for (size_t index = 0;
             msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
             index++) {
            // Capture metrics for playback duration
            mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
            // Capture metrics for quality
            int64_t mediaTimeUs = 0;
            if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
                ALOGE("processRenderedFrames: no media time found");
                continue;
            }
            // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
            // rendered frame.
            if (!mTunneled || mediaTimeUs != INT64_MAX) {
                FreezeEvent freezeEvent;
                JudderEvent judderEvent;
                mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
                                                           &judderEvent);
                reportToMediaMetricsIfValid(freezeEvent);
                reportToMediaMetricsIfValid(judderEvent);
            }
        }
    }
}
// when we send a buffer to the codec:
// Records per-input-buffer statistics. Buffers with non-positive
// presentation times are ignored. Marks the codec active for battery
// accounting, accumulates encoder input byte/frame counts, and (for
// non-tunneled use) remembers the enqueue time so statsBufferReceived()
// can later compute a round-trip latency for this presentation time.
void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
    // only enqueue if we have a legitimate time
    if (presentationUs <= 0) {
        ALOGV("presentation time: %" PRId64, presentationUs);
        return;
    }
    if (mBatteryChecker != nullptr) {
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
        });
    }
    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
        mBytesInput += buffer->size();
        mFramesInput++;
    }
    // mutex access to mBuffersInFlight and other stats
    Mutex::Autolock al(mLatencyLock);
    // XXX: we *could* make sure that the time is later than the end of queue
    // as part of a consistency check...
    if (!mTunneled) {
        const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
        BufferFlightTiming_t startdata = { presentationUs, nowNs };
        mBuffersInFlight.push_back(startdata);
    }
    if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
        // Remember the first input buffer submitted while low-latency mode
        // was on, for latency reporting.
        mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
    }
    ++mInputBufferCounter;
}
// when we get a buffer back from the codec:
// Accumulates output-side statistics. For encoders, tracks bytes/frames and
// the earliest/latest encoded PTS (skipping codec-config and standalone-EOS
// buffers). Then pairs this buffer with its enqueue record (pushed by
// statsBufferSent) to compute a round-trip latency sample; unmatched or
// timestamp-less buffers are tallied in mLatencyUnknown instead.
void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
    CHECK_NE(mState, UNINITIALIZED);
    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
        int32_t flags = 0;
        (void) buffer->meta()->findInt32("flags", &flags);
        // some of these frames, we don't want to count
        // standalone EOS.... has an invalid timestamp
        if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
            mBytesEncoded += buffer->size();
            mFramesEncoded++;
            Mutex::Autolock al(mOutputStatsLock);
            int64_t timeUs = 0;
            if (buffer->meta()->findInt64("timeUs", &timeUs)) {
                if (timeUs > mLatestEncodedPtsUs) {
                    mLatestEncodedPtsUs = timeUs;
                }
                // can't chain as an else-if or this never triggers
                if (timeUs < mEarliestEncodedPtsUs) {
                    mEarliestEncodedPtsUs = timeUs;
                }
            }
        }
    }
    // mutex access to mBuffersInFlight and other stats
    Mutex::Autolock al(mLatencyLock);
    // how long this buffer took for the round trip through the codec
    // NB: pipelining can/will make these times larger. e.g., if each packet
    // is always 2 msec and we have 3 in flight at any given time, we're going to
    // see "6 msec" as an answer.
    // ignore stuff with no presentation time
    if (presentationUs <= 0) {
        ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
        mLatencyUnknown++;
        return;
    }
    if (mBatteryChecker != nullptr) {
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
        });
    }
    // Walk the in-flight queue from the front looking for the record whose
    // presentation time matches ours, dropping stale (earlier) entries as
    // we go; stop as soon as the head is later than our frame.
    BufferFlightTiming_t startdata;
    bool valid = false;
    while (mBuffersInFlight.size() > 0) {
        startdata = *mBuffersInFlight.begin();
        ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
              startdata.presentationUs, startdata.startedNs);
        if (startdata.presentationUs == presentationUs) {
            // a match
            ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            valid = true;
            break;
        } else if (startdata.presentationUs < presentationUs) {
            // we must have missed the match for this, drop it and keep looking
            ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            continue;
        } else {
            // head is after, so we don't have a frame for ourselves
            ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
                  " we have nothing to pair with",
                  startdata.presentationUs, presentationUs);
            mLatencyUnknown++;
            return;
        }
    }
    if (!valid) {
        ALOGV("-- empty queue, so ignore that.");
        mLatencyUnknown++;
        return;
    }
    // now start our calculations
    const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
    // ns -> us with rounding (+500 before the divide)
    int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
    mLatencyHist.insert(latencyUs);
    // push into the recent samples
    {
        Mutex::Autolock al(mRecentLock);
        if (mRecentHead >= kRecentLatencyFrames) {
            // ring buffer wrap-around
            mRecentHead = 0;
        }
        mRecentSamples[mRecentHead++] = latencyUs;
    }
}
// Reclaims the output buffer at |index| without delivering it to the client
// when it is flagged BUFFER_FLAG_DECODE_ONLY (decoded for reference only,
// never meant to be displayed). Returns true if the buffer was discarded,
// false if it should be handed to the client as usual. Buffer ownership
// bookkeeping is protected by mBufferLock.
bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
    Mutex::Autolock al(mBufferLock);
    BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
    sp<MediaCodecBuffer> buffer = info->mData;
    int32_t flags;
    CHECK(buffer->meta()->findInt32("flags", &flags));
    if (flags & BUFFER_FLAG_DECODE_ONLY) {
        ALOGV("discardDecodeOnlyOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
        // Drop our record of the buffer and return it to the buffer channel.
        info->mOwnedByClient = false;
        info->mData.clear();
        mBufferChannel->discardBuffer(buffer);
        return true;
    }
    return false;
}
// static
// Posts |msg| and blocks for the reply. On post failure, the transport
// error is returned. Otherwise the reply is stored in |*response| and its
// "err" field (if present) becomes the return value; a reply without an
// "err" field is treated as OK.
status_t MediaCodec::PostAndAwaitResponse(
        const sp<AMessage> &msg, sp<AMessage> *response) {
    const status_t postErr = msg->postAndAwaitResponse(response);
    if (postErr != OK) {
        return postErr;
    }
    int32_t replyErr = OK;
    if ((*response)->findInt32("err", &replyErr)) {
        return replyErr;
    }
    return OK;
}
void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
PostReplyWithError(replyID, err);
}
void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
int32_t finalErr = err;
if (mReleasedByResourceManager) {
// override the err code if MediaCodec has been released by ResourceManager.
finalErr = DEAD_OBJECT;
}
sp<AMessage> response = new AMessage;
response->setInt32("err", finalErr);
response->postReply(replyID);
}
// Factory for the Codec2-based implementation; keeps GetCodecBase() below
// to a single construction point for CCodec.
static CodecBase *CreateCCodec() {
    return new CCodec;
}
//static
// Chooses a CodecBase implementation for |name|. The HAL |owner| string
// takes precedence when present: "default" selects ACodec (OMX) and an
// owner starting with "codec2" selects CCodec. Otherwise fall back to the
// component-name prefix: "c2." is Codec2, "omx." is OMX, and anything else
// is unsupported (returns NULL).
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
    if (owner != nullptr) {
        if (strcmp(owner, "default") == 0) {
            return new ACodec;
        }
        if (strncmp(owner, "codec2", 6) == 0) {
            return CreateCCodec();
        }
    }
    if (name.startsWithIgnoreCase("c2.")) {
        return CreateCCodec();
    }
    if (name.startsWithIgnoreCase("omx.")) {
        // at this time only ACodec specifies a mime type.
        return new ACodec;
    }
    return NULL;
}
// Immutable snapshot of the platform codec list, keyed by codec name.
// Built once from MediaCodecList at construction time.
struct CodecListCache {
    CodecListCache()
        : mCodecInfoMap{[] {
            // The map is built inside an immediately-invoked lambda so the
            // const member can be populated in the member-init list.
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            size_t count = mcl->countCodecs();
            std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
            for (size_t i = 0; i < count; ++i) {
                sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
                codecInfoMap.emplace(info->getCodecName(), info);
            }
            return codecInfoMap;
        }()} {
    }
    // Codec name -> info, fixed for the lifetime of the cache.
    const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
};
// Returns the process-wide codec list cache. The function-local static
// defers the MediaCodecList query to first use and makes initialization
// thread-safe (C++11 magic statics).
static const CodecListCache &GetCodecListCache() {
    static CodecListCache sCache{};
    return sCache;
}
// Initializes the codec named |name|: resolves its MediaCodecInfo and HAL
// owner, instantiates the CodecBase (ACodec or CCodec via mGetCodecBase),
// wires up loopers, callbacks and the buffer channel, then posts kWhatInit,
// retrying with resource reclamation on resource errors. On success the
// ResourceManager is told the client was created; on failure the error is
// recorded in mErrorLog and returned.
status_t MediaCodec::init(const AString &name) {
    status_t err = mResourceManagerProxy->init();
    if (err != OK) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Fatal error: failed to initialize ResourceManager (err=%d)", err));
        mCodec = NULL; // remove the codec
        return err;
    }
    // save init parameters for reset
    mInitName = name;
    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs, until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.
    mCodecInfo.clear();
    bool secureCodec = false;
    const char *owner = "";
    // "android.filter." components have no MediaCodecList entry; skip lookup.
    if (!name.startsWith("android.filter.")) {
        err = mGetCodecInfo(name, &mCodecInfo);
        if (err != OK) {
            mErrorLog.log(LOG_TAG, base::StringPrintf(
                    "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
            mCodec = NULL; // remove the codec.
            return err;
        }
        if (mCodecInfo == nullptr) {
            mErrorLog.log(LOG_TAG, base::StringPrintf(
                    "Getting codec info with name '%s' failed", name.c_str()));
            return NAME_NOT_FOUND;
        }
        secureCodec = name.endsWith(".secure");
        // Derive the codec domain from the first recognized media type.
        Vector<AString> mediaTypes;
        mCodecInfo->getSupportedMediaTypes(&mediaTypes);
        for (size_t i = 0; i < mediaTypes.size(); ++i) {
            if (mediaTypes[i].startsWith("video/")) {
                mDomain = DOMAIN_VIDEO;
                break;
            } else if (mediaTypes[i].startsWith("audio/")) {
                mDomain = DOMAIN_AUDIO;
                break;
            } else if (mediaTypes[i].startsWith("image/")) {
                mDomain = DOMAIN_IMAGE;
                break;
            }
        }
        owner = mCodecInfo->getOwnerName();
    }
    mCodec = mGetCodecBase(name, owner);
    if (mCodec == NULL) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
        return NAME_NOT_FOUND;
    }
    if (mDomain == DOMAIN_VIDEO) {
        // video codec needs dedicated looper
        if (mCodecLooper == NULL) {
            // NOTE(review): this inner 'err' shadows the function-level 'err'
            // above; harmless here since failures return immediately.
            status_t err = OK;
            mCodecLooper = new ALooper;
            mCodecLooper->setName("CodecLooper");
            err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
            if (OK != err) {
                mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
                return err;
            }
        }
        mCodecLooper->registerHandler(mCodec);
    } else {
        mLooper->registerHandler(mCodec);
    }
    mLooper->registerHandler(this);
    // All codec and buffer-channel events funnel through kWhatCodecNotify.
    mCodec->setCallback(
            std::unique_ptr<CodecBase::CodecCallback>(
                    new CodecCallback(new AMessage(kWhatCodecNotify, this))));
    mBufferChannel = mCodec->getBufferChannel();
    mBufferChannel->setCallback(
            std::unique_ptr<CodecBase::BufferCallback>(
                    new BufferCallback(new AMessage(kWhatCodecNotify, this))));
    sp<AMessage> msg = new AMessage(kWhatInit, this);
    if (mCodecInfo) {
        msg->setObject("codecInfo", mCodecInfo);
        // name may be different from mCodecInfo->getCodecName() if we stripped
        // ".secure"
    }
    msg->setString("name", name);
    // initial naming setup covers the period before the first call to ::configure().
    // after that, we manage this through ::configure() and the setup message.
    if (mMetricsHandle != 0) {
        mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
        mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
    }
    if (mDomain == DOMAIN_VIDEO) {
        mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
    }
    // If the ComponentName is not set yet, use the name passed by the user.
    if (mComponentName.empty()) {
        mIsHardware = !MediaCodecList::isSoftwareCodec(name);
        mResourceManagerProxy->setCodecName(name.c_str());
    }
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(secureCodec,
            toMediaResourceSubType(mIsHardware, mDomain)));
    // Post kWhatInit; on resource errors, reclaim and retry up to kMaxRetry.
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
        }
        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    if (OK == err) {
        // Notify the ResourceManager that, this codec has been created
        // (initialized) successfully.
        mResourceManagerProxy->notifyClientCreated();
    }
    return err;
}
// Installs the client's callback handler. Delivered to the codec thread via
// kWhatSetCallback and acknowledged synchronously.
status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
    sp<AMessage> request = new AMessage(kWhatSetCallback, this);
    request->setMessage("callback", callback);
    sp<AMessage> response;
    return PostAndAwaitResponse(request, &response);
}
// Registers the client's frame-rendered listener. Fire-and-forget: the
// notification message is posted asynchronously, no reply is awaited.
status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
    sp<AMessage> request = new AMessage(kWhatSetNotification, this);
    request->setMessage("on-frame-rendered", notify);
    return request->post();
}
// Registers the client's first-tunnel-frame-ready listener. Fire-and-forget:
// the notification message is posted asynchronously, no reply is awaited.
status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
    sp<AMessage> request = new AMessage(kWhatSetNotification, this);
    request->setMessage("first-tunnel-frame-ready", notify);
    return request->post();
}
/*
 * MediaFormat Shaping forward declarations
 * including the property name we use for control.
 */
// Shaping is on by default; the sysprop below lets it be disabled for debug.
static int enableMediaFormatShapingDefault = 1;
static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
// Defined later in this file; applies per-codec "mapping-" translations.
static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
        bool reverse);
// Creates a fresh mediametrics handle for an upcoming configure() call and
// seeds it from |format|: profile/level, encoder flag, session id, video
// dimensions/rates or audio channel-count/sample-rate, and (post-shaping)
// the requested QP bounds. Side effects: updates mLogSessionId, mWidth,
// mHeight, mRotationDegrees, mApiUsageMetrics and the low-latency state,
// and runs format shaping for encoders (unless disabled by sysprop).
// Returns the new handle (possibly 0 when metrics are unavailable); on
// invalid video dimensions, sets *err = BAD_VALUE and returns 0.
mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
                                                     uint32_t flags,
                                                     status_t* err) {
    *err = OK;
    mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
    bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
    // TODO: validity check log-session-id: it should be a 32-hex-digit.
    format->findString("log-session-id", &mLogSessionId);
    if (nextMetricsHandle != 0) {
        mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
        int32_t profile = 0;
        if (format->findInt32("profile", &profile)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
        }
        int32_t level = 0;
        if (format->findInt32("level", &level)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
        }
        mediametrics_setInt32(nextMetricsHandle, kCodecEncoder, isEncoder);
        if (!mLogSessionId.empty()) {
            mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
        }
        // moved here from ::init()
        mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
        mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
    }
    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
        // Cache geometry on the codec object as well as in the metrics.
        format->findInt32("width", &mWidth);
        format->findInt32("height", &mHeight);
        if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
            mRotationDegrees = 0;
        }
        if (nextMetricsHandle != 0) {
            mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
            mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
            mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
            int32_t maxWidth = 0;
            if (format->findInt32("max-width", &maxWidth)) {
                mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
            }
            int32_t maxHeight = 0;
            if (format->findInt32("max-height", &maxHeight)) {
                mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
            }
            int32_t colorFormat = -1;
            if (format->findInt32("color-format", &colorFormat)) {
                mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
            }
            int32_t appMaxInputSize = -1;
            if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
                mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
            }
            if (mDomain == DOMAIN_VIDEO) {
                float frameRate = -1.0;
                if (format->findFloat("frame-rate", &frameRate)) {
                    mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
                }
                float captureRate = -1.0;
                if (format->findFloat("capture-rate", &captureRate)) {
                    mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
                }
                float operatingRate = -1.0;
                if (format->findFloat("operating-rate", &operatingRate)) {
                    mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
                }
                int32_t priority = -1;
                if (format->findInt32("priority", &priority)) {
                    mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
                }
            }
        }
        // Prevent possible integer overflow in downstream code.
        // (width*height must fit comfortably in int32 pixel-count math.)
        if (mWidth < 0 || mHeight < 0 ||
               (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
            mErrorLog.log(LOG_TAG, base::StringPrintf(
                    "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
            mediametrics_delete(nextMetricsHandle);
            // Set the error code and return null handle.
            *err = BAD_VALUE;
            return 0;
        }
    } else {
        if (nextMetricsHandle != 0) {
            int32_t channelCount;
            if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
                mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
            }
            int32_t sampleRate;
            if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
                mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
            }
        }
    }
    if (isEncoder) {
        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
                                                 enableMediaFormatShapingDefault);
        if (!enableShaping) {
            ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
            if (nextMetricsHandle != 0) {
                mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
            }
        } else {
            (void) shapeMediaFormat(format, flags, nextMetricsHandle);
            // XXX: do we want to do this regardless of shaping enablement?
            mapFormat(mComponentName, format, nullptr, false);
        }
    }
    // push min/max QP to MediaMetrics after shaping
    if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
        int32_t qpIMin = -1;
        if (format->findInt32("video-qp-i-min", &qpIMin)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
        }
        int32_t qpIMax = -1;
        if (format->findInt32("video-qp-i-max", &qpIMax)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
        }
        int32_t qpPMin = -1;
        if (format->findInt32("video-qp-p-min", &qpPMin)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
        }
        int32_t qpPMax = -1;
        if (format->findInt32("video-qp-p-max", &qpPMax)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
        }
        int32_t qpBMin = -1;
        if (format->findInt32("video-qp-b-min", &qpBMin)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
        }
        int32_t qpBMax = -1;
        if (format->findInt32("video-qp-b-max", &qpBMax)) {
            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
        }
    }
    updateLowLatency(format);
    return nextMetricsHandle;
}
// Convenience overload: configure without a descrambler.
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &nativeWindow,
        const sp<ICrypto> &crypto,
        uint32_t flags) {
    return configure(format, nativeWindow, crypto, /* descrambler */ NULL, flags);
}
// Full configure(): records codec importance and creates/seeds metrics,
// enforces the crypto/descrambler requirement for secure codecs, then
// posts kWhatConfigure to the codec thread. On resource errors it reclaims
// resources and retries (up to kMaxRetry); on other fatal errors it resets
// the codec back to the INITIALIZED state (re-installing the callback) to
// preserve backward-compatible behavior.
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        const sp<IDescrambler> &descrambler,
        uint32_t flags) {
    // Update the codec importance.
    updateCodecImportance(format);
    // Create and set up metrics for this codec.
    status_t err = OK;
    mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
    if (err != OK) {
        return err;
    }
    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
    msg->setMessage("format", format);
    msg->setInt32("flags", flags);
    msg->setObject("surface", surface);
    // At most one of crypto/descrambler is forwarded (crypto wins).
    if (crypto != NULL || descrambler != NULL) {
        if (crypto != NULL) {
            msg->setPointer("crypto", crypto.get());
        } else {
            msg->setPointer("descrambler", descrambler.get());
        }
        if (nextMetricsHandle != 0) {
            mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
        }
    } else if (mFlags & kFlagIsSecure) {
        // Secure codec without crypto: hard error or warning depending on
        // the platform flag.
        if (android::media::codec::provider_->secure_codecs_require_crypto()) {
            mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
            return INVALID_OPERATION;
        } else {
            ALOGW("Crypto or descrambler should be given for secure codec");
        }
    }
    if (mConfigureMsg != nullptr) {
        // if re-configuring, we have one of these from before.
        // Recover the space before we discard the old mConfigureMsg
        mediametrics_handle_t metricsHandle;
        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
            mediametrics_delete(metricsHandle);
        }
    }
    msg->setInt64("metrics", nextMetricsHandle);
    // save msg for reset
    mConfigureMsg = msg;
    sp<AMessage> callback = mCallback;
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
            toMediaResourceSubType(mIsHardware, mDomain)));
    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
        // Don't know the buffer size at this point, but it's fine to use 1 because
        // the reclaimResource call doesn't consider the requester's buffer size for now.
        resources.push_back(MediaResource::GraphicMemoryResource(1));
    }
    for (int i = 0; i <= kMaxRetry; ++i) {
        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (err != OK && err != INVALID_OPERATION) {
            if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
            // MediaCodec now set state to UNINITIALIZED upon any fatal error.
            // To maintain backward-compatibility, do a reset() to put codec
            // back into INITIALIZED state.
            // But don't reset if the err is INVALID_OPERATION, which means
            // the configure failure is due to wrong state.
            ALOGE("configure failed with err 0x%08x, resetting...", err);
            status_t err2 = reset();
            if (err2 != OK) {
                ALOGE("retrying configure: failed to reset codec (%08x)", err2);
                break;
            }
            if (callback != nullptr) {
                // reset() cleared the callback; restore it before retrying.
                err2 = setCallback(callback);
                if (err2 != OK) {
                    ALOGE("retrying configure: failed to set callback (%08x)", err2);
                    break;
                }
            }
        }
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}
// Media Format Shaping support
//
// Entry-point table resolved at runtime from libmediaformatshaper;
// stays NULL (shaping disabled) until connectFormatShaper() succeeds.
static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
// Whether this device looks like a handheld (has a touchscreen and is not
// automotive/TV/watch); computed once in connectFormatShaper().
static bool sIsHandheld = true;
// Loads libmediaformatshaper exactly once per process (std::call_once),
// preferring the copy in the com.android.media mainline module over the
// system partition, and resolves its "shaper_ops" symbol into sShaperOps.
// Also probes PackageManager system features to decide whether the device
// is a handheld (sIsHandheld). Always returns true; on any failure the
// shaper simply remains disconnected (sShaperOps == NULL).
static bool connectFormatShaper() {
    static std::once_flag sCheckOnce;
    ALOGV("connectFormatShaper...");
    std::call_once(sCheckOnce, [&](){
        void *libHandle = NULL;
        nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
        // prefer any copy in the mainline module
        //
        android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
        AString libraryName = "libmediaformatshaper.so";
        if (mediaNs != NULL) {
            static const android_dlextinfo dlextinfo = {
                .flags = ANDROID_DLEXT_USE_NAMESPACE,
                .library_namespace = mediaNs,
            };
            AString libraryMainline = "/apex/com.android.media/";
#if __LP64__
            libraryMainline.append("lib64/");
#else
            libraryMainline.append("lib/");
#endif
            libraryMainline.append(libraryName);
            libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
                                           &dlextinfo);
            if (libHandle != NULL) {
                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
                    dlsym(libHandle, "shaper_ops");
            } else {
                ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
                      libraryMainline.c_str());
            }
        } else {
            ALOGV("connectFormatShaper: couldn't find media namespace.");
        }
        // fall back to the system partition, if present.
        //
        if (sShaperOps == NULL) {
            libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
            if (libHandle != NULL) {
                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
                    dlsym(libHandle, "shaper_ops");
            } else {
                ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
            }
        }
        // Only accept the ABI version we understand.
        if (sShaperOps != nullptr
            && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
            ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
                  sShaperOps->version);
            sShaperOps = nullptr;
        }
        if (sShaperOps != nullptr) {
            ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
        }
        nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
        ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
              (loading_finished - loading_started)/1000);
        // we also want to know whether this is a handheld device
        // start with assumption that the device is handheld.
        sIsHandheld = true;
        sp<IServiceManager> serviceMgr = defaultServiceManager();
        sp<content::pm::IPackageManagerNative> packageMgr;
        if (serviceMgr.get() != nullptr) {
            sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
            packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
        }
        // if we didn't get serviceMgr, we'll leave packageMgr as default null
        if (packageMgr != nullptr) {
            // MUST have these
            static const String16 featuresNeeded[] = {
                String16("android.hardware.touchscreen")
            };
            // these must be present to be a handheld
            for (::android::String16 required : featuresNeeded) {
                bool hasFeature = false;
                binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
                if (!status.isOk()) {
                    // Feature query failed: keep the current assumption.
                    ALOGE("%s: hasSystemFeature failed: %s",
                          __func__, status.exceptionMessage().c_str());
                    continue;
                }
                ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
                if (!hasFeature) {
                    ALOGV("... which means we are not handheld");
                    sIsHandheld = false;
                    break;
                }
            }
            // MUST NOT have these
            static const String16 featuresDisallowed[] = {
                String16("android.hardware.type.automotive"),
                String16("android.hardware.type.television"),
                String16("android.hardware.type.watch")
            };
            // any of these present -- we aren't a handheld
            for (::android::String16 forbidden : featuresDisallowed) {
                bool hasFeature = false;
                binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
                if (!status.isOk()) {
                    // Feature query failed: keep the current assumption.
                    ALOGE("%s: hasSystemFeature failed: %s",
                          __func__, status.exceptionMessage().c_str());
                    continue;
                }
                ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
                if (hasFeature) {
                    ALOGV("... which means we are not handheld");
                    sIsHandheld = false;
                    break;
                }
            }
        }
    });
    return true;
}
#if 0
// a construct to force the above dlopen() to run very early.
// goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
// failure of this means that cold start of those apps is slower by the time to dlopen()
// TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
//
static bool forceEarlyLoadingShaper = connectFormatShaper();
#endif
// parse the codec's properties: mapping, whether it meets min quality, etc
// and pass them into the video quality code
//
static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
sp<MediaCodecInfo> codecInfo, AString mediaType) {
sp<MediaCodecInfo::Capabilities> capabilities =
codecInfo->getCapabilitiesFor(mediaType.c_str());
if (capabilities == nullptr) {
ALOGI("no capabilities as part of the codec?");
} else {
const sp<AMessage> &details = capabilities->getDetails();
AString mapTarget;
int count = details->countEntries();
for(int ix = 0; ix < count; ix++) {
AMessage::Type entryType;
const char *mapSrc = details->getEntryNameAt(ix, &entryType);
// XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
//
static const char *featurePrefix = "feature-";
static const int featurePrefixLen = strlen(featurePrefix);
static const char *tuningPrefix = "tuning-";
static const int tuningPrefixLen = strlen(tuningPrefix);
static const char *mappingPrefix = "mapping-";
static const int mappingPrefixLen = strlen(mappingPrefix);
if (mapSrc == NULL) {
continue;
} else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
int32_t intValue;
if (details->findInt32(mapSrc, &intValue)) {
ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
(void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
intValue);
}
continue;
} else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
AString value;
if (details->findString(mapSrc, &value)) {
ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
(void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
value.c_str());
}
continue;
} else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
AString target;
if (details->findString(mapSrc, &target)) {
ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
target.c_str());
// key is really "kind-key"
// separate that, so setMap() sees the triple kind, key, value
const char *kind = &mapSrc[mappingPrefixLen];
const char *sep = strchr(kind, '-');
const char *key = sep+1;
if (sep != NULL) {
std::string xkind = std::string(kind, sep-kind);
(void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
key, target.